diff --git a/common.gypi b/common.gypi index 96d7ebe61acfeb..ed0d11a619c357 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.20', + 'v8_embedder_string': '-node.7', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/.vpython b/deps/v8/.vpython deleted file mode 100644 index d4a07677ca9a22..00000000000000 --- a/deps/v8/.vpython +++ /dev/null @@ -1,91 +0,0 @@ -# This is a vpython "spec" file. -# -# It describes patterns for python wheel dependencies of the python scripts in -# the V8 repo, particularly for dependencies that have compiled components -# (since pure-python dependencies can be easily vendored into third_party). -# -# When vpython is invoked, it finds this file and builds a python VirtualEnv, -# containing all of the dependencies described in this file, fetching them from -# CIPD (the "Chrome Infrastructure Package Deployer" service). Unlike `pip`, -# this never requires the end-user machine to have a working python extension -# compilation environment. All of these packages are built using: -# https://chromium.googlesource.com/infra/infra/+/master/infra/tools/dockerbuild/ -# -# All python scripts in the repo share this same spec, to avoid dependency -# fragmentation. -# -# If you have depot_tools installed in your $PATH, you can invoke python scripts -# in this repo by running them as you normally would run them, except -# substituting `vpython` instead of `python` on the command line, e.g.: -# vpython path/to/script.py some --arguments -# -# Read more about `vpython` and how to modify this file here: -# https://chromium.googlesource.com/infra/infra/+/master/doc/users/vpython.md - -python_version: "2.7" - -# The default set of platforms vpython checks does not yet include mac-arm64. -# Setting `verify_pep425_tag` to the list of platforms we explicitly must support -# allows us to ensure that vpython specs stay mac-arm64-friendly -verify_pep425_tag: [ - {python: "cp27", abi: "cp27mu", platform: "manylinux1_x86_64"}, - {python: "cp27", abi: "cp27mu", platform: "linux_arm64"}, - {python: "cp27", abi: "cp27mu", platform: "linux_armv6l"}, - - {python: "cp27", abi: "cp27m", platform: "macosx_10_10_intel"}, - {python: "cp27", abi: "cp27m", platform: "macosx_11_0_arm64"}, - - {python: "cp27", abi: "cp27m", platform: "win32"}, - {python: "cp27", abi: "cp27m", platform: "win_amd64"} -] - -# Needed by third_party/catapult/devil/devil, which is imported by -# build/android/test_runner.py when running performance tests. 
-wheel: < - name: "infra/python/wheels/psutil/${vpython_platform}" - version: "version:5.2.2" -> - -# Used by: -# build/toolchain/win -wheel: < - name: "infra/python/wheels/pypiwin32/${vpython_platform}" - version: "version:219" - match_tag: < - platform: "win32" - > - match_tag: < - platform: "win_amd64" - > -> - -# Used by: -# tools/unittests/run_perf_test.py -wheel: < - name: "infra/python/wheels/coverage/${vpython_platform}" - version: "version:4.3.4" -> -wheel: < - name: "infra/python/wheels/six-py2_py3" - version: "version:1.10.0" -> -wheel: < - name: "infra/python/wheels/pbr-py2_py3" - version: "version:3.0.0" -> -wheel: < - name: "infra/python/wheels/funcsigs-py2_py3" - version: "version:1.0.2" -> -wheel: < - name: "infra/python/wheels/mock-py2_py3" - version: "version:2.0.0" -> - -# Used by: -# tools/run_perf.py -# tools/unittests/run_perf_test.py -wheel: < - name: "infra/python/wheels/numpy/${vpython_platform}" - version: "version:1.11.3" -> diff --git a/deps/v8/.vpython3 b/deps/v8/.vpython3 index 50fab3bb519735..1187542f5e19a1 100644 --- a/deps/v8/.vpython3 +++ b/deps/v8/.vpython3 @@ -47,7 +47,7 @@ wheel: < wheel: < name: "infra/python/wheels/coverage/${vpython_platform}" - version: "version:5.5.chromium.2" + version: "version:5.5.chromium.3" > wheel: < @@ -74,3 +74,8 @@ wheel: < name: "infra/python/wheels/protobuf-py3" version: "version:3.19.3" > + +wheel: < + name: "infra/python/wheels/requests-py2_py3" + version: "version:2.13.0" +> diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS index 74d6f3b07ff643..af37f8db25121e 100644 --- a/deps/v8/AUTHORS +++ b/deps/v8/AUTHORS @@ -137,6 +137,7 @@ Ingvar Stepanyan <me@rreverser.com> Ioseb Dzmanashvili <ioseb.dzmanashvili@gmail.com> Isiah Meadows <impinball@gmail.com> Jaime Bernardo <jaime@janeasystems.com> +Jake Hughes <jh@jakehughes.uk> James M Snell <jasnell@gmail.com> James Pike <g00gle@chilon.net> Jan Krems <jan.krems@gmail.com> diff --git a/deps/v8/BUILD.bazel b/deps/v8/BUILD.bazel index 4e89f90e7e31e1..f216a1811da852 100644 --- a/deps/v8/BUILD.bazel +++ b/deps/v8/BUILD.bazel @@ -891,6 +891,7 @@ filegroup( "src/builtins/typed-array-sort.tq", "src/builtins/typed-array-subarray.tq", "src/builtins/typed-array-to-reversed.tq", + "src/builtins/typed-array-to-sorted.tq", "src/builtins/typed-array-values.tq", "src/builtins/typed-array-with.tq", "src/builtins/typed-array.tq", @@ -925,6 +926,7 @@ filegroup( "src/objects/js-objects.tq", "src/objects/js-promise.tq", "src/objects/js-proxy.tq", + "src/objects/js-raw-json.tq", "src/objects/js-regexp-string-iterator.tq", "src/objects/js-regexp.tq", "src/objects/js-shadow-realm.tq", @@ -980,6 +982,7 @@ filegroup( "src/objects/js-collator.tq", "src/objects/js-date-time-format.tq", "src/objects/js-display-names.tq", + "src/objects/js-duration-format.tq", "src/objects/js-list-format.tq", "src/objects/js-locale.tq", "src/objects/js-number-format.tq", @@ -1267,8 +1270,6 @@ filegroup( "src/debug/debug-scopes.h", "src/debug/debug-stack-trace-iterator.cc", "src/debug/debug-stack-trace-iterator.h", - "src/debug/debug-type-profile.cc", - "src/debug/debug-type-profile.h", "src/debug/debug.cc", "src/debug/debug.h", "src/debug/interface-types.h", @@ -1435,6 +1436,9 @@ filegroup( "src/heap/embedder-tracing.cc", "src/heap/embedder-tracing.h", "src/heap/embedder-tracing-inl.h", + "src/heap/evacuation-verifier.cc", + "src/heap/evacuation-verifier.h", + "src/heap/evacuation-verifier-inl.h", "src/heap/factory-base.cc", "src/heap/factory-base.h", "src/heap/factory-base-inl.h", @@ -1497,6 +1501,8 @@ filegroup( 
"src/heap/marking-barrier.cc", "src/heap/marking-barrier.h", "src/heap/marking-barrier-inl.h", + "src/heap/marking-state.h", + "src/heap/marking-state-inl.h", "src/heap/marking-visitor-inl.h", "src/heap/marking-visitor.h", "src/heap/marking-worklist-inl.h", @@ -1529,6 +1535,9 @@ filegroup( "src/heap/paged-spaces.h", "src/heap/parallel-work-item.h", "src/heap/parked-scope.h", + "src/heap/pretenuring-handler-inl.h", + "src/heap/pretenuring-handler.cc", + "src/heap/pretenuring-handler.h", "src/heap/progress-bar.h", "src/heap/read-only-heap-inl.h", "src/heap/read-only-heap.cc", @@ -1753,6 +1762,9 @@ filegroup( "src/objects/js-promise.h", "src/objects/js-proxy-inl.h", "src/objects/js-proxy.h", + "src/objects/js-raw-json-inl.h", + "src/objects/js-raw-json.h", + "src/objects/js-raw-json.cc", "src/objects/js-regexp-inl.h", "src/objects/js-regexp-string-iterator-inl.h", "src/objects/js-regexp-string-iterator.h", @@ -1978,8 +1990,6 @@ filegroup( "src/regexp/experimental/experimental-interpreter.h", "src/regexp/experimental/experimental.cc", "src/regexp/experimental/experimental.h", - "src/regexp/property-sequences.cc", - "src/regexp/property-sequences.h", "src/regexp/regexp-ast.cc", "src/regexp/regexp-ast.h", "src/regexp/regexp-bytecode-generator-inl.h", @@ -2061,6 +2071,8 @@ filegroup( "src/sandbox/sandbox.h", "src/sandbox/sandboxed-pointer-inl.h", "src/sandbox/sandboxed-pointer.h", + "src/sandbox/bounded-size-inl.h", + "src/sandbox/bounded-size.h", "src/base/sanitizer/asan.h", "src/base/sanitizer/lsan-page-allocator.cc", "src/base/sanitizer/lsan-page-allocator.h", @@ -2527,8 +2539,8 @@ filegroup( "src/wasm/names-provider.cc", "src/wasm/names-provider.h", "src/wasm/object-access.h", - "src/wasm/signature-map.cc", - "src/wasm/signature-map.h", + "src/wasm/pgo.cc", + "src/wasm/pgo.h", "src/wasm/simd-shuffle.cc", "src/wasm/simd-shuffle.h", "src/wasm/stacks.cc", @@ -2606,6 +2618,9 @@ filegroup( "src/objects/js-display-names.cc", "src/objects/js-display-names.h", "src/objects/js-display-names-inl.h", + "src/objects/js-duration-format.cc", + "src/objects/js-duration-format.h", + "src/objects/js-duration-format-inl.h", "src/objects/js-list-format.cc", "src/objects/js-list-format.h", "src/objects/js-list-format-inl.h", @@ -2872,13 +2887,20 @@ filegroup( "src/compiler/turboshaft/graph.h", "src/compiler/turboshaft/graph-visualizer.cc", "src/compiler/turboshaft/graph-visualizer.h", + "src/compiler/turboshaft/machine-optimization-assembler.h", "src/compiler/turboshaft/operations.cc", "src/compiler/turboshaft/operations.h", + "src/compiler/turboshaft/operation-matching.h", "src/compiler/turboshaft/optimization-phase.cc", "src/compiler/turboshaft/optimization-phase.h", "src/compiler/turboshaft/recreate-schedule.cc", "src/compiler/turboshaft/recreate-schedule.h", + "src/compiler/turboshaft/representations.cc", + "src/compiler/turboshaft/representations.h", "src/compiler/turboshaft/sidetable.h", + "src/compiler/turboshaft/simplify-tf-loops.cc", + "src/compiler/turboshaft/simplify-tf-loops.h", + "src/compiler/turboshaft/utils.cc", "src/compiler/turboshaft/utils.h", "src/compiler/turboshaft/value-numbering-assembler.h", "src/compiler/type-cache.cc", @@ -2891,6 +2913,7 @@ filegroup( "src/compiler/typer.h", "src/compiler/types.cc", "src/compiler/types.h", + "src/compiler/use-info.h", "src/compiler/value-numbering-reducer.cc", "src/compiler/value-numbering-reducer.h", "src/compiler/verifier.cc", @@ -3101,6 +3124,7 @@ filegroup( "src/heap/cppgc/stats-collector.h", "src/heap/cppgc/sweeper.cc", "src/heap/cppgc/sweeper.h", 
+ "src/heap/cppgc/heap-config.h", "src/heap/cppgc/task-handle.h", "src/heap/cppgc/trace-event.h", "src/heap/cppgc/trace-trait.cc", @@ -3119,6 +3143,7 @@ filegroup( srcs = [ "src/heap/base/active-system-pages.cc", "src/heap/base/active-system-pages.h", + "src/heap/base/basic-slot-set.h", "src/heap/base/stack.cc", "src/heap/base/stack.h", "src/heap/base/worklist.cc", diff --git a/deps/v8/BUILD.gn b/deps/v8/BUILD.gn index 3f78b1773f3907..61187af3053a02 100644 --- a/deps/v8/BUILD.gn +++ b/deps/v8/BUILD.gn @@ -285,8 +285,11 @@ declare_args() { target_os == "fuchsia" # Enable control-flow integrity features, such as pointer authentication for - # ARM64. - v8_control_flow_integrity = false + # ARM64. Enable it by default for simulator builds and when native code + # supports it as well. + v8_control_flow_integrity = + v8_current_cpu == "arm64" && + (target_is_simulator || arm_control_flow_integrity != "none") # Enable heap reservation of size 4GB. Only possible for 64bit archs. cppgc_enable_caged_heap = @@ -465,6 +468,8 @@ if (v8_builtins_profiling_log_file == "default") { v8_builtins_profiling_log_file = "tools/builtins-pgo/x64.profile" } else if (v8_current_cpu == "arm64") { v8_builtins_profiling_log_file = "tools/builtins-pgo/arm64.profile" + } else if (v8_current_cpu == "arm") { + v8_builtins_profiling_log_file = "tools/builtins-pgo/arm.profile" } } } @@ -496,14 +501,6 @@ assert(!v8_enable_trace_ignition || v8_enable_trace_unoptimized, assert(!v8_enable_trace_baseline_exec || v8_enable_trace_unoptimized, "Baseline tracing requires unoptimized tracing to be enabled.") -# Check if it is a Chromium build and activate PAC/BTI if needed. -# TODO(cavalcantii): have a single point of integration with PAC/BTI flags. -if (build_with_chromium && v8_current_cpu == "arm64" && - (arm_control_flow_integrity == "standard" || - arm_control_flow_integrity == "pac")) { - v8_control_flow_integrity = true -} - if (v8_enable_short_builtin_calls && (!v8_enable_pointer_compression && v8_current_cpu != "x64")) { # Disable short calls when pointer compression is not enabled, except x64, @@ -521,9 +518,8 @@ if (v8_enable_sandbox == "") { # once that is enabled everywhere by default. # TODO(chromium:1325784) the sandbox is not currently supported in Chromium # on Fuchsia. - v8_enable_sandbox = - build_with_chromium && v8_enable_pointer_compression_shared_cage && - v8_enable_external_code_space && target_os != "fuchsia" + v8_enable_sandbox = v8_enable_pointer_compression_shared_cage && + v8_enable_external_code_space && target_os != "fuchsia" } # Enable all available sandbox features if sandbox future is enabled. @@ -1102,18 +1098,9 @@ config("toolchain") { } if (v8_current_cpu == "arm64") { defines += [ "V8_TARGET_ARCH_ARM64" ] - if (current_cpu == "arm64") { - # This will enable PAC+BTI in code generation and static code. - if (v8_control_flow_integrity && - (!build_with_chromium || arm_control_flow_integrity == "standard")) { - cflags += [ "-mbranch-protection=standard" ] - asmflags = [ "-mmark-bti-property" ] - } else if (build_with_chromium && arm_control_flow_integrity == "pac") { - # This should enable PAC only in C++ code (and no CFI in runtime - # generated code). For details, see crbug.com/919548. - cflags += [ "-mbranch-protection=pac-ret" ] - asmflags = [ "-mbranch-protection=pac-ret" ] - } + if (current_cpu == "arm64" && v8_control_flow_integrity && is_clang) { + # Mark assembly code as BTI-compatible. 
+ asmflags = [ "-mmark-bti-property" ] } } @@ -1619,6 +1606,9 @@ action("postmortem-metadata") { "src/objects/js-objects-inl.h", "src/objects/js-promise.h", "src/objects/js-promise-inl.h", + "src/objects/js-raw-json.cc", + "src/objects/js-raw-json.h", + "src/objects/js-raw-json-inl.h", "src/objects/js-regexp.cc", "src/objects/js-regexp.h", "src/objects/js-regexp-inl.h", @@ -1790,6 +1780,7 @@ torque_files = [ "src/builtins/typed-array-sort.tq", "src/builtins/typed-array-subarray.tq", "src/builtins/typed-array-to-reversed.tq", + "src/builtins/typed-array-to-sorted.tq", "src/builtins/typed-array-values.tq", "src/builtins/typed-array-with.tq", "src/builtins/typed-array.tq", @@ -1824,6 +1815,7 @@ torque_files = [ "src/objects/js-objects.tq", "src/objects/js-promise.tq", "src/objects/js-proxy.tq", + "src/objects/js-raw-json.tq", "src/objects/js-regexp-string-iterator.tq", "src/objects/js-regexp.tq", "src/objects/js-shadow-realm.tq", @@ -1870,6 +1862,7 @@ if (v8_enable_i18n_support) { "src/objects/js-collator.tq", "src/objects/js-date-time-format.tq", "src/objects/js-display-names.tq", + "src/objects/js-duration-format.tq", "src/objects/js-list-format.tq", "src/objects/js-locale.tq", "src/objects/js-number-format.tq", @@ -2930,10 +2923,14 @@ v8_header_set("v8_internal_headers") { "src/compiler/turboshaft/graph-builder.h", "src/compiler/turboshaft/graph-visualizer.h", "src/compiler/turboshaft/graph.h", + "src/compiler/turboshaft/machine-optimization-assembler.h", + "src/compiler/turboshaft/operation-matching.h", "src/compiler/turboshaft/operations.h", "src/compiler/turboshaft/optimization-phase.h", "src/compiler/turboshaft/recreate-schedule.h", + "src/compiler/turboshaft/representations.h", "src/compiler/turboshaft/sidetable.h", + "src/compiler/turboshaft/simplify-tf-loops.h", "src/compiler/turboshaft/utils.h", "src/compiler/turboshaft/value-numbering-assembler.h", "src/compiler/type-cache.h", @@ -2941,6 +2938,7 @@ v8_header_set("v8_internal_headers") { "src/compiler/typed-optimization.h", "src/compiler/typer.h", "src/compiler/types.h", + "src/compiler/use-info.h", "src/compiler/value-numbering-reducer.h", "src/compiler/verifier.h", "src/compiler/write-barrier-kind.h", @@ -2956,7 +2954,6 @@ v8_header_set("v8_internal_headers") { "src/debug/debug-scope-iterator.h", "src/debug/debug-scopes.h", "src/debug/debug-stack-trace-iterator.h", - "src/debug/debug-type-profile.h", "src/debug/debug.h", "src/debug/interface-types.h", "src/debug/liveedit-diff.h", @@ -3053,6 +3050,8 @@ v8_header_set("v8_internal_headers") { "src/heap/embedder-tracing.h", "src/heap/evacuation-allocator-inl.h", "src/heap/evacuation-allocator.h", + "src/heap/evacuation-verifier-inl.h", + "src/heap/evacuation-verifier.h", "src/heap/factory-base-inl.h", "src/heap/factory-base.h", "src/heap/factory-inl.h", @@ -3090,6 +3089,8 @@ v8_header_set("v8_internal_headers") { "src/heap/mark-compact.h", "src/heap/marking-barrier-inl.h", "src/heap/marking-barrier.h", + "src/heap/marking-state-inl.h", + "src/heap/marking-state.h", "src/heap/marking-visitor-inl.h", "src/heap/marking-visitor.h", "src/heap/marking-worklist-inl.h", @@ -3111,6 +3112,8 @@ v8_header_set("v8_internal_headers") { "src/heap/paged-spaces.h", "src/heap/parallel-work-item.h", "src/heap/parked-scope.h", + "src/heap/pretenuring-handler-inl.h", + "src/heap/pretenuring-handler.h", "src/heap/progress-bar.h", "src/heap/read-only-heap-inl.h", "src/heap/read-only-heap.h", @@ -3269,6 +3272,8 @@ v8_header_set("v8_internal_headers") { "src/objects/js-promise.h", 
"src/objects/js-proxy-inl.h", "src/objects/js-proxy.h", + "src/objects/js-raw-json-inl.h", + "src/objects/js-raw-json.h", "src/objects/js-regexp-inl.h", "src/objects/js-regexp-string-iterator-inl.h", "src/objects/js-regexp-string-iterator.h", @@ -3431,7 +3436,6 @@ v8_header_set("v8_internal_headers") { "src/regexp/experimental/experimental-compiler.h", "src/regexp/experimental/experimental-interpreter.h", "src/regexp/experimental/experimental.h", - "src/regexp/property-sequences.h", "src/regexp/regexp-ast.h", "src/regexp/regexp-bytecode-generator-inl.h", "src/regexp/regexp-bytecode-generator.h", @@ -3455,6 +3459,8 @@ v8_header_set("v8_internal_headers") { "src/roots/roots.h", "src/runtime/runtime-utils.h", "src/runtime/runtime.h", + "src/sandbox/bounded-size-inl.h", + "src/sandbox/bounded-size.h", "src/sandbox/external-pointer-inl.h", "src/sandbox/external-pointer-table-inl.h", "src/sandbox/external-pointer-table.h", @@ -3625,7 +3631,7 @@ v8_header_set("v8_internal_headers") { "src/wasm/module-instantiate.h", "src/wasm/names-provider.h", "src/wasm/object-access.h", - "src/wasm/signature-map.h", + "src/wasm/pgo.h", "src/wasm/simd-shuffle.h", "src/wasm/stacks.h", "src/wasm/streaming-decoder.h", @@ -3682,6 +3688,8 @@ v8_header_set("v8_internal_headers") { "src/objects/js-date-time-format.h", "src/objects/js-display-names-inl.h", "src/objects/js-display-names.h", + "src/objects/js-duration-format-inl.h", + "src/objects/js-duration-format.h", "src/objects/js-list-format-inl.h", "src/objects/js-list-format.h", "src/objects/js-locale-inl.h", @@ -4229,6 +4237,9 @@ v8_source_set("v8_turboshaft") { "src/compiler/turboshaft/operations.cc", "src/compiler/turboshaft/optimization-phase.cc", "src/compiler/turboshaft/recreate-schedule.cc", + "src/compiler/turboshaft/representations.cc", + "src/compiler/turboshaft/simplify-tf-loops.cc", + "src/compiler/turboshaft/utils.cc", ] public_deps = [ @@ -4369,7 +4380,6 @@ v8_source_set("v8_base_without_compiler") { "src/debug/debug-scope-iterator.cc", "src/debug/debug-scopes.cc", "src/debug/debug-stack-trace-iterator.cc", - "src/debug/debug-type-profile.cc", "src/debug/debug.cc", "src/debug/liveedit-diff.cc", "src/debug/liveedit.cc", @@ -4436,6 +4446,7 @@ v8_source_set("v8_base_without_compiler") { "src/heap/cppgc-js/unified-heap-marking-verifier.cc", "src/heap/cppgc-js/unified-heap-marking-visitor.cc", "src/heap/embedder-tracing.cc", + "src/heap/evacuation-verifier.cc", "src/heap/factory-base.cc", "src/heap/factory.cc", "src/heap/finalization-registry-cleanup-task.cc", @@ -4469,6 +4480,7 @@ v8_source_set("v8_base_without_compiler") { "src/heap/object-stats.cc", "src/heap/objects-visiting.cc", "src/heap/paged-spaces.cc", + "src/heap/pretenuring-handler.cc", "src/heap/read-only-heap.cc", "src/heap/read-only-spaces.cc", "src/heap/safepoint.cc", @@ -4541,12 +4553,14 @@ v8_source_set("v8_base_without_compiler") { "src/objects/js-collator.cc", "src/objects/js-date-time-format.cc", "src/objects/js-display-names.cc", + "src/objects/js-duration-format.cc", "src/objects/js-function.cc", "src/objects/js-list-format.cc", "src/objects/js-locale.cc", "src/objects/js-number-format.cc", "src/objects/js-objects.cc", "src/objects/js-plural-rules.cc", + "src/objects/js-raw-json.cc", "src/objects/js-regexp.cc", "src/objects/js-relative-time-format.cc", "src/objects/js-segment-iterator.cc", @@ -4615,7 +4629,6 @@ v8_source_set("v8_base_without_compiler") { "src/regexp/experimental/experimental-compiler.cc", "src/regexp/experimental/experimental-interpreter.cc", 
"src/regexp/experimental/experimental.cc", - "src/regexp/property-sequences.cc", "src/regexp/regexp-ast.cc", "src/regexp/regexp-bytecode-generator.cc", "src/regexp/regexp-bytecode-peephole.cc", @@ -4767,7 +4780,7 @@ v8_source_set("v8_base_without_compiler") { "src/wasm/module-decoder.cc", "src/wasm/module-instantiate.cc", "src/wasm/names-provider.cc", - "src/wasm/signature-map.cc", + "src/wasm/pgo.cc", "src/wasm/simd-shuffle.cc", "src/wasm/stacks.cc", "src/wasm/streaming-decoder.cc", @@ -5130,6 +5143,7 @@ v8_source_set("v8_base_without_compiler") { "src/objects/js-collator.cc", "src/objects/js-date-time-format.cc", "src/objects/js-display-names.cc", + "src/objects/js-duration-format.cc", "src/objects/js-list-format.cc", "src/objects/js-locale.cc", "src/objects/js-number-format.cc", @@ -5712,6 +5726,7 @@ v8_source_set("v8_bigint") { v8_header_set("v8_heap_base_headers") { sources = [ "src/heap/base/active-system-pages.h", + "src/heap/base/basic-slot-set.h", "src/heap/base/stack.h", "src/heap/base/worklist.h", ] @@ -5856,6 +5871,7 @@ v8_source_set("cppgc_base") { "src/heap/cppgc/globals.h", "src/heap/cppgc/heap-base.cc", "src/heap/cppgc/heap-base.h", + "src/heap/cppgc/heap-config.h", "src/heap/cppgc/heap-consistency.cc", "src/heap/cppgc/heap-growing.cc", "src/heap/cppgc/heap-growing.h", @@ -6253,7 +6269,7 @@ group("gn_all") { } group("v8_python_base") { - data = [ ".vpython" ] + data = [ ".vpython3" ] } group("v8_clusterfuzz") { diff --git a/deps/v8/COMMON_OWNERS b/deps/v8/COMMON_OWNERS index 39f241b3e9a2f1..b7dc8f2147de77 100644 --- a/deps/v8/COMMON_OWNERS +++ b/deps/v8/COMMON_OWNERS @@ -18,6 +18,7 @@ machenbach@chromium.org manoskouk@chromium.org mathias@chromium.org marja@chromium.org +mliedtke@chromium.org mlippautz@chromium.org mslekova@chromium.org nicohartmann@chromium.org diff --git a/deps/v8/DEPS b/deps/v8/DEPS index 457fcc13f7477e..6304c386414450 100644 --- a/deps/v8/DEPS +++ b/deps/v8/DEPS @@ -45,15 +45,19 @@ vars = { 'reclient_version': 're_client_version:0.69.0.458df98-gomaip', # GN CIPD package version. - 'gn_version': 'git_revision:b4851eb2062f76a880c07f7fa0d12913beb6d79e', + 'gn_version': 'git_revision:cc28efe62ef0c2fb32455f414a29c4a55bb7fbc4', + + # ninja CIPD package version + # https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja + 'ninja_version': 'version:2@1.8.2.chromium.3', # luci-go CIPD package version. - 'luci_go': 'git_revision:c93fd3c5ebdc3999eea86a7623dbd1ed4b40bc78', + 'luci_go': 'git_revision:20c50aa39686d91330c2daceccaa4ef1a0a72ee4', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling Fuchsia sdk # and whatever else without interference from each other. 
- 'fuchsia_version': 'version:9.20220913.3.1', + 'fuchsia_version': 'version:9.20220919.2.1', # Three lines of non-changing comments so that # the commit queue can handle CLs rolling android_sdk_build-tools_version @@ -93,9 +97,9 @@ deps = { 'base/trace_event/common': Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + '521ac34ebd795939c7e16b37d9d3ddb40e8ed556', 'build': - Var('chromium_url') + '/chromium/src/build.git' + '@' + '4157fb6cb44135013300168c9f4c5b95d04acf70', + Var('chromium_url') + '/chromium/src/build.git' + '@' + '7e7c21a9ac34c4fc2b255aa44d639efec9c33b90', 'buildtools': - Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + 'e713c13e2fa3b7aa9131276f27990011e1aa6a73', + Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '9174abb6ac087b46f22248dc713b6c0328b8f774', 'buildtools/clang_format/script': Var('chromium_url') + '/external/github.com/llvm/llvm-project/clang/tools/clang-format.git' + '@' + '8b525d2747f2584fc35d8c7e612e66f377858df7', 'buildtools/linux64': { @@ -119,11 +123,11 @@ deps = { 'condition': 'host_os == "mac"', }, 'buildtools/third_party/libc++/trunk': - Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + 'c1e647c7c30238f7c512457eec55798e3458fd8a', + Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '2e919977e0030ce61bd19c40cefe31b995f1e2d4', 'buildtools/third_party/libc++abi/trunk': - Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + '5c3e02e92ae8bbc1bf1001bd9ef0d76e044ddb86', + Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + 'db2a783a7d1ef0f0ef31da4b6e3de0c31fcfd93f', 'buildtools/third_party/libunwind/trunk': - Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + '60a480ee1819266cf8054548454f99838583cd76', + Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + '08ebcbe7b672a04e341cb3a88d8bf4276f96ac6e', 'buildtools/win': { 'packages': [ { @@ -149,7 +153,7 @@ deps = { 'test/mozilla/data': Var('chromium_url') + '/v8/deps/third_party/mozilla-tests.git' + '@' + 'f6c578a10ea707b1a8ab0b88943fe5115ce2b9be', 'test/test262/data': - Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + '746197355c1705b7d4463fc75c29433c0ce2fd0d', + Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + '58b7a2358286b918efd38eac4b2facbc8ada1206', 'third_party/android_ndk': { 'url': Var('chromium_url') + '/android_ndk.git' + '@' + '8388a2be5421311dc75c5f937aae13d821a27f3d', 'condition': 'checkout_android', @@ -197,7 +201,7 @@ deps = { 'dep_type': 'cipd', }, 'third_party/catapult': { - 'url': Var('chromium_url') + '/catapult.git' + '@' + '37391a1619e953e23d3441dbc61e658e881fede4', + 'url': Var('chromium_url') + '/catapult.git' + '@' + 'ff03621a71c01a6f2b0f3bf2677cf815291a9e85', 'condition': 'checkout_android', }, 'third_party/colorama/src': { @@ -205,7 +209,7 @@ deps = { 'condition': 'checkout_android', }, 'third_party/depot_tools': - Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + '9ebcfa6be17c2d1e7bd72135ceab5e767ed89b7d', + Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'a724859f7a9b3531c0373d86886a42314e772532', 'third_party/fuchsia-sdk/sdk': { 'packages': [ { @@ -239,6 +243,16 @@ deps = { Var('chromium_url') + 
'/infra/luci/luci-py/client/libs/logdog' + '@' + '0b2078a90f7a638d576b3a7c407d136f2fb62399', 'third_party/markupsafe': Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '1b882ef6372b58bfd55a3285f37ed801be9137cd', + 'third_party/ninja': { + 'packages': [ + { + 'package': 'infra/3pp/tools/ninja/${{platform}}', + 'version': Var('ninja_version'), + } + ], + 'dep_type': 'cipd', + 'condition': 'host_cpu != "s390" and host_cpu != "ppc"' + }, 'third_party/perfetto': Var('android_url') + '/platform/external/perfetto.git' + '@' + '0eba417b2c72264fa825dc21067b9adc9b8adf70', 'third_party/protobuf': @@ -248,9 +262,9 @@ deps = { 'condition': 'checkout_android', }, 'third_party/zlib': - Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + 'f48cb14d487038d20c85680e29351e095a0fea8b', + Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + 'd689fca54d7b43154f7cf77f785d19f2628fa133', 'tools/clang': - Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '02a202a7b1fa863352c0c9fb088fd3c0cf48c978', + Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + 'a5e0d72349d028a4023927d6d166a8478355fac3', 'tools/luci-go': { 'packages': [ { @@ -574,16 +588,6 @@ hooks = [ 'action': ['python3', 'tools/clang/scripts/update.py', '--package=objdump'], }, - # Download and initialize "vpython" VirtualEnv environment packages. - { - 'name': 'vpython_common', - 'pattern': '.', - 'condition': 'checkout_android', - 'action': [ 'vpython', - '-vpython-spec', '.vpython', - '-vpython-tool', 'install', - ], - }, { 'name': 'vpython3_common', 'pattern': '.', diff --git a/deps/v8/ENG_REVIEW_OWNERS b/deps/v8/ENG_REVIEW_OWNERS index 78d53b821d67e5..4f80f9d15a74c0 100644 --- a/deps/v8/ENG_REVIEW_OWNERS +++ b/deps/v8/ENG_REVIEW_OWNERS @@ -5,5 +5,6 @@ adamk@chromium.org danno@chromium.org hpayer@chromium.org +mlippautz@chromium.org verwaest@chromium.org vahl@chromium.org diff --git a/deps/v8/bazel/defs.bzl b/deps/v8/bazel/defs.bzl index e957c0fad3bc4b..d8db3fe8ba9a62 100644 --- a/deps/v8/bazel/defs.bzl +++ b/deps/v8/bazel/defs.bzl @@ -151,6 +151,14 @@ def _default_args(): "-fno-integrated-as", ], "//conditions:default": [], + }) + select({ + "@v8//bazel/config:is_debug":[ + "-fvisibility=default", + ], + "//conditions:default": [ + "-fvisibility=hidden", + "-fvisibility-inlines-hidden", + ], }), includes = ["include"], linkopts = select({ @@ -407,15 +415,19 @@ v8_target_cpu_transition = transition( ) def _mksnapshot(ctx): + prefix = ctx.attr.prefix + suffix = ctx.attr.suffix outs = [ - ctx.actions.declare_file(ctx.attr.prefix + "/snapshot.cc"), - ctx.actions.declare_file(ctx.attr.prefix + "/embedded.S"), + ctx.actions.declare_file(prefix + "/snapshot" + suffix + ".cc"), + ctx.actions.declare_file(prefix + "/embedded" + suffix + ".S"), ] ctx.actions.run( outputs = outs, inputs = [], arguments = [ "--embedded_variant=Default", + "--target_os", + ctx.attr.target_os, "--startup_src", outs[0].path, "--embedded_src", @@ -436,26 +448,38 @@ _v8_mksnapshot = rule( executable = True, cfg = "exec", ), + "target_os": attr.string(mandatory = True), "_allowlist_function_transition": attr.label( default = "@bazel_tools//tools/allowlists/function_transition_allowlist", ), "prefix": attr.string(mandatory = True), + "suffix": attr.string(mandatory = True), }, cfg = v8_target_cpu_transition, ) -def v8_mksnapshot(name, args): +def v8_mksnapshot(name, args, suffix = ""): _v8_mksnapshot( name = "noicu/" + name, args = args, prefix = "noicu", - tool = ":noicu/mksnapshot", + tool = ":noicu/mksnapshot" + 
suffix, + suffix = suffix, + target_os = select({ + "@v8//bazel/config:is_macos": "mac", + "//conditions:default": "", + }), ) _v8_mksnapshot( name = "icu/" + name, args = args, prefix = "icu", - tool = ":icu/mksnapshot", + tool = ":icu/mksnapshot" + suffix, + suffix = suffix, + target_os = select({ + "@v8//bazel/config:is_macos": "mac", + "//conditions:default": "", + }), ) def _quote(val): diff --git a/deps/v8/include/cppgc/heap-handle.h b/deps/v8/include/cppgc/heap-handle.h index 5a0f9cd2edcb9f..8d825133b065d6 100644 --- a/deps/v8/include/cppgc/heap-handle.h +++ b/deps/v8/include/cppgc/heap-handle.h @@ -12,6 +12,7 @@ namespace cppgc { namespace internal { class HeapBase; class WriteBarrierTypeForCagedHeapPolicy; +class WriteBarrierTypeForNonCagedHeapPolicy; } // namespace internal /** @@ -34,6 +35,7 @@ class HeapHandle { friend class internal::HeapBase; friend class internal::WriteBarrierTypeForCagedHeapPolicy; + friend class internal::WriteBarrierTypeForNonCagedHeapPolicy; }; } // namespace cppgc diff --git a/deps/v8/include/cppgc/internal/member-storage.h b/deps/v8/include/cppgc/internal/member-storage.h index 98389b8cd3d531..0eb6382070c4da 100644 --- a/deps/v8/include/cppgc/internal/member-storage.h +++ b/deps/v8/include/cppgc/internal/member-storage.h @@ -61,7 +61,7 @@ class CageBaseGlobal final { #undef CPPGC_REQUIRE_CONSTANT_INIT #undef CPPGC_CONST -class CompressedPointer final { +class V8_TRIVIAL_ABI CompressedPointer final { public: using IntegralType = uint32_t; @@ -170,7 +170,7 @@ class CompressedPointer final { #endif // defined(CPPGC_POINTER_COMPRESSION) -class RawPointer final { +class V8_TRIVIAL_ABI RawPointer final { public: using IntegralType = uintptr_t; diff --git a/deps/v8/include/cppgc/internal/write-barrier.h b/deps/v8/include/cppgc/internal/write-barrier.h index 2d8e14be086d64..37bc5c973ef995 100644 --- a/deps/v8/include/cppgc/internal/write-barrier.h +++ b/deps/v8/include/cppgc/internal/write-barrier.h @@ -12,6 +12,7 @@ #include "cppgc/heap-state.h" #include "cppgc/internal/api-constants.h" #include "cppgc/internal/atomic-entry-flag.h" +#include "cppgc/internal/base-page-handle.h" #include "cppgc/internal/member-storage.h" #include "cppgc/platform.h" #include "cppgc/sentinel-pointer.h" @@ -283,7 +284,7 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch< return SetAndReturnType<WriteBarrier::Type::kGenerational>(params); } #else // !defined(CPPGC_YOUNG_GENERATION) - if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) { + if (V8_UNLIKELY(!handle.is_incremental_marking_in_progress())) { return SetAndReturnType<WriteBarrier::Type::kNone>(params); } #endif // !defined(CPPGC_YOUNG_GENERATION) @@ -326,11 +327,6 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final { template <WriteBarrier::ValueMode value_mode> struct ValueModeDispatch; - // TODO(chromium:1056170): Create fast path on API. - static bool IsMarking(const void*, HeapHandle**); - // TODO(chromium:1056170): Create fast path on API. - static bool IsMarking(HeapHandle&); - WriteBarrierTypeForNonCagedHeapPolicy() = delete; }; @@ -348,7 +344,13 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch< if (V8_LIKELY(!WriteBarrier::IsEnabled())) { return SetAndReturnType<WriteBarrier::Type::kNone>(params); } - if (IsMarking(object, ¶ms.heap)) { + // We know that |object| is within the normal page or in the beginning of a + // large page, so extract the page header by bitmasking. 
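The bitmasking mentioned in the comment above works because cppgc allocates its pages at a power-of-two alignment: clearing the low bits of any payload pointer yields the page start, where the page header (and, through it, the HeapHandle) lives. A minimal standalone sketch of the idea; kPageSize and PageHeader here are illustrative stand-ins, not the cppgc definitions, whose real entry point is BasePageHandle::FromPayload in the lines that follow:

```cpp
#include <cstdint>

// Assumed power-of-two page alignment; cppgc's actual constant differs in name.
constexpr uintptr_t kPageSize = uintptr_t{1} << 17;

struct PageHeader {
  void* heap_handle;  // Stand-in for the per-heap HeapHandle reference.
};

// Valid for objects on normal pages and for the beginning of large pages,
// which is exactly the precondition the comment above states.
inline PageHeader* PageHeaderFromPayload(const void* payload) {
  const uintptr_t address = reinterpret_cast<uintptr_t>(payload);
  return reinterpret_cast<PageHeader*>(address & ~(kPageSize - 1));
}

int main() {
  static char fake_payload[1];
  (void)PageHeaderFromPayload(fake_payload);  // Forms the pointer only.
  return 0;
}
```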
+ BasePageHandle* page = + BasePageHandle::FromPayload(const_cast<void*>(object)); + + HeapHandle& heap_handle = page->heap_handle(); + if (V8_LIKELY(heap_handle.is_incremental_marking_in_progress())) { return SetAndReturnType<WriteBarrier::Type::kMarking>(params); } return SetAndReturnType<WriteBarrier::Type::kNone>(params); @@ -364,7 +366,7 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch< HeapHandleCallback callback) { if (V8_UNLIKELY(WriteBarrier::IsEnabled())) { HeapHandle& handle = callback(); - if (IsMarking(handle)) { + if (V8_LIKELY(handle.is_incremental_marking_in_progress())) { params.heap = &handle; return SetAndReturnType<WriteBarrier::Type::kMarking>(params); } diff --git a/deps/v8/include/cppgc/member.h b/deps/v8/include/cppgc/member.h index 71f9cab65280da..9bc383634f18ea 100644 --- a/deps/v8/include/cppgc/member.h +++ b/deps/v8/include/cppgc/member.h @@ -28,7 +28,7 @@ namespace internal { // MemberBase always refers to the object as const object and defers to // BasicMember on casting to the right type as needed. -class MemberBase { +class V8_TRIVIAL_ABI MemberBase { public: #if defined(CPPGC_POINTER_COMPRESSION) using RawStorage = CompressedPointer; @@ -68,13 +68,16 @@ class MemberBase { V8_INLINE void ClearFromGC() const { raw_.Clear(); } private: + friend class MemberDebugHelper; + mutable RawStorage raw_; }; // The basic class from which all Member classes are 'generated'. template <typename T, typename WeaknessTag, typename WriteBarrierPolicy, typename CheckingPolicy> -class BasicMember final : private MemberBase, private CheckingPolicy { +class V8_TRIVIAL_ABI BasicMember final : private MemberBase, + private CheckingPolicy { public: using PointeeType = T; diff --git a/deps/v8/include/js_protocol-1.3.json b/deps/v8/include/js_protocol-1.3.json index ea573d11a61b03..a998d4611d16d3 100644 --- a/deps/v8/include/js_protocol-1.3.json +++ b/deps/v8/include/js_protocol-1.3.json @@ -946,34 +946,6 @@ { "name": "url", "type": "string", "description": "JavaScript script name or url." }, { "name": "functions", "type": "array", "items": { "$ref": "FunctionCoverage" }, "description": "Functions contained in the script that has coverage data." } ] - }, - { "id": "TypeObject", - "type": "object", - "description": "Describes a type collected during runtime.", - "properties": [ - { "name": "name", "type": "string", "description": "Name of a type collected with type profiling." } - ], - "experimental": true - }, - { "id": "TypeProfileEntry", - "type": "object", - "description": "Source offset and types for a parameter or return value.", - "properties": [ - { "name": "offset", "type": "integer", "description": "Source offset of the parameter or end of function for return values." }, - { "name": "types", "type": "array", "items": {"$ref": "TypeObject"}, "description": "The types for this parameter or return value."} - ], - "experimental": true - }, - { - "id": "ScriptTypeProfile", - "type": "object", - "description": "Type profile data collected during runtime for a JavaScript script.", - "properties": [ - { "name": "scriptId", "$ref": "Runtime.ScriptId", "description": "JavaScript script id." }, - { "name": "url", "type": "string", "description": "JavaScript script name or url." }, - { "name": "entries", "type": "array", "items": { "$ref": "TypeProfileEntry" }, "description": "Type profile entries for parameters and return values of the functions in the script." 
} - ], - "experimental": true } ], "commands": [ @@ -1024,24 +996,6 @@ { "name": "result", "type": "array", "items": { "$ref": "ScriptCoverage" }, "description": "Coverage data for the current isolate." } ], "description": "Collect coverage data for the current isolate. The coverage data may be incomplete due to garbage collection." - }, - { - "name": "startTypeProfile", - "description": "Enable type profile.", - "experimental": true - }, - { - "name": "stopTypeProfile", - "description": "Disable type profile. Disabling releases type profile data collected so far.", - "experimental": true - }, - { - "name": "takeTypeProfile", - "returns": [ - { "name": "result", "type": "array", "items": { "$ref": "ScriptTypeProfile" }, "description": "Type profile for all scripts since startTypeProfile() was turned on." } - ], - "description": "Collect type profile.", - "experimental": true } ], "events": [ diff --git a/deps/v8/include/js_protocol.pdl b/deps/v8/include/js_protocol.pdl index 2d560435522769..b3b97fa11768e6 100644 --- a/deps/v8/include/js_protocol.pdl +++ b/deps/v8/include/js_protocol.pdl @@ -918,30 +918,6 @@ domain Profiler # Functions contained in the script that has coverage data. array of FunctionCoverage functions - # Describes a type collected during runtime. - experimental type TypeObject extends object - properties - # Name of a type collected with type profiling. - string name - - # Source offset and types for a parameter or return value. - experimental type TypeProfileEntry extends object - properties - # Source offset of the parameter or end of function for return values. - integer offset - # The types for this parameter or return value. - array of TypeObject types - - # Type profile data collected during runtime for a JavaScript script. - experimental type ScriptTypeProfile extends object - properties - # JavaScript script id. - Runtime.ScriptId scriptId - # JavaScript script name or url. - string url - # Type profile entries for parameters and return values of the functions in the script. - array of TypeProfileEntry entries - command disable command enable @@ -976,9 +952,6 @@ domain Profiler # Monotonically increasing time (in seconds) when the coverage update was taken in the backend. number timestamp - # Enable type profile. - experimental command startTypeProfile - command stop returns # Recorded profile. @@ -988,9 +961,6 @@ domain Profiler # executing optimized code. command stopPreciseCoverage - # Disable type profile. Disabling releases type profile data collected so far. - experimental command stopTypeProfile - # Collect coverage data for the current isolate, and resets execution counters. Precise code # coverage needs to have started. command takePreciseCoverage @@ -1000,12 +970,6 @@ domain Profiler # Monotonically increasing time (in seconds) when the coverage update was taken in the backend. number timestamp - # Collect type profile. - experimental command takeTypeProfile - returns - # Type profile for all scripts since startTypeProfile() was turned on. 
- array of ScriptTypeProfile result - event consoleProfileFinished parameters string id diff --git a/deps/v8/include/v8-context.h b/deps/v8/include/v8-context.h index be52c414b4e028..427f3a738607bb 100644 --- a/deps/v8/include/v8-context.h +++ b/deps/v8/include/v8-context.h @@ -290,6 +290,7 @@ class V8_EXPORT Context : public Data { Local<Function> after_hook, Local<Function> resolve_hook); + bool HasTemplateLiteralObject(Local<Value> object); /** * Stack-allocated class which sets the execution context for all * operations executed within a local scope. diff --git a/deps/v8/include/v8-internal.h b/deps/v8/include/v8-internal.h index c97942ed1b476a..a52d066c835e2d 100644 --- a/deps/v8/include/v8-internal.h +++ b/deps/v8/include/v8-internal.h @@ -182,7 +182,7 @@ constexpr size_t kSandboxSizeLog2 = 37; // 128 GB #else // Everywhere else use a 1TB sandbox. constexpr size_t kSandboxSizeLog2 = 40; // 1 TB -#endif // V8_OS_ANDROID +#endif // V8_TARGET_OS_ANDROID constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2; // Required alignment of the sandbox. For simplicity, we require the @@ -223,6 +223,21 @@ static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize, "The minimum reservation size for a sandbox must be larger than " "the pointer compression cage contained within it."); +// The maximum buffer size allowed inside the sandbox. This is mostly dependent +// on the size of the guard regions around the sandbox: an attacker must not be +// able to construct a buffer that appears larger than the guard regions and +// thereby "reach out of" the sandbox. +constexpr size_t kMaxSafeBufferSizeForSandbox = 32ULL * GB - 1; +static_assert(kMaxSafeBufferSizeForSandbox <= kSandboxGuardRegionSize, + "The maximum allowed buffer size must not be larger than the " + "sandbox's guard regions"); + +constexpr size_t kBoundedSizeShift = 29; +static_assert(1ULL << (64 - kBoundedSizeShift) == + kMaxSafeBufferSizeForSandbox + 1, + "The maximum size of a BoundedSize must be synchronized with the " + "kMaxSafeBufferSizeForSandbox"); + #endif // V8_ENABLE_SANDBOX #ifdef V8_COMPRESS_POINTERS diff --git a/deps/v8/include/v8-isolate.h b/deps/v8/include/v8-isolate.h index 4f31d8c7a80835..2f8acc88682e38 100644 --- a/deps/v8/include/v8-isolate.h +++ b/deps/v8/include/v8-isolate.h @@ -536,6 +536,7 @@ class V8_EXPORT Isolate { kFunctionPrototypeCaller = 114, kTurboFanOsrCompileStarted = 115, kAsyncStackTaggingCreateTaskCall = 116, + kDurationFormat = 117, // If you add new values here, you'll also need to update Chromium's: // web_feature.mojom, use_counter_callback.cc, and enums.xml. V8 changes to diff --git a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h index 4061987dc1161f..027a13a3b239bc 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -9,9 +9,9 @@ // NOTE these macros are used by some of the tool scripts and the build // system so their names cannot be changed without changing the scripts. #define V8_MAJOR_VERSION 10 -#define V8_MINOR_VERSION 7 -#define V8_BUILD_NUMBER 193 -#define V8_PATCH_LEVEL 22 +#define V8_MINOR_VERSION 8 +#define V8_BUILD_NUMBER 168 +#define V8_PATCH_LEVEL 20 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) diff --git a/deps/v8/include/v8config.h b/deps/v8/include/v8config.h index 207afac8b0adeb..a959be130d5319 100644 --- a/deps/v8/include/v8config.h +++ b/deps/v8/include/v8config.h @@ -579,6 +579,37 @@ path. 
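A note on the v8-internal.h hunk above: the two new static_asserts encode a simple size argument. A BoundedSize is stored shifted left by kBoundedSizeShift = 29, so a decoded value fits in 64 - 29 = 35 bits, at most 2^35 - 1 = 32 GB - 1, which is why no in-sandbox buffer can appear to reach past the sandbox's guard regions. A standalone re-check of that arithmetic, assuming GB = 2^30 as in V8's usual unit definitions:

```cpp
#include <cstdint>

constexpr uint64_t GB = uint64_t{1} << 30;  // Assumed unit, 2^30 bytes.
constexpr uint64_t kMaxSafeBufferSizeForSandbox = 32 * GB - 1;
constexpr uint64_t kBoundedSizeShift = 29;

// 1 << (64 - 29) == 1 << 35 == 32 GB, so the largest encodable size is
// 32 GB - 1 == kMaxSafeBufferSizeForSandbox, matching the header's asserts.
static_assert(uint64_t{1} << (64 - kBoundedSizeShift) ==
                  kMaxSafeBufferSizeForSandbox + 1,
              "BoundedSize limit must match the sandbox buffer-size cap");

int main() { return 0; }
```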
Add it with -I<path> to the command line #define V8_NO_UNIQUE_ADDRESS /* NOT SUPPORTED */ #endif +// Marks a type as being eligible for the "trivial" ABI despite having a +// non-trivial destructor or copy/move constructor. Such types can be relocated +// after construction by simply copying their memory, which makes them eligible +// to be passed in registers. The canonical example is std::unique_ptr. +// +// Use with caution; this has some subtle effects on constructor/destructor +// ordering and will be very incorrect if the type relies on its address +// remaining constant. When used as a function argument (by value), the value +// may be constructed in the caller's stack frame, passed in a register, and +// then used and destructed in the callee's stack frame. A similar thing can +// occur when values are returned. +// +// TRIVIAL_ABI is not needed for types which have a trivial destructor and +// copy/move constructors, since those are automatically trivial by the ABI +// spec. +// +// It is also not likely to be effective on types too large to be passed in one +// or two registers on typical target ABIs. +// +// See also: +// https://clang.llvm.org/docs/AttributeReference.html#trivial-abi +// https://libcxx.llvm.org/docs/DesignDocs/UniquePtrTrivialAbi.html +#if defined(__clang__) && defined(__has_attribute) +#if __has_attribute(trivial_abi) +#define V8_TRIVIAL_ABI [[clang::trivial_abi]] +#endif // __has_attribute(trivial_abi) +#endif // defined(__clang__) && defined(__has_attribute) +#if !defined(V8_TRIVIAL_ABI) +#define V8_TRIVIAL_ABI +#endif //!defined(V8_TRIVIAL_ABI) + // Helper macro to define no_sanitize attributes only with clang. #if defined(__clang__) && defined(__has_attribute) #if __has_attribute(no_sanitize) diff --git a/deps/v8/infra/mb/mb_config.pyl b/deps/v8/infra/mb/mb_config.pyl index 5488996a641ae4..b5d6231600488a 100644 --- a/deps/v8/infra/mb/mb_config.pyl +++ b/deps/v8/infra/mb/mb_config.pyl @@ -116,6 +116,7 @@ 'V8 Linux64 - disable runtime call stats - builder': 'release_x64_disable_runtime_call_stats', 'V8 Linux64 - debug - single generation - builder': 'debug_x64_single_generation', 'V8 Linux64 - no pointer compression - builder': 'release_x64_no_pointer_compression', + 'V8 Linux64 css - debug builder': 'debug_x64_conservative_stack_scanning', 'V8 Linux64 gcc - builder': 'release_x64_gcc', 'V8 Linux64 gcc - debug builder': 'debug_x64_gcc', 'V8 Linux64 gcc light - debug builder': 'debug_x64_gcc', @@ -207,105 +208,98 @@ 'tryserver.v8': { 'v8_android_arm_compile_rel': 'release_android_arm', 'v8_android_arm64_compile_dbg': 'debug_android_arm64', - 'v8_android_arm64_n5x_rel_ng': 'release_android_arm64', + 'v8_android_arm64_n5x_compile_rel': 'release_android_arm64', 'v8_fuchsia_compile_rel': 'release_x64_fuchsia_trybot', - 'v8_fuchsia_rel_ng': 'release_x64_fuchsia_trybot', 'v8_ios_simulator': 'release_x64_ios_simulator', - 'v8_linux_rel_ng': 'release_x86_gcmole_trybot', - 'v8_linux_optional_rel_ng': 'release_x86_trybot', - 'v8_linux_verify_csa_rel_ng': 'release_x86_verify_csa', - 'v8_linux_nodcheck_rel_ng': 'release_x86_minimal_symbols', - 'v8_linux_dbg_ng': 'debug_x86_trybot', + 'v8_linux_compile_rel': 'release_x86_gcmole_trybot', + 'v8_linux_optional_compile_rel': 'release_x86_trybot', + 'v8_linux_verify_csa_compile_rel': 'release_x86_verify_csa', + 'v8_linux_nodcheck_compile_rel': 'release_x86_minimal_symbols', + 'v8_linux_compile_dbg': 'debug_x86_trybot', 'v8_linux_noi18n_compile_dbg': 'debug_x86_no_i18n', - 'v8_linux_noi18n_rel_ng': 'release_x86_no_i18n_trybot', 
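On the V8_TRIVIAL_ABI macro introduced in the v8config.h hunk above: [[clang::trivial_abi]] lets a type with a non-trivial destructor or copy/move constructor still be passed and returned in registers, with the caveat that an argument may be destroyed in the callee's frame, so the type must not rely on a stable address. A minimal illustration; Handle and Use are hypothetical names, not V8 code:

```cpp
#include <cstdint>

// Same detection pattern the patch adds to v8config.h.
#if defined(__clang__) && defined(__has_attribute)
#if __has_attribute(trivial_abi)
#define TRIVIAL_ABI [[clang::trivial_abi]]
#endif
#endif
#ifndef TRIVIAL_ABI
#define TRIVIAL_ABI
#endif

// The non-trivial destructor would normally force pass-by-address; with the
// attribute, clang may pass Handle in a register instead.
class TRIVIAL_ABI Handle {
 public:
  explicit Handle(uintptr_t raw) : raw_(raw) {}
  ~Handle() { raw_ = 0; }
  uintptr_t raw() const { return raw_; }

 private:
  uintptr_t raw_;
};

// |h| may be constructed in the caller's frame yet destroyed here, in the
// callee's frame: the ordering caveat the v8config.h comment warns about.
uintptr_t Use(Handle h) { return h.raw(); }

int main() { return static_cast<int>(Use(Handle{42}) != 42); }
```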
- 'v8_linux_gc_stress_dbg_ng': 'debug_x86_trybot', + 'v8_linux_noi18n_compile_rel': 'release_x86_no_i18n_trybot', + 'v8_linux_gc_stress_compile_dbg': 'debug_x86_trybot', 'v8_linux_shared_compile_rel': 'release_x86_shared_verify_heap', 'v8_linux_vtunejit': 'debug_x86_vtunejit', - 'v8_linux64_arm64_no_pointer_compression_rel_ng': + 'v8_linux64_arm64_no_pointer_compression_compile_rel': 'release_simulate_arm64_no_pointer_compression', - 'v8_linux64_cppgc_non_default_dbg_ng': 'debug_x64_non_default_cppgc', - 'v8_linux64_dbg_ng': 'debug_x64_trybot', - 'v8_linux64_no_sandbox_dbg_ng': 'debug_x64_no_sandbox', - 'v8_linux64_dict_tracking_dbg_ng': 'debug_x64_dict_tracking_trybot', - 'v8_linux64_disable_runtime_call_stats_rel_ng': 'release_x64_disable_runtime_call_stats', - 'v8_linux64_external_code_space_dbg_ng': 'debug_x64_external_code_space', - 'v8_linux64_gc_stress_custom_snapshot_dbg_ng': 'debug_x64_trybot_custom', - 'v8_linux64_gc_stress_dbg_ng': 'debug_x64_trybot', + 'v8_linux64_cppgc_non_default_compile_dbg': 'debug_x64_non_default_cppgc', + 'v8_linux64_compile_dbg': 'debug_x64_trybot', + 'v8_linux64_no_sandbox_compile_dbg': 'debug_x64_no_sandbox', + 'v8_linux64_dict_tracking_compile_dbg': 'debug_x64_dict_tracking_trybot', + 'v8_linux64_disable_runtime_call_stats_compile_rel': 'release_x64_disable_runtime_call_stats', + 'v8_linux64_external_code_space_compile_dbg': 'debug_x64_external_code_space', + 'v8_linux64_css_compile_dbg': 'debug_x64_conservative_stack_scanning', + 'v8_linux64_gc_stress_custom_snapshot_compile_dbg': 'debug_x64_trybot_custom', + 'v8_linux64_gc_stress_compile_dbg': 'debug_x64_trybot', 'v8_linux64_gcc_compile_dbg': 'debug_x64_gcc', - 'v8_linux64_gcc_compile_rel': 'release_x64_gcc', 'v8_linux64_gcc_light_compile_dbg': 'debug_x64_gcc', - 'v8_linux64_gcc_rel_ng': 'release_x64_gcc', + 'v8_linux64_gcc_compile_rel': 'release_x64_gcc', 'v8_linux64_gcov_coverage': 'release_x64_gcc_coverage', 'v8_linux64_header_includes_dbg': 'debug_x64_header_includes', - 'v8_linux64_heap_sandbox_dbg_ng': 'debug_x64_heap_sandbox', - 'v8_linux64_minor_mc_dbg_ng': 'debug_x64_trybot', - 'v8_linux_arm64_sim_heap_sandbox_dbg_ng': 'debug_x64_heap_sandbox_arm64_sim', - 'v8_linux64_fyi_rel_ng': 'release_x64_test_features_trybot', - 'v8_linux64_nodcheck_rel_ng': 'release_x64', - 'v8_linux64_perfetto_dbg_ng': 'debug_x64_perfetto', - 'v8_linux64_no_pointer_compression_rel_ng': 'release_x64_no_pointer_compression', - 'v8_linux64_rel_ng': 'release_x64_test_features_trybot', - 'v8_linux64_no_sandbox_rel_ng': 'release_x64_no_sandbox', - 'v8_linux64_predictable_rel_ng': 'release_x64_predictable', + 'v8_linux64_heap_sandbox_compile_dbg': 'debug_x64_heap_sandbox', + 'v8_linux64_minor_mc_compile_dbg': 'debug_x64_trybot', + 'v8_linux_arm64_sim_heap_sandbox_compile_dbg': 'debug_x64_heap_sandbox_arm64_sim', + 'v8_linux64_fyi_compile_rel': 'release_x64_test_features_trybot', + 'v8_linux64_nodcheck_compile_rel': 'release_x64', + 'v8_linux64_perfetto_compile_dbg': 'debug_x64_perfetto', + 'v8_linux64_no_pointer_compression_compile_rel': 'release_x64_no_pointer_compression', + 'v8_linux64_compile_rel': 'release_x64_test_features_trybot', + 'v8_linux64_no_sandbox_compile_rel': 'release_x64_no_sandbox', + 'v8_linux64_predictable_compile_rel': 'release_x64_predictable', 'v8_linux64_shared_compile_rel': 'release_x64_shared_verify_heap', - 'v8_linux64_single_generation_dbg_ng': 'debug_x64_single_generation', + 'v8_linux64_single_generation_compile_dbg': 'debug_x64_single_generation', 'v8_linux64_no_wasm_compile_rel': 
'release_x64_webassembly_disabled', - 'v8_linux64_verify_csa_rel_ng': 'release_x64_verify_csa', - 'v8_linux64_asan_rel_ng': 'release_x64_asan_minimal_symbols', - 'v8_linux64_cfi_rel_ng': 'release_x64_cfi', - 'v8_linux64_fuzzilli_ng': 'release_x64_fuzzilli', - 'v8_linux64_loong64_rel_ng': 'release_simulate_loong64', - 'v8_linux64_msan_rel_ng': 'release_simulate_arm64_msan_minimal_symbols', - 'v8_linux_riscv32_rel_ng': 'release_simulate_riscv32', - 'v8_linux64_riscv64_rel_ng': 'release_simulate_riscv64', - 'v8_linux64_tsan_rel_ng': 'release_x64_tsan_minimal_symbols', - 'v8_linux64_tsan_no_cm_rel_ng': 'release_x64_tsan_no_cm', - 'v8_linux64_tsan_isolates_rel_ng': + 'v8_linux64_verify_csa_compile_rel': 'release_x64_verify_csa', + 'v8_linux64_asan_compile_rel': 'release_x64_asan_minimal_symbols', + 'v8_linux64_cfi_compile_rel': 'release_x64_cfi', + 'v8_linux64_fuzzilli_compile_rel': 'release_x64_fuzzilli', + 'v8_linux64_loong64_compile_rel': 'release_simulate_loong64', + 'v8_linux64_msan_compile_rel': 'release_simulate_arm64_msan_minimal_symbols', + 'v8_linux_riscv32_compile_rel': 'release_simulate_riscv32', + 'v8_linux64_riscv64_compile_rel': 'release_simulate_riscv64', + 'v8_linux64_tsan_compile_rel': 'release_x64_tsan_minimal_symbols', + 'v8_linux64_tsan_no_cm_compile_rel': 'release_x64_tsan_no_cm', + 'v8_linux64_tsan_isolates_compile_rel': 'release_x64_tsan_minimal_symbols', - 'v8_linux64_ubsan_rel_ng': 'release_x64_ubsan_minimal_symbols', - 'v8_odroid_arm_rel_ng': 'release_arm', + 'v8_linux64_ubsan_compile_rel': 'release_x64_ubsan_minimal_symbols', + 'v8_odroid_arm_compile_rel': 'release_arm', 'v8_linux_torque_compare': 'torque_compare', # TODO(machenbach): Remove after switching to x64 on infra side. - 'v8_win_dbg_ng': 'debug_x86_trybot', 'v8_win_compile_dbg': 'debug_x86_trybot', - 'v8_win_rel_ng': 'release_x86_trybot', - 'v8_win64_asan_rel_ng': 'release_x64_asan_no_lsan', + 'v8_win_compile_rel': 'release_x86_trybot', + 'v8_win64_asan_compile_rel': 'release_x64_asan_no_lsan', + 'v8_win64_msvc_light_compile_rel': 'release_x64_msvc', + 'v8_win64_compile_dbg': 'debug_x64_minimal_symbols', 'v8_win64_msvc_compile_rel': 'release_x64_msvc', - 'v8_win64_dbg_ng': 'debug_x64_minimal_symbols', - 'v8_win64_msvc_rel_ng': 'release_x64_msvc', - 'v8_win64_rel_ng': 'release_x64_trybot', - 'v8_mac_arm64_rel_ng': 'release_arm64', - 'v8_mac_arm64_dbg_ng': 'debug_arm64', - 'v8_mac_arm64_full_dbg_ng': 'full_debug_arm64', - 'v8_mac_arm64_no_pointer_compression_dbg_ng': 'debug_arm64_no_pointer_compression', - 'v8_mac_arm64_compile_dbg': 'debug_arm64', + 'v8_win64_compile_rel': 'release_x64_trybot', 'v8_mac_arm64_compile_rel': 'release_arm64', + 'v8_mac_arm64_compile_dbg': 'debug_arm64', + 'v8_mac_arm64_full_compile_dbg': 'full_debug_arm64', + 'v8_mac_arm64_no_pointer_compression_compile_dbg': 'debug_arm64_no_pointer_compression', + 'v8_mac_arm64_sim_compile_rel': 'release_simulate_arm64_trybot', 'v8_mac_arm64_sim_compile_dbg': 'debug_simulate_arm64', - 'v8_mac_arm64_sim_compile_rel': 'release_simulate_arm64', - 'v8_mac_arm64_sim_rel_ng': 'release_simulate_arm64_trybot', - 'v8_mac_arm64_sim_dbg_ng': 'debug_simulate_arm64', - 'v8_mac_arm64_sim_nodcheck_rel_ng': 'release_simulate_arm64', - 'v8_mac64_gc_stress_dbg_ng': 'debug_x64_trybot', - 'v8_mac64_rel_ng': 'release_x64_trybot', + 'v8_mac_arm64_sim_nodcheck_compile_rel': 'release_simulate_arm64', + 'v8_mac64_gc_stress_compile_dbg': 'debug_x64_trybot', + 'v8_mac64_compile_rel': 'release_x64_trybot', 'v8_mac64_dbg': 'debug_x64', - 'v8_mac64_dbg_ng': 'debug_x64', - 
'v8_mac64_compile_full_dbg_ng': 'full_debug_x64', + 'v8_mac64_compile_dbg': 'debug_x64', + 'v8_mac64_compile_full_compile_dbg': 'full_debug_x64', 'v8_mac64_asan_compile_rel': 'release_x64_asan_no_lsan', - 'v8_mac64_asan_rel_ng': 'release_x64_asan_no_lsan', - 'v8_linux_arm_rel_ng': 'release_simulate_arm_trybot', + 'v8_linux_arm_compile_rel': 'release_simulate_arm_trybot', 'v8_linux_arm_lite_compile_dbg': 'debug_simulate_arm_lite', - 'v8_linux_arm_lite_rel_ng': 'release_simulate_arm_lite_trybot', - 'v8_linux_arm_dbg_ng': 'debug_simulate_arm', + 'v8_linux_arm_lite_compile_rel': 'release_simulate_arm_lite_trybot', + 'v8_linux_arm_compile_dbg': 'debug_simulate_arm', 'v8_linux_arm_armv8a_rel': 'release_simulate_arm_trybot', 'v8_linux_arm_armv8a_dbg': 'debug_simulate_arm', - 'v8_linux_arm64_rel_ng': 'release_simulate_arm64_trybot', - 'v8_linux_arm64_cfi_rel_ng' : 'release_simulate_arm64_cfi', - 'v8_linux_arm64_dbg_ng': 'debug_simulate_arm64', - 'v8_linux_arm64_gc_stress_dbg_ng': 'debug_simulate_arm64', + 'v8_linux_arm64_compile_rel': 'release_simulate_arm64_trybot', + 'v8_linux_arm64_cfi_compile_rel' : 'release_simulate_arm64_cfi', + 'v8_linux_arm64_compile_dbg': 'debug_simulate_arm64', + 'v8_linux_arm64_gc_stress_compile_dbg': 'debug_simulate_arm64', 'v8_linux_mips64el_compile_rel': 'release_simulate_mips64el', - 'v8_numfuzz_ng': 'release_x64', - 'v8_numfuzz_dbg_ng': 'debug_x64', - 'v8_numfuzz_tsan_ng': 'release_x64_tsan', + 'v8_numfuzz_compile_rel': 'release_x64', + 'v8_numfuzz_compile_dbg': 'debug_x64', + 'v8_numfuzz_tsan_compile_rel': 'release_x64_tsan', }, }, @@ -577,6 +571,8 @@ 'debug_x64_asan_no_lsan_static': [ 'debug', 'static', 'goma', 'v8_enable_slow_dchecks', 'v8_optimized_debug', 'x64', 'asan'], + 'debug_x64_conservative_stack_scanning': [ + 'debug_bot', 'x64', 'conservative_stack_scanning'], 'debug_x64_custom': [ 'debug_bot', 'x64', 'v8_snapshot_custom'], 'debug_x64_external_code_space': [ @@ -703,6 +699,11 @@ 'gn_args': 'is_clang=true', }, + 'conservative_stack_scanning': { + 'gn_args': 'v8_enable_conservative_stack_scanning=true ' + 'v8_enable_inner_pointer_resolution_mb=true', + }, + 'coverage': { 'gn_args': 'v8_code_coverage=true', }, diff --git a/deps/v8/infra/testing/builders.pyl b/deps/v8/infra/testing/builders.pyl index ca2fab5eac8e90..516905539cf72c 100644 --- a/deps/v8/infra/testing/builders.pyl +++ b/deps/v8/infra/testing/builders.pyl @@ -32,7 +32,7 @@ ### luci.v8.try ############################################################################## # Android - 'v8_android_arm64_n5x_rel_ng_triggered': { + 'v8_android_arm64_n5x_rel': { 'swarming_dimensions' : { 'device_os': 'MMB29Q', 'device_type': 'bullhead', @@ -46,7 +46,7 @@ }, ############################################################################## # Fuchsia - 'v8_fuchsia_rel_ng_triggered': { + 'v8_fuchsia_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -64,7 +64,7 @@ }, ############################################################################## # Linux32 - 'v8_linux_dbg_ng_triggered': { + 'v8_linux_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -109,7 +109,7 @@ }, ], }, - 'v8_linux_gc_stress_dbg_ng_triggered': { + 'v8_linux_gc_stress_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -118,7 +118,7 @@ {'name': 'd8testing', 'test_args': ['--gc-stress'], 'shards': 5}, ], }, - 'v8_linux_nodcheck_rel_ng_triggered': { + 'v8_linux_nodcheck_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -143,7 +143,7 @@ }, ], }, - 
'v8_linux_noi18n_rel_ng_triggered': { + 'v8_linux_noi18n_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -153,7 +153,7 @@ {'name': 'v8testing', 'variant': 'default', 'shards': 2}, ], }, - 'v8_linux_rel_ng_triggered': { + 'v8_linux_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -182,7 +182,7 @@ }, ], }, - 'v8_linux_optional_rel_ng_triggered': { + 'v8_linux_optional_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -285,7 +285,7 @@ }, ], }, - 'v8_linux_verify_csa_rel_ng_triggered': { + 'v8_linux_verify_csa_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -295,7 +295,7 @@ }, ############################################################################## # Linux32 with arm simulators - 'v8_linux_arm_dbg_ng_triggered': { + 'v8_linux_arm_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -307,7 +307,7 @@ {'name': 'v8testing', 'variant': 'extra', 'shards': 10}, ], }, - 'v8_linux_arm_lite_rel_ng_triggered': { + 'v8_linux_arm_lite_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -315,7 +315,7 @@ {'name': 'v8testing', 'variant': 'default', 'shards': 4}, ], }, - 'v8_linux_arm_rel_ng_triggered': { + 'v8_linux_arm_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -329,7 +329,7 @@ }, ############################################################################## # Linux64 - 'v8_linux64_asan_rel_ng_triggered': { + 'v8_linux64_asan_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -340,7 +340,7 @@ {'name': 'v8testing', 'variant': 'slow_path'}, ], }, - 'v8_linux64_cfi_rel_ng_triggered': { + 'v8_linux64_cfi_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -352,7 +352,7 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_linux64_cppgc_non_default_dbg_ng_triggered': { + 'v8_linux64_cppgc_non_default_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -361,7 +361,7 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_linux64_dbg_ng_triggered': { + 'v8_linux64_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -385,7 +385,7 @@ {'name': 'mjsunit', 'variant': 'maglev'}, ], }, - 'v8_linux64_dict_tracking_dbg_ng_triggered': { + 'v8_linux64_dict_tracking_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -394,7 +394,7 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_linux64_disable_runtime_call_stats_rel_ng_triggered': { + 'v8_linux64_disable_runtime_call_stats_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -402,7 +402,7 @@ {'name': 'v8testing'}, ], }, - 'v8_linux64_external_code_space_dbg_ng_triggered': { + 'v8_linux64_external_code_space_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -411,14 +411,14 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_linux64_fuzzilli_ng_triggered': { + 'v8_linux64_fuzzilli_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, # TODO(almuthanna): Add a new test config for the fuzzilli suite. 
'tests': [], }, - 'v8_linux64_fyi_rel_ng_triggered': { + 'v8_linux64_fyi_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -440,7 +440,7 @@ {'name': 'mjsunit', 'variant': 'wasm_write_protect_code'}, ], }, - 'v8_linux64_gc_stress_custom_snapshot_dbg_ng_triggered': { + 'v8_linux64_gc_stress_custom_snapshot_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -452,7 +452,7 @@ }, ], }, - 'v8_linux64_gc_stress_dbg_ng_triggered': { + 'v8_linux64_gc_stress_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -473,7 +473,7 @@ }, ], }, - 'v8_linux64_gcc_rel_ng_triggered': { + 'v8_linux64_gcc_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-20.04', }, @@ -489,7 +489,7 @@ {'name': 'v8testing'}, ], }, - 'v8_linux64_heap_sandbox_dbg_ng_triggered': { + 'v8_linux64_heap_sandbox_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -497,7 +497,7 @@ {'name': 'v8testing', 'shards': 4}, ], }, - 'v8_linux64_minor_mc_dbg_ng_triggered': { + 'v8_linux64_minor_mc_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -510,7 +510,7 @@ {'name': 'mjsunit', 'variant': 'minor_mc'}, ], }, - 'v8_linux64_msan_rel_ng_triggered': { + 'v8_linux64_msan_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -519,7 +519,7 @@ {'name': 'v8testing', 'shards': 5}, ], }, - 'v8_linux64_nodcheck_rel_ng_triggered': { + 'v8_linux64_nodcheck_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -545,7 +545,7 @@ {'name': 'mjsunit', 'variant': 'maglev'}, ], }, - 'v8_linux64_perfetto_dbg_ng_triggered': { + 'v8_linux64_perfetto_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -553,7 +553,7 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_linux64_no_pointer_compression_rel_ng_triggered': { + 'v8_linux64_no_pointer_compression_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -561,7 +561,7 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_linux64_no_sandbox_dbg_ng_triggered': { + 'v8_linux64_no_sandbox_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -570,7 +570,7 @@ {'name': 'v8testing', 'shards': 5}, ], }, - 'v8_linux64_single_generation_dbg_ng_triggered': { + 'v8_linux64_single_generation_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -578,7 +578,7 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_linux64_rel_ng_triggered': { + 'v8_linux64_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -601,7 +601,7 @@ {'name': 'mjsunit', 'variant': 'maglev'}, ], }, - 'v8_linux64_predictable_rel_ng_triggered': { + 'v8_linux64_predictable_rel': { 'swarming_dimensions': { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -612,7 +612,7 @@ {'name': 'mozilla'}, ], }, - 'v8_linux64_no_sandbox_rel_ng_triggered': { + 'v8_linux64_no_sandbox_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64-avx2', 'os': 'Ubuntu-18.04', @@ -621,7 +621,7 @@ {'name': 'v8testing', 'shards': 2}, ], }, - 'v8_linux64_tsan_rel_ng_triggered': { + 'v8_linux64_tsan_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -635,7 +635,7 @@ {'name': 'v8testing', 'variant': 'stress_concurrent_allocation', 'shards': 2}, ], }, - 'v8_linux64_tsan_no_cm_rel_ng_triggered': { + 'v8_linux64_tsan_no_cm_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -653,7 +653,7 @@ {'name': 'v8testing', 'variant': 'stress_concurrent_inlining', 'shards': 2}, ], }, - 'v8_linux64_tsan_isolates_rel_ng_triggered': { + 'v8_linux64_tsan_isolates_rel': { 'swarming_dimensions' : { 'os': 
'Ubuntu-18.04', }, @@ -661,7 +661,7 @@ {'name': 'v8testing', 'test_args': ['--isolates'], 'shards': 7}, ], }, - 'v8_linux64_ubsan_rel_ng_triggered': { + 'v8_linux64_ubsan_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -669,7 +669,7 @@ {'name': 'v8testing', 'shards': 2}, ], }, - 'v8_linux64_verify_csa_rel_ng_triggered': { + 'v8_linux64_verify_csa_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -679,7 +679,7 @@ }, ############################################################################## # Linux64 with arm64 simulators - 'v8_linux_arm64_dbg_ng_triggered': { + 'v8_linux_arm64_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -691,7 +691,7 @@ {'name': 'v8testing', 'variant': 'extra', 'shards': 14}, ], }, - 'v8_linux_arm64_gc_stress_dbg_ng_triggered': { + 'v8_linux_arm64_gc_stress_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -699,7 +699,7 @@ {'name': 'd8testing', 'test_args': ['--gc-stress'], 'shards': 12}, ], }, - 'v8_linux_arm64_sim_heap_sandbox_dbg_ng_triggered': { + 'v8_linux_arm64_sim_heap_sandbox_dbg': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -707,7 +707,7 @@ {'name': 'v8testing', 'shards': 14}, ], }, - 'v8_linux_arm64_rel_ng_triggered': { + 'v8_linux_arm64_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -719,7 +719,7 @@ {'name': 'v8testing', 'variant': 'extra', 'shards': 14}, ], }, - 'v8_linux_arm64_cfi_rel_ng_triggered': { + 'v8_linux_arm64_cfi_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -728,7 +728,7 @@ {'name': 'v8testing', 'shards': 4}, ], }, - 'v8_linux64_arm64_no_pointer_compression_rel_ng_triggered': { + 'v8_linux64_arm64_no_pointer_compression_rel': { 'swarming_dimensions' : { 'os': 'Ubuntu-18.04', }, @@ -738,7 +738,7 @@ }, ############################################################################## # Linux64 with Loongson simulators - 'v8_linux64_loong64_rel_ng_triggered': { + 'v8_linux64_loong64_rel': { 'swarming_dimensions': { 'os': 'Ubuntu-18.04', }, @@ -748,7 +748,7 @@ }, ############################################################################## # Linux with RISC-V simulators - 'v8_linux_riscv32_rel_ng_triggered': { + 'v8_linux_riscv32_rel': { 'swarming_dimensions': { 'os': 'Ubuntu-18.04', }, @@ -756,7 +756,7 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_linux64_riscv64_rel_ng_triggered': { + 'v8_linux64_riscv64_rel': { 'swarming_dimensions': { 'os': 'Ubuntu-18.04', }, @@ -766,7 +766,7 @@ }, ############################################################################## # Odroids with native arm - 'v8_odroid_arm_rel_ng_triggered': { + 'v8_odroid_arm_rel': { 'swarming_dimensions' : { 'cores': '8', 'cpu': 'armv7l-32-ODROID-XU4', @@ -784,7 +784,7 @@ }, ############################################################################## # Win32 - 'v8_win_dbg_ng_triggered': { + 'v8_win_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Windows-7-SP1', @@ -794,7 +794,7 @@ {'name': 'v8testing', 'shards': 3}, ], }, - 'v8_win_rel_ng_triggered': { + 'v8_win_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Windows-7-SP1', @@ -807,7 +807,7 @@ }, ############################################################################## # Win64 - 'v8_win64_asan_rel_ng_triggered': { + 'v8_win64_asan_rel': { 'swarming_dimensions' : { 'os': 'Windows-10-19042', }, @@ -815,7 +815,7 @@ {'name': 'v8testing', 'shards': 5}, ], }, - 'v8_win64_dbg_ng_triggered': { + 'v8_win64_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Windows-10-19042', @@ -827,7 +827,7 @@ {'name': 
'v8testing', 'variant': 'extra', 'shards': 2}, ], }, - 'v8_win64_msvc_rel_ng_triggered': { + 'v8_win64_msvc_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Windows-10-19042', @@ -838,7 +838,7 @@ {'name': 'v8testing', 'shards': 2}, ], }, - 'v8_win64_rel_ng_triggered': { + 'v8_win64_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Windows-10-19042', @@ -852,7 +852,7 @@ }, ############################################################################## # Mac64 - 'v8_mac64_asan_rel_ng_triggered': { + 'v8_mac64_asan_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Mac-10.15', @@ -861,7 +861,7 @@ {'name': 'v8testing', 'shards': 8}, ], }, - 'v8_mac64_dbg_ng_triggered': { + 'v8_mac64_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Mac-10.15', @@ -873,7 +873,7 @@ {'name': 'v8testing', 'variant': 'extra', 'shards': 6}, ], }, - 'v8_mac64_gc_stress_dbg_ng_triggered': { + 'v8_mac64_gc_stress_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Mac-10.15', @@ -882,7 +882,7 @@ {'name': 'd8testing', 'test_args': ['--gc-stress'], 'shards': 6}, ], }, - 'v8_mac64_rel_ng_triggered': { + 'v8_mac64_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Mac-10.15', @@ -894,7 +894,7 @@ {'name': 'v8testing', 'variant': 'extra', 'shards': 3}, ], }, - 'v8_mac_arm64_rel_ng_triggered': { + 'v8_mac_arm64_rel': { 'swarming_dimensions' : { 'cpu': 'arm64', 'os': 'Mac-11', @@ -904,7 +904,7 @@ {'name': 'v8testing'}, ], }, - 'v8_mac_arm64_dbg_ng_triggered': { + 'v8_mac_arm64_dbg': { 'swarming_dimensions' : { 'cpu': 'arm64', 'os': 'Mac-11', @@ -914,7 +914,7 @@ {'name': 'v8testing'}, ], }, - 'v8_mac_arm64_full_dbg_ng_triggered': { + 'v8_mac_arm64_full_dbg': { 'swarming_dimensions' : { 'cpu': 'arm64', 'os': 'Mac-11', @@ -924,7 +924,7 @@ {'name': 'v8testing'}, ], }, - 'v8_mac_arm64_no_pointer_compression_dbg_ng_triggered': { + 'v8_mac_arm64_no_pointer_compression_dbg': { 'swarming_dimensions' : { 'cpu': 'arm64', 'os': 'Mac-11', @@ -934,7 +934,7 @@ {'name': 'v8testing'}, ], }, - 'v8_mac_arm64_sim_rel_ng_triggered': { + 'v8_mac_arm64_sim_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Mac-10.15', @@ -944,7 +944,7 @@ {'name': 'v8testing', 'variant': 'future', 'shards': 2}, ], }, - 'v8_mac_arm64_sim_dbg_ng_triggered': { + 'v8_mac_arm64_sim_dbg': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Mac-10.15', @@ -954,7 +954,7 @@ {'name': 'v8testing', 'variant': 'future', 'shards': 2}, ], }, - 'v8_mac_arm64_sim_nodcheck_rel_ng_triggered': { + 'v8_mac_arm64_sim_nodcheck_rel': { 'swarming_dimensions' : { 'cpu': 'x86-64', 'os': 'Mac-10.15', @@ -2349,7 +2349,7 @@ }, ], }, - 'v8_numfuzz_ng_triggered': { + 'v8_numfuzz_rel': { 'swarming_dimensions': { 'os': 'Ubuntu-18.04', }, @@ -2366,7 +2366,7 @@ }, ], }, - 'v8_numfuzz_tsan_ng_triggered': { + 'v8_numfuzz_tsan_rel': { 'swarming_dimensions': { 'os': 'Ubuntu-18.04', }, @@ -2419,7 +2419,7 @@ }, ], }, - 'v8_numfuzz_dbg_ng_triggered': { + 'v8_numfuzz_dbg': { 'swarming_dimensions': { 'os': 'Ubuntu-18.04', }, diff --git a/deps/v8/src/DEPS b/deps/v8/src/DEPS index 1edad011be189c..8912d7fb25186c 100644 --- a/deps/v8/src/DEPS +++ b/deps/v8/src/DEPS @@ -32,6 +32,7 @@ include_rules = [ "+src/heap/local-factory.h", "+src/heap/local-heap.h", "+src/heap/local-heap-inl.h", + "+src/heap/pretenuring-handler-inl.h", # TODO(v8:10496): Don't expose memory chunk outside of heap/. 
"+src/heap/memory-chunk.h", "+src/heap/memory-chunk-inl.h", diff --git a/deps/v8/src/api/api-natives.cc b/deps/v8/src/api/api-natives.cc index 562b7849b4061d..8624c279d66e4f 100644 --- a/deps/v8/src/api/api-natives.cc +++ b/deps/v8/src/api/api-natives.cc @@ -529,7 +529,7 @@ MaybeHandle<JSFunction> InstantiateFunction( if (!data->needs_access_check() && data->GetNamedPropertyHandler().IsUndefined(isolate) && data->GetIndexedPropertyHandler().IsUndefined(isolate)) { - function_type = FLAG_embedder_instance_types && data->HasInstanceType() + function_type = v8_flags.embedder_instance_types && data->HasInstanceType() ? static_cast<InstanceType>(data->InstanceType()) : JS_API_OBJECT_TYPE; } diff --git a/deps/v8/src/api/api.cc b/deps/v8/src/api/api.cc index b54e554217329b..a4a4381614e1fd 100644 --- a/deps/v8/src/api/api.cc +++ b/deps/v8/src/api/api.cc @@ -284,7 +284,7 @@ void i::V8::FatalProcessOutOfMemory(i::Isolate* i_isolate, const char* location, // BUG(1718): Don't use the take_snapshot since we don't support // HeapObjectIterator here without doing a special GC. i_isolate->heap()->RecordStats(&heap_stats, false); - if (!FLAG_correctness_fuzzer_suppressions) { + if (!v8_flags.correctness_fuzzer_suppressions) { char* first_newline = strchr(last_few_messages, '\n'); if (first_newline == nullptr || first_newline[1] == '\0') first_newline = last_few_messages; @@ -795,7 +795,7 @@ i::Address* GlobalizeTracedReference(i::Isolate* i_isolate, i::Address* obj, i::Handle<i::Object> result = i_isolate->global_handles()->CreateTraced(*obj, slot, store_mode); #ifdef VERIFY_HEAP - if (i::FLAG_verify_heap) { + if (i::v8_flags.verify_heap) { i::Object(*obj).ObjectVerify(i_isolate); } #endif // VERIFY_HEAP @@ -823,7 +823,7 @@ i::Address* GlobalizeReference(i::Isolate* i_isolate, i::Address* obj) { API_RCS_SCOPE(i_isolate, Persistent, New); i::Handle<i::Object> result = i_isolate->global_handles()->Create(*obj); #ifdef VERIFY_HEAP - if (i::FLAG_verify_heap) { + if (i::v8_flags.verify_heap) { i::Object(*obj).ObjectVerify(i_isolate); } #endif // VERIFY_HEAP @@ -1676,7 +1676,7 @@ void ObjectTemplate::SetAccessor(v8::Local<String> name, SideEffectType getter_side_effect_type, SideEffectType setter_side_effect_type) { TemplateSetAccessor(this, name, getter, setter, data, settings, attribute, - i::FLAG_disable_old_api_accessors, false, + i::v8_flags.disable_old_api_accessors, false, getter_side_effect_type, setter_side_effect_type); } @@ -1688,7 +1688,7 @@ void ObjectTemplate::SetAccessor(v8::Local<Name> name, SideEffectType getter_side_effect_type, SideEffectType setter_side_effect_type) { TemplateSetAccessor(this, name, getter, setter, data, settings, attribute, - i::FLAG_disable_old_api_accessors, false, + i::v8_flags.disable_old_api_accessors, false, getter_side_effect_type, setter_side_effect_type); } @@ -2100,7 +2100,7 @@ MaybeLocal<Value> Script::Run(Local<Context> context, // // To avoid this, on running scripts check first if JIT code log is // pending and generate immediately. 
- if (i::FLAG_enable_etw_stack_walking) { + if (i::v8_flags.enable_etw_stack_walking) { i::ETWJITInterface::MaybeSetHandlerNow(i_isolate); } #endif @@ -2109,14 +2109,15 @@ MaybeLocal<Value> Script::Run(Local<Context> context, // TODO(crbug.com/1193459): remove once ablation study is completed base::ElapsedTimer timer; base::TimeDelta delta; - if (i::FLAG_script_delay > 0) { - delta = v8::base::TimeDelta::FromMillisecondsD(i::FLAG_script_delay); + if (i::v8_flags.script_delay > 0) { + delta = v8::base::TimeDelta::FromMillisecondsD(i::v8_flags.script_delay); } - if (i::FLAG_script_delay_once > 0 && !i_isolate->did_run_script_delay()) { - delta = v8::base::TimeDelta::FromMillisecondsD(i::FLAG_script_delay_once); + if (i::v8_flags.script_delay_once > 0 && !i_isolate->did_run_script_delay()) { + delta = + v8::base::TimeDelta::FromMillisecondsD(i::v8_flags.script_delay_once); i_isolate->set_did_run_script_delay(true); } - if (i::FLAG_script_delay_fraction > 0.0) { + if (i::v8_flags.script_delay_fraction > 0.0) { timer.Start(); } else if (delta.InMicroseconds() > 0) { timer.Start(); @@ -2125,7 +2126,7 @@ MaybeLocal<Value> Script::Run(Local<Context> context, } } - if (V8_UNLIKELY(i::FLAG_experimental_web_snapshots)) { + if (V8_UNLIKELY(i::v8_flags.experimental_web_snapshots)) { i::Handle<i::HeapObject> maybe_script = handle(fun->shared().script(), i_isolate); if (maybe_script->IsScript() && @@ -2149,9 +2150,9 @@ MaybeLocal<Value> Script::Run(Local<Context> context, has_pending_exception = !ToLocal<Value>( i::Execution::CallScript(i_isolate, fun, receiver, options), &result); - if (i::FLAG_script_delay_fraction > 0.0) { + if (i::v8_flags.script_delay_fraction > 0.0) { delta = v8::base::TimeDelta::FromMillisecondsD( - timer.Elapsed().InMillisecondsF() * i::FLAG_script_delay_fraction); + timer.Elapsed().InMillisecondsF() * i::v8_flags.script_delay_fraction); timer.Restart(); while (timer.Elapsed() < delta) { // Busy wait. 
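The recurring edit in api.cc above and below is mechanical: reads of the old FLAG_* globals become reads of fields on the single i::v8_flags struct. A minimal sketch of the shape change, assuming a plain struct-of-fields design (V8's real flags are generated by macros in src/flags/flag-definitions.h; the flag names below are taken from this diff, everything else is illustrative):

    // One field per flag replaces one global per flag.
    struct FlagValues {
      bool verify_heap = false;   // was: extern bool FLAG_verify_heap;
      bool expose_gc = false;     // was: extern bool FLAG_expose_gc;
      double script_delay = 0.0;  // was: extern double FLAG_script_delay;
    };

    // A single instance holds every flag, so all flag storage sits in one
    // contiguous region (e.g. so the whole block can be write-protected once
    // flags are frozen after startup).
    inline FlagValues v8_flags;

    // Call sites change only their access path, not their behavior:
    //   if (i::FLAG_verify_heap) ...  =>  if (i::v8_flags.verify_heap) ...
    bool ShouldVerifyHeap() { return v8_flags.verify_heap; }

Because the rewrite is one-for-one, none of these hunks change behavior; only the access path differs.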
@@ -2742,7 +2743,7 @@ ScriptCompiler::ScriptStreamingTask* ScriptCompiler::StartStreaming( Utils::ApiCheck(options == kNoCompileOptions || options == kEagerCompile, "v8::ScriptCompiler::StartStreaming", "Invalid CompileOptions"); - if (!i::FLAG_script_streaming) return nullptr; + if (!i::v8_flags.script_streaming) return nullptr; i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate); i::ScriptStreamingData* data = source->impl(); std::unique_ptr<i::BackgroundCompileTask> task = @@ -2775,20 +2776,22 @@ void ScriptCompiler::ConsumeCodeCacheTask::SourceTextAvailable( bool ScriptCompiler::ConsumeCodeCacheTask::ShouldMergeWithExistingScript() const { - if (!i::FLAG_merge_background_deserialized_script_with_compilation_cache) { + if (!i::v8_flags + .merge_background_deserialized_script_with_compilation_cache) { return false; } return impl_->ShouldMergeWithExistingScript(); } void ScriptCompiler::ConsumeCodeCacheTask::MergeWithExistingScript() { - DCHECK(i::FLAG_merge_background_deserialized_script_with_compilation_cache); + DCHECK( + i::v8_flags.merge_background_deserialized_script_with_compilation_cache); impl_->MergeWithExistingScript(); } ScriptCompiler::ConsumeCodeCacheTask* ScriptCompiler::StartConsumingCodeCache( Isolate* v8_isolate, std::unique_ptr<CachedData> cached_data) { - if (!i::FLAG_concurrent_cache_deserialization) return nullptr; + if (!i::v8_flags.concurrent_cache_deserialization) return nullptr; i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate); DCHECK_NO_SCRIPT_NO_EXCEPTION(i_isolate); return new ScriptCompiler::ConsumeCodeCacheTask( @@ -4923,7 +4926,7 @@ Maybe<bool> Object::SetAccessor(Local<Context> context, Local<Name> name, SideEffectType setter_side_effect_type) { return ObjectSetAccessor(context, this, name, getter, setter, data.FromMaybe(Local<Value>()), settings, attribute, - i::FLAG_disable_old_api_accessors, false, + i::v8_flags.disable_old_api_accessors, false, getter_side_effect_type, setter_side_effect_type); } @@ -6740,6 +6743,14 @@ void v8::Context::SetPromiseHooks(Local<Function> init_hook, #endif // V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS } +bool Context::HasTemplateLiteralObject(Local<Value> object) { + i::DisallowGarbageCollection no_gc; + i::Object i_object = *Utils::OpenHandle(*object); + if (!i_object.IsJSArray()) return false; + return Utils::OpenHandle(this)->native_context().HasTemplateLiteralObject( + i::JSArray::cast(i_object)); +} + MaybeLocal<Context> metrics::Recorder::GetContext( Isolate* v8_isolate, metrics::Recorder::ContextId id) { i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate); @@ -8149,12 +8160,12 @@ std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore( void* deleter_data) { CHECK_LE(byte_length, i::JSArrayBuffer::kMaxByteLength); #ifdef V8_ENABLE_SANDBOX - Utils::ApiCheck( - !data || i::GetProcessWideSandbox()->Contains(data), - "v8_ArrayBuffer_NewBackingStore", - "When the V8 Sandbox is enabled, ArrayBuffer backing stores must be " - "allocated inside the sandbox address space. Please use an appropriate " - "ArrayBuffer::Allocator to allocate these buffers."); + Utils::ApiCheck(!data || i::GetProcessWideSandbox()->Contains(data), + "v8_ArrayBuffer_NewBackingStore", + "When the V8 Sandbox is enabled, ArrayBuffer backing stores " + "must be allocated inside the sandbox address space. 
Please " + "use an appropriate ArrayBuffer::Allocator to allocate these " + "buffers, or disable the sandbox."); #endif // V8_ENABLE_SANDBOX std::unique_ptr<i::BackingStoreBase> backing_store = @@ -8246,7 +8257,7 @@ static_assert( Local<Type##Array> Type##Array::New( \ Local<SharedArrayBuffer> shared_array_buffer, size_t byte_offset, \ size_t length) { \ - CHECK(i::FLAG_harmony_sharedarraybuffer); \ + CHECK(i::v8_flags.harmony_sharedarraybuffer); \ i::Isolate* i_isolate = \ Utils::OpenHandle(*shared_array_buffer)->GetIsolate(); \ API_RCS_SCOPE(i_isolate, Type##Array, New); \ @@ -8281,7 +8292,7 @@ Local<DataView> DataView::New(Local<ArrayBuffer> array_buffer, Local<DataView> DataView::New(Local<SharedArrayBuffer> shared_array_buffer, size_t byte_offset, size_t byte_length) { - CHECK(i::FLAG_harmony_sharedarraybuffer); + CHECK(i::v8_flags.harmony_sharedarraybuffer); i::Handle<i::JSArrayBuffer> buffer = Utils::OpenHandle(*shared_array_buffer); i::Isolate* i_isolate = buffer->GetIsolate(); API_RCS_SCOPE(i_isolate, DataView, New); @@ -8298,7 +8309,7 @@ size_t v8::SharedArrayBuffer::ByteLength() const { Local<SharedArrayBuffer> v8::SharedArrayBuffer::New(Isolate* v8_isolate, size_t byte_length) { - CHECK(i::FLAG_harmony_sharedarraybuffer); + CHECK(i::v8_flags.harmony_sharedarraybuffer); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate); API_RCS_SCOPE(i_isolate, SharedArrayBuffer, New); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); @@ -8320,7 +8331,7 @@ Local<SharedArrayBuffer> v8::SharedArrayBuffer::New(Isolate* v8_isolate, Local<SharedArrayBuffer> v8::SharedArrayBuffer::New( Isolate* v8_isolate, std::shared_ptr<BackingStore> backing_store) { - CHECK(i::FLAG_harmony_sharedarraybuffer); + CHECK(i::v8_flags.harmony_sharedarraybuffer); CHECK_IMPLIES(backing_store->ByteLength() != 0, backing_store->Data() != nullptr); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(v8_isolate); @@ -8711,7 +8722,7 @@ bool Isolate::HasPendingBackgroundTasks() { } void Isolate::RequestGarbageCollectionForTesting(GarbageCollectionType type) { - Utils::ApiCheck(i::FLAG_expose_gc, + Utils::ApiCheck(i::v8_flags.expose_gc, "v8::Isolate::RequestGarbageCollectionForTesting", "Must use --expose-gc"); if (type == kMinorGarbageCollection) { @@ -9227,7 +9238,7 @@ int64_t Isolate::AdjustAmountOfExternalAllocatedMemory( void Isolate::SetEventLogger(LogEventCallback that) { // Do not overwrite the event logger if we want to log explicitly. - if (i::FLAG_log_internal_timer_events) return; + if (i::v8_flags.log_internal_timer_events) return; i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(this); i_isolate->set_event_logger(that); } @@ -9360,7 +9371,7 @@ bool Isolate::IdleNotificationDeadline(double deadline_in_seconds) { // Returning true tells the caller that it need not // continue to call IdleNotification. 
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(this); - if (!i::FLAG_use_idle_notification) return true; + if (!i::v8_flags.use_idle_notification) return true; return i_isolate->heap()->IdleNotification(deadline_in_seconds); } @@ -9563,7 +9574,7 @@ void Isolate::InstallConditionalFeatures(Local<Context> context) { if (i_isolate->is_execution_terminating()) return; i_isolate->InstallConditionalFeatures(Utils::OpenHandle(*context)); #if V8_ENABLE_WEBASSEMBLY - if (i::FLAG_expose_wasm && !i_isolate->has_pending_exception()) { + if (i::v8_flags.expose_wasm && !i_isolate->has_pending_exception()) { i::WasmJs::InstallConditionalFeatures(i_isolate, Utils::OpenHandle(*context)); } diff --git a/deps/v8/src/base/bits.cc b/deps/v8/src/base/bits.cc index e604cff6d5c96c..2a3dce97761c59 100644 --- a/deps/v8/src/base/bits.cc +++ b/deps/v8/src/base/bits.cc @@ -52,6 +52,46 @@ int32_t SignedMulHigh32(int32_t lhs, int32_t rhs) { 32u); } +// The algorithm used is described in section 8.2 of +// Hacker's Delight, by Henry S. Warren, Jr. +// It assumes that a right shift on a signed integer is an arithmetic shift. +int64_t SignedMulHigh64(int64_t u, int64_t v) { + uint64_t u0 = u & 0xFFFFFFFF; + int64_t u1 = u >> 32; + uint64_t v0 = v & 0xFFFFFFFF; + int64_t v1 = v >> 32; + + uint64_t w0 = u0 * v0; + int64_t t = u1 * v0 + (w0 >> 32); + int64_t w1 = t & 0xFFFFFFFF; + int64_t w2 = t >> 32; + w1 = u0 * v1 + w1; + + return u1 * v1 + w2 + (w1 >> 32); +} + +// The algorithm used is described in section 8.2 of +// Hacker's Delight, by Henry S. Warren, Jr. +uint64_t UnsignedMulHigh64(uint64_t u, uint64_t v) { + uint64_t u0 = u & 0xFFFFFFFF; + uint64_t u1 = u >> 32; + uint64_t v0 = v & 0xFFFFFFFF; + uint64_t v1 = v >> 32; + + uint64_t w0 = u0 * v0; + uint64_t t = u1 * v0 + (w0 >> 32); + uint64_t w1 = t & 0xFFFFFFFFLL; + uint64_t w2 = t >> 32; + w1 = u0 * v1 + w1; + + return u1 * v1 + w2 + (w1 >> 32); +} + +uint32_t UnsignedMulHigh32(uint32_t lhs, uint32_t rhs) { + uint64_t const value = + static_cast<uint64_t>(lhs) * static_cast<uint64_t>(rhs); + return static_cast<uint32_t>(value >> 32u); +} int32_t SignedMulHighAndAdd32(int32_t lhs, int32_t rhs, int32_t acc) { return base::bit_cast<int32_t>( @@ -66,12 +106,22 @@ int32_t SignedDiv32(int32_t lhs, int32_t rhs) { return lhs / rhs; } +int64_t SignedDiv64(int64_t lhs, int64_t rhs) { + if (rhs == 0) return 0; + if (rhs == -1) return lhs == std::numeric_limits<int64_t>::min() ? lhs : -lhs; + return lhs / rhs; +} int32_t SignedMod32(int32_t lhs, int32_t rhs) { if (rhs == 0 || rhs == -1) return 0; return lhs % rhs; } +int64_t SignedMod64(int64_t lhs, int64_t rhs) { + if (rhs == 0 || rhs == -1) return 0; + return lhs % rhs; +} + int64_t SignedSaturatedAdd64(int64_t lhs, int64_t rhs) { using limits = std::numeric_limits<int64_t>; // Underflow if {lhs + rhs < min}. In that case, return {min}. diff --git a/deps/v8/src/base/bits.h b/deps/v8/src/base/bits.h index 3209a4b081df7f..0cb22a9a904aea 100644 --- a/deps/v8/src/base/bits.h +++ b/deps/v8/src/base/bits.h @@ -70,6 +70,30 @@ T ReverseBits(T value) { return result; } +// ReverseBytes(value) returns |value| in reverse byte order. 
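+// For example, ReverseBytes<uint32_t>(0x12345678u) returns 0x78563412u.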
+template <typename T> +T ReverseBytes(T value) { + static_assert((sizeof(value) == 1) || (sizeof(value) == 2) || + (sizeof(value) == 4) || (sizeof(value) == 8)); + T result = 0; + for (unsigned i = 0; i < sizeof(value); i++) { + result = (result << 8) | (value & 0xff); + value >>= 8; + } + return result; +} + +template <class T> +inline constexpr std::make_unsigned_t<T> Unsigned(T value) { + static_assert(std::is_signed_v<T>); + return static_cast<std::make_unsigned_t<T>>(value); +} +template <class T> +inline constexpr std::make_signed_t<T> Signed(T value) { + static_assert(std::is_unsigned_v<T>); + return static_cast<std::make_signed_t<T>>(value); +} + // CountLeadingZeros(value) returns the number of zero bits following the most // significant 1 bit in |value| if |value| is non-zero, otherwise it returns // {sizeof(T) * 8}. @@ -104,6 +128,15 @@ inline constexpr unsigned CountLeadingZeros64(uint64_t value) { return CountLeadingZeros(value); } +// The number of leading zeros for a positive number, +// the number of leading ones for a negative number. +template <class T> +constexpr unsigned CountLeadingSignBits(T value) { + static_assert(std::is_signed_v<T>); + return value < 0 ? CountLeadingZeros(~Unsigned(value)) + : CountLeadingZeros(Unsigned(value)); +} + // CountTrailingZeros(value) returns the number of zero bits preceding the // least significant 1 bit in |value| if |value| is non-zero, otherwise it // returns {sizeof(T) * 8}. @@ -297,6 +330,21 @@ inline bool SignedSubOverflow64(int64_t lhs, int64_t rhs, int64_t* val) { // those. V8_BASE_EXPORT int32_t SignedMulHigh32(int32_t lhs, int32_t rhs); +// UnsignedMulHigh32(lhs, rhs) multiplies two unsigned 32-bit values |lhs| and +// |rhs|, extracts the most significant 32 bits of the result, and returns +// those. +V8_BASE_EXPORT uint32_t UnsignedMulHigh32(uint32_t lhs, uint32_t rhs); + +// SignedMulHigh64(lhs, rhs) multiplies two signed 64-bit values |lhs| and +// |rhs|, extracts the most significant 64 bits of the result, and returns +// those. +V8_BASE_EXPORT int64_t SignedMulHigh64(int64_t lhs, int64_t rhs); + +// UnsignedMulHigh64(lhs, rhs) multiplies two unsigned 64-bit values |lhs| and +// |rhs|, extracts the most significant 64 bits of the result, and returns +// those. +V8_BASE_EXPORT uint64_t UnsignedMulHigh64(uint64_t lhs, uint64_t rhs); + // SignedMulHighAndAdd32(lhs, rhs, acc) multiplies two signed 32-bit values // |lhs| and |rhs|, extracts the most significant 32 bits of the result, and // adds the accumulate value |acc|. @@ -308,11 +356,21 @@ V8_BASE_EXPORT int32_t SignedMulHighAndAdd32(int32_t lhs, int32_t rhs, // is minint and |rhs| is -1, it returns minint. V8_BASE_EXPORT int32_t SignedDiv32(int32_t lhs, int32_t rhs); +// SignedDiv64(lhs, rhs) divides |lhs| by |rhs| and returns the quotient +// truncated to int64. If |rhs| is zero, then zero is returned. If |lhs| +// is minint and |rhs| is -1, it returns minint. +V8_BASE_EXPORT int64_t SignedDiv64(int64_t lhs, int64_t rhs); + // SignedMod32(lhs, rhs) divides |lhs| by |rhs| and returns the remainder // truncated to int32. If either |rhs| is zero or |lhs| is minint and |rhs| // is -1, it returns zero. V8_BASE_EXPORT int32_t SignedMod32(int32_t lhs, int32_t rhs); +// SignedMod64(lhs, rhs) divides |lhs| by |rhs| and returns the remainder +// truncated to int64. If either |rhs| is zero or |lhs| is minint and |rhs| +// is -1, it returns zero. 
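+// For example, SignedMod64(-7, 3) == -1: as with C++'s %, the remainder is truncated toward zero, not floored.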
+V8_BASE_EXPORT int64_t SignedMod64(int64_t lhs, int64_t rhs); + // UnsignedAddOverflow32(lhs,rhs,val) performs an unsigned summation of |lhs| // and |rhs| and stores the result into the variable pointed to by |val| and // returns true if the unsigned summation resulted in an overflow. @@ -332,6 +390,11 @@ inline uint32_t UnsignedDiv32(uint32_t lhs, uint32_t rhs) { return rhs ? lhs / rhs : 0u; } +// UnsignedDiv64(lhs, rhs) divides |lhs| by |rhs| and returns the quotient +// truncated to uint64. If |rhs| is zero, then zero is returned. +inline uint64_t UnsignedDiv64(uint64_t lhs, uint64_t rhs) { + return rhs ? lhs / rhs : 0u; +} // UnsignedMod32(lhs, rhs) divides |lhs| by |rhs| and returns the remainder // truncated to uint32. If |rhs| is zero, then zero is returned. @@ -339,6 +402,12 @@ inline uint32_t UnsignedMod32(uint32_t lhs, uint32_t rhs) { return rhs ? lhs % rhs : 0u; } +// UnsignedMod64(lhs, rhs) divides |lhs| by |rhs| and returns the remainder +// truncated to uint64. If |rhs| is zero, then zero is returned. +inline uint64_t UnsignedMod64(uint64_t lhs, uint64_t rhs) { + return rhs ? lhs % rhs : 0u; +} + // Wraparound integer arithmetic without undefined behavior. inline int32_t WraparoundAdd32(int32_t lhs, int32_t rhs) { diff --git a/deps/v8/src/base/compiler-specific.h b/deps/v8/src/base/compiler-specific.h index 3221de08349843..d7ddefd7137811 100644 --- a/deps/v8/src/base/compiler-specific.h +++ b/deps/v8/src/base/compiler-specific.h @@ -135,4 +135,15 @@ #define ALIGNAS(byte_alignment) __attribute__((aligned(byte_alignment))) #endif +// Forces the linker to not GC the section corresponding to the symbol. +#if defined(__has_attribute) +#if __has_attribute(used) && __has_attribute(retain) +#define V8_DONT_STRIP_SYMBOL __attribute__((used, retain)) +#endif // __has_attribute(used) && __has_attribute(retain) +#endif // defined(__has_attribute) + +#if !defined(V8_DONT_STRIP_SYMBOL) +#define V8_DONT_STRIP_SYMBOL +#endif // !defined(V8_DONT_STRIP_SYMBOL) + #endif // V8_BASE_COMPILER_SPECIFIC_H_ diff --git a/deps/v8/src/base/division-by-constant.cc b/deps/v8/src/base/division-by-constant.cc index 97dfd5680b04df..fbc36463dc51d0 100644 --- a/deps/v8/src/base/division-by-constant.cc +++ b/deps/v8/src/base/division-by-constant.cc @@ -6,15 +6,16 @@ #include <stdint.h> +#include <type_traits> + #include "src/base/logging.h" #include "src/base/macros.h" namespace v8 { namespace base { -template <class T> +template <class T, std::enable_if_t<std::is_unsigned_v<T>, bool>> MagicNumbersForDivision<T> SignedDivisionByConstant(T d) { - static_assert(static_cast<T>(0) < static_cast<T>(-1)); DCHECK(d != static_cast<T>(-1) && d != 0 && d != 1); const unsigned bits = static_cast<unsigned>(sizeof(T)) * 8; const T min = (static_cast<T>(1) << (bits - 1)); @@ -48,11 +49,10 @@ MagicNumbersForDivision<T> SignedDivisionByConstant(T d) { return MagicNumbersForDivision<T>(neg ? 
(0 - mul) : mul, p - bits, false); } - template <class T> MagicNumbersForDivision<T> UnsignedDivisionByConstant(T d, unsigned leading_zeros) { - static_assert(static_cast<T>(0) < static_cast<T>(-1)); + static_assert(std::is_unsigned_v<T>); DCHECK_NE(d, 0); const unsigned bits = static_cast<unsigned>(sizeof(T)) * 8; const T ones = ~static_cast<T>(0) >> leading_zeros; diff --git a/deps/v8/src/base/division-by-constant.h b/deps/v8/src/base/division-by-constant.h index 744283981bc3de..4b9f4a873c289c 100644 --- a/deps/v8/src/base/division-by-constant.h +++ b/deps/v8/src/base/division-by-constant.h @@ -7,6 +7,9 @@ #include <stdint.h> +#include <tuple> +#include <type_traits> + #include "src/base/base-export.h" #include "src/base/export-template.h" @@ -16,10 +19,10 @@ namespace base { // ---------------------------------------------------------------------------- // The magic numbers for division via multiplication, see Warren's "Hacker's -// Delight", chapter 10. The template parameter must be one of the unsigned -// integral types. +// Delight", chapter 10. template <class T> struct EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) MagicNumbersForDivision { + static_assert(std::is_integral_v<T>); MagicNumbersForDivision(T m, unsigned s, bool a) : multiplier(m), shift(s), add(a) {} bool operator==(const MagicNumbersForDivision& rhs) const { @@ -31,13 +34,20 @@ struct EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) MagicNumbersForDivision { bool add; }; - // Calculate the multiplier and shift for signed division via multiplication. // The divisor must not be -1, 0 or 1 when interpreted as a signed value. -template <class T> +template <class T, std::enable_if_t<std::is_unsigned_v<T>, bool> = true> EXPORT_TEMPLATE_DECLARE(V8_BASE_EXPORT) MagicNumbersForDivision<T> SignedDivisionByConstant(T d); +template <class T, std::enable_if_t<std::is_signed_v<T>, bool> = true> +MagicNumbersForDivision<T> SignedDivisionByConstant(T d) { + using Unsigned = std::make_unsigned_t<T>; + MagicNumbersForDivision<Unsigned> magic = + SignedDivisionByConstant(static_cast<Unsigned>(d)); + return {static_cast<T>(magic.multiplier), magic.shift, magic.add}; +} + // Calculate the multiplier and shift for unsigned division via multiplication, // see Warren's "Hacker's Delight", chapter 10. 
The divisor must not be 0 and // leading_zeros can be used to speed up the calculation if the given number of diff --git a/deps/v8/src/baseline/baseline-compiler.cc b/deps/v8/src/baseline/baseline-compiler.cc index 4db43686acc11a..25123cb7cd80eb 100644 --- a/deps/v8/src/baseline/baseline-compiler.cc +++ b/deps/v8/src/baseline/baseline-compiler.cc @@ -967,14 +967,6 @@ void BaselineCompiler::VisitDefineKeyedOwnPropertyInLiteral() { IndexAsTagged(3)); // slot } -void BaselineCompiler::VisitCollectTypeProfile() { - SaveAccumulatorScope accumulator_scope(&basm_); - CallRuntime(Runtime::kCollectTypeProfile, - IntAsSmi(0), // position - kInterpreterAccumulatorRegister, // value - FeedbackVector()); // feedback vector -} - void BaselineCompiler::VisitAdd() { CallBuiltin<Builtin::kAdd_Baseline>( RegisterOperand(0), kInterpreterAccumulatorRegister, Index(1)); @@ -1158,10 +1150,10 @@ void BaselineCompiler::VisitGetSuperConstructor() { StoreRegister(0, prototype); } -void BaselineCompiler::VisitFindNonDefaultConstructor() { +void BaselineCompiler::VisitFindNonDefaultConstructorOrConstruct() { SaveAccumulatorScope accumulator_scope(&basm_); - CallBuiltin<Builtin::kFindNonDefaultConstructor>(RegisterOperand(0), - RegisterOperand(1)); + CallBuiltin<Builtin::kFindNonDefaultConstructorOrConstruct>( + RegisterOperand(0), RegisterOperand(1)); StoreRegisterPair(2, kReturnRegister0, kReturnRegister1); } @@ -1421,9 +1413,9 @@ void BaselineCompiler::VisitIntrinsicAsyncGeneratorResolve( CallBuiltin<Builtin::kAsyncGeneratorResolve>(args); } -void BaselineCompiler::VisitIntrinsicAsyncGeneratorYield( +void BaselineCompiler::VisitIntrinsicAsyncGeneratorYieldWithAwait( interpreter::RegisterList args) { - CallBuiltin<Builtin::kAsyncGeneratorYield>(args); + CallBuiltin<Builtin::kAsyncGeneratorYieldWithAwait>(args); } void BaselineCompiler::VisitConstruct() { diff --git a/deps/v8/src/builtins/arm/builtins-arm.cc b/deps/v8/src/builtins/arm/builtins-arm.cc index 81f5c961bc442a..d6a6591dfe769c 100644 --- a/deps/v8/src/builtins/arm/builtins-arm.cc +++ b/deps/v8/src/builtins/arm/builtins-arm.cc @@ -1051,7 +1051,7 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { // Drop the frame created by the baseline call. __ ldm(ia_w, sp, {fp, lr}); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ Trap(); } @@ -1300,7 +1300,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ jmp(&after_stack_check_interrupt); __ bind(&flags_need_processing); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ bind(&is_baseline); { diff --git a/deps/v8/src/builtins/arm64/builtins-arm64.cc b/deps/v8/src/builtins/arm64/builtins-arm64.cc index 00368d0da6a974..168270bf6fb66c 100644 --- a/deps/v8/src/builtins/arm64/builtins-arm64.cc +++ b/deps/v8/src/builtins/arm64/builtins-arm64.cc @@ -1205,7 +1205,7 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { ASM_CODE_COMMENT_STRING(masm, "Optimized marker check"); // Drop the frame created by the baseline call. 
__ Pop<TurboAssembler::kAuthLR>(fp, lr); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ Trap(); } @@ -1474,7 +1474,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ jmp(&after_stack_check_interrupt); __ bind(&flags_need_processing); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ bind(&is_baseline); { diff --git a/deps/v8/src/builtins/array-to-reversed.tq b/deps/v8/src/builtins/array-to-reversed.tq index 542bc867f04e6e..5d97d6546ea1fe 100644 --- a/deps/v8/src/builtins/array-to-reversed.tq +++ b/deps/v8/src/builtins/array-to-reversed.tq @@ -8,8 +8,8 @@ macro FastPackedArrayToReversed<Accessor: type, T: type>( kind: constexpr ElementsKind, elements: FixedArrayBase, length: Smi): JSArray { // 3. Let A be ? ArrayCreate(𝔽(len)). - const copy: FixedArrayBase = - AllocateFixedArray(kind, SmiUntag(length), AllocationFlag::kNone); + const copy: FixedArrayBase = AllocateFixedArray( + kind, SmiUntag(length), AllocationFlag::kAllowLargeObjectAllocation); // 4. Let k be 0. let k: Smi = 0; @@ -39,6 +39,8 @@ macro TryFastPackedArrayToReversed(implicit context: Context)(receiver: JSAny): JSArray labels Slow { const array: FastJSArray = Cast<FastJSArray>(receiver) otherwise Slow; + if (array.length < 1) return ArrayCreate(0); + const kind: ElementsKind = array.map.elements_kind; if (kind == ElementsKind::PACKED_SMI_ELEMENTS) { return FastPackedArrayToReversed<array::FastPackedSmiElements, Smi>( diff --git a/deps/v8/src/builtins/array-to-sorted.tq b/deps/v8/src/builtins/array-to-sorted.tq index 0a953ab4e533d0..0a36502825c3cc 100644 --- a/deps/v8/src/builtins/array-to-sorted.tq +++ b/deps/v8/src/builtins/array-to-sorted.tq @@ -15,7 +15,8 @@ CopyWorkArrayToNewFastJSArray(implicit context: Context, sortState: SortState)( dcheck(len <= kMaxFastArrayLength); const copy: FixedArray = UnsafeCast<FixedArray>(AllocateFixedArray( - elementsKind, Convert<intptr>(len), AllocationFlag::kNone)); + elementsKind, Convert<intptr>(len), + AllocationFlag::kAllowLargeObjectAllocation)); const workArray = sortState.workArray; CopyElements( diff --git a/deps/v8/src/builtins/array-to-spliced.tq b/deps/v8/src/builtins/array-to-spliced.tq index 999c1388624970..505a58b733cf05 100644 --- a/deps/v8/src/builtins/array-to-spliced.tq +++ b/deps/v8/src/builtins/array-to-spliced.tq @@ -14,11 +14,13 @@ macro CopyFastPackedArrayForToSpliced(implicit context: Context)( const insertCount: intptr = Convert<intptr>(insertCountSmi); const actualDeleteCount: intptr = Convert<intptr>(actualDeleteCountSmi); - const copy: FixedArrayBase = - AllocateFixedArray(kind, newLen, AllocationFlag::kNone); + const copy: FixedArrayBase = AllocateFixedArray( + kind, newLen, AllocationFlag::kAllowLargeObjectAllocation); - // Copy the part before the inserted items. - CopyElements(kind, copy, 0, array.elements, 0, actualStart); + if (actualStart > 0) { + // Copy the part before the inserted items. + CopyElements(kind, copy, 0, array.elements, 0, actualStart); + } // Initialize elements that will hold the inserted items because the // NewJSArray below may allocate. Leave the actual insertion for later since @@ -36,9 +38,11 @@ macro CopyFastPackedArrayForToSpliced(implicit context: Context)( // Copy the part after the inserted items. 
const secondPartStart: intptr = actualStart + insertCount; const secondPartLen: intptr = newLen - secondPartStart; - const r: intptr = actualStart + actualDeleteCount; - dcheck(Convert<Smi>(r + secondPartLen) <= array.length); - CopyElements(kind, copy, secondPartStart, array.elements, r, secondPartLen); + if (secondPartLen > 0) { + const r: intptr = actualStart + actualDeleteCount; + dcheck(Convert<Smi>(r + secondPartLen) <= array.length); + CopyElements(kind, copy, secondPartStart, array.elements, r, secondPartLen); + } const map: Map = LoadJSArrayElementsMap(kind, LoadNativeContext(context)); return NewJSArray(map, copy); diff --git a/deps/v8/src/builtins/array-with.tq b/deps/v8/src/builtins/array-with.tq index 161bce9f0b4b8a..e6a6c7cfbcf580 100644 --- a/deps/v8/src/builtins/array-with.tq +++ b/deps/v8/src/builtins/array-with.tq @@ -55,10 +55,8 @@ transitioning builtin GenericArrayWith( // https://tc39.es/proposal-change-array-by-copy/#sec-array.prototype.with transitioning javascript builtin ArrayPrototypeWith( - js-implicit context: NativeContext, receiver: JSAny)(...arguments): JSAny { - const index = arguments[0]; - const value = arguments[1]; - + js-implicit context: NativeContext, receiver: JSAny)( + index: JSAny, value: JSAny): JSAny { // 1. Let O be ? ToObject(this value). const object: JSReceiver = ToObject_Inline(context, receiver); diff --git a/deps/v8/src/builtins/base.tq b/deps/v8/src/builtins/base.tq index 4d5f054a178f90..40f702549d93a6 100644 --- a/deps/v8/src/builtins/base.tq +++ b/deps/v8/src/builtins/base.tq @@ -437,10 +437,11 @@ extern enum MessageTemplate { kWasmTrapArrayOutOfBounds, kWasmTrapArrayTooLarge, kWasmTrapStringOffsetOutOfBounds, + kWasmObjectsAreOpaque, kWeakRefsRegisterTargetAndHoldingsMustNotBeSame, - kWeakRefsRegisterTargetMustBeObject, - kWeakRefsUnregisterTokenMustBeObject, - kWeakRefsWeakRefConstructorTargetMustBeObject, + kInvalidWeakRefsRegisterTarget, + kInvalidWeakRefsUnregisterToken, + kInvalidWeakRefsWeakRefConstructorTarget, ... } @@ -917,10 +918,10 @@ macro Float64IsNaN(n: float64): bool { // The type of all tagged values that can safely be compared with TaggedEqual. @if(V8_ENABLE_WEBASSEMBLY) type TaggedWithIdentity = JSReceiver | FixedArrayBase | Oddball | Map | - WeakCell | Context | EmptyString | WasmInternalFunction; + WeakCell | Context | EmptyString | Symbol | WasmInternalFunction; @ifnot(V8_ENABLE_WEBASSEMBLY) type TaggedWithIdentity = JSReceiver | FixedArrayBase | Oddball | Map | - WeakCell | Context | EmptyString; + WeakCell | Context | EmptyString | Symbol; extern operator '==' macro TaggedEqual(TaggedWithIdentity, Object): bool; extern operator '==' macro TaggedEqual(Object, TaggedWithIdentity): bool; diff --git a/deps/v8/src/builtins/builtins-array.cc b/deps/v8/src/builtins/builtins-array.cc index 79e8396bf49897..49fe48d6987a46 100644 --- a/deps/v8/src/builtins/builtins-array.cc +++ b/deps/v8/src/builtins/builtins-array.cc @@ -503,6 +503,8 @@ namespace { // Returns true, iff we can use ElementsAccessor for shifting. 
V8_WARN_UNUSED_RESULT bool CanUseFastArrayShift(Isolate* isolate, Handle<JSReceiver> receiver) { + if (V8_COMPRESS_POINTERS_8GB_BOOL) return false; + if (!EnsureJSArrayWithWritableFastElements(isolate, receiver, nullptr, 0, 0) || !IsJSArrayFastElementMovingAllowed(isolate, JSArray::cast(*receiver))) { @@ -1599,7 +1601,8 @@ enum class ArrayGroupMode { kToObject, kToMap }; template <ArrayGroupMode mode> inline MaybeHandle<OrderedHashMap> GenericArrayGroup( Isolate* isolate, Handle<JSReceiver> O, Handle<Object> callbackfn, - Handle<OrderedHashMap> groups, double initialK, double len) { + Handle<Object> thisArg, Handle<OrderedHashMap> groups, double initialK, + double len) { // 6. Repeat, while k < len for (double k = initialK; k < len; ++k) { // 6a. Let Pk be ! ToString(𝔽(k)). @@ -1617,9 +1620,9 @@ inline MaybeHandle<OrderedHashMap> GenericArrayGroup( // 6c. Let key be ? Call(callbackfn, thisArg, « kValue, 𝔽(k), O »). Handle<Object> propertyKey; Handle<Object> argv[] = {kValue, isolate->factory()->NewNumber(k), O}; - ASSIGN_RETURN_ON_EXCEPTION(isolate, propertyKey, - Execution::Call(isolate, callbackfn, O, 3, argv), - OrderedHashMap); + ASSIGN_RETURN_ON_EXCEPTION( + isolate, propertyKey, + Execution::Call(isolate, callbackfn, thisArg, 3, argv), OrderedHashMap); if (mode == ArrayGroupMode::kToMap) { // 6d. If key is -0𝔽, set key to +0𝔽. @@ -1649,7 +1652,7 @@ inline MaybeHandle<OrderedHashMap> GenericArrayGroup( template <ArrayGroupMode mode> inline MaybeHandle<OrderedHashMap> FastArrayGroup( Isolate* isolate, Handle<JSArray> array, Handle<Object> callbackfn, - Handle<OrderedHashMap> groups, double len, + Handle<Object> thisArg, Handle<OrderedHashMap> groups, double len, ElementsKind* result_elements_kind) { DCHECK_NOT_NULL(result_elements_kind); @@ -1662,8 +1665,8 @@ inline MaybeHandle<OrderedHashMap> FastArrayGroup( for (InternalIndex k : InternalIndex::Range(uint_len)) { if (!CheckArrayMapNotModified(array, original_map) || k.as_uint32() >= static_cast<uint32_t>(array->length().Number())) { - return GenericArrayGroup<mode>(isolate, array, callbackfn, groups, - k.as_uint32(), len); + return GenericArrayGroup<mode>(isolate, array, callbackfn, thisArg, + groups, k.as_uint32(), len); } // 6a. Let Pk be ! ToString(𝔽(k)). // 6b. Let kValue be ? Get(O, Pk). @@ -1679,7 +1682,7 @@ inline MaybeHandle<OrderedHashMap> FastArrayGroup( kValue, isolate->factory()->NewNumber(k.as_uint32()), array}; ASSIGN_RETURN_ON_EXCEPTION( isolate, propertyKey, - Execution::Call(isolate, callbackfn, array, 3, argv), OrderedHashMap); + Execution::Call(isolate, callbackfn, thisArg, 3, argv), OrderedHashMap); if (mode == ArrayGroupMode::kToMap) { // 6d. If key is -0𝔽, set key to +0𝔽. @@ -1719,7 +1722,7 @@ inline MaybeHandle<OrderedHashMap> FastArrayGroup( } // namespace -// https://tc39.es/proposal-array-grouping/#sec-array.prototype.groupby +// https://tc39.es/proposal-array-grouping/#sec-array.prototype.group BUILTIN(ArrayPrototypeGroup) { const char* const kMethodName = "Array.prototype.group"; HandleScope scope(isolate); @@ -1741,6 +1744,8 @@ BUILTIN(ArrayPrototypeGroup) { isolate, NewTypeError(MessageTemplate::kCalledNonCallable, callbackfn)); } + Handle<Object> thisArg = args.atOrUndefined(isolate, 2); + // 5. Let groups be a new empty List. 
Handle<OrderedHashMap> groups = isolate->factory()->NewOrderedHashMap(); ElementsKind result_elements_kind = ElementsKind::PACKED_ELEMENTS; @@ -1748,14 +1753,15 @@ BUILTIN(ArrayPrototypeGroup) { Handle<JSArray> array = Handle<JSArray>::cast(O); ASSIGN_RETURN_FAILURE_ON_EXCEPTION( isolate, groups, - FastArrayGroup<ArrayGroupMode::kToObject>( - isolate, array, callbackfn, groups, len, &result_elements_kind)); + FastArrayGroup<ArrayGroupMode::kToObject>(isolate, array, callbackfn, + thisArg, groups, len, + &result_elements_kind)); } else { // 4. Let k be 0. ASSIGN_RETURN_FAILURE_ON_EXCEPTION( isolate, groups, GenericArrayGroup<ArrayGroupMode::kToObject>(isolate, O, callbackfn, - groups, 0, len)); + thisArg, groups, 0, len)); } // 7. Let obj be ! OrdinaryObjectCreate(null). @@ -1781,7 +1787,7 @@ BUILTIN(ArrayPrototypeGroup) { return *obj; } -// https://tc39.es/proposal-array-grouping/#sec-array.prototype.groupbymap +// https://tc39.es/proposal-array-grouping/#sec-array.prototype.grouptomap BUILTIN(ArrayPrototypeGroupToMap) { const char* const kMethodName = "Array.prototype.groupToMap"; HandleScope scope(isolate); @@ -1803,21 +1809,23 @@ BUILTIN(ArrayPrototypeGroupToMap) { isolate, NewTypeError(MessageTemplate::kCalledNonCallable, callbackfn)); } + Handle<Object> thisArg = args.atOrUndefined(isolate, 2); + // 5. Let groups be a new empty List. Handle<OrderedHashMap> groups = isolate->factory()->NewOrderedHashMap(); ElementsKind result_elements_kind = ElementsKind::PACKED_ELEMENTS; if (IsFastArray(O)) { Handle<JSArray> array = Handle<JSArray>::cast(O); - ASSIGN_RETURN_FAILURE_ON_EXCEPTION( - isolate, groups, - FastArrayGroup<ArrayGroupMode::kToMap>( - isolate, array, callbackfn, groups, len, &result_elements_kind)); + ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, groups, + FastArrayGroup<ArrayGroupMode::kToMap>( + isolate, array, callbackfn, thisArg, + groups, len, &result_elements_kind)); } else { // 4. Let k be 0. ASSIGN_RETURN_FAILURE_ON_EXCEPTION( isolate, groups, GenericArrayGroup<ArrayGroupMode::kToMap>(isolate, O, callbackfn, - groups, 0, len)); + thisArg, groups, 0, len)); } // 7. Let map be ! Construct(%Map%). diff --git a/deps/v8/src/builtins/builtins-arraybuffer.cc b/deps/v8/src/builtins/builtins-arraybuffer.cc index 4d19f6ed42dda8..fbe29b434fae36 100644 --- a/deps/v8/src/builtins/builtins-arraybuffer.cc +++ b/deps/v8/src/builtins/builtins-arraybuffer.cc @@ -24,7 +24,7 @@ namespace internal { } #define CHECK_RESIZABLE(expected, name, method) \ - if (name->is_resizable() != expected) { \ + if (name->is_resizable_by_js() != expected) { \ THROW_NEW_ERROR_RETURN_FAILURE( \ isolate, \ NewTypeError(MessageTemplate::kIncompatibleMethodReceiver, \ @@ -316,7 +316,7 @@ static Object SliceHelper(BuiltinArguments args, Isolate* isolate, if (new_len_size != 0) { size_t from_byte_length = array_buffer->GetByteLength(); - if (V8_UNLIKELY(!is_shared && array_buffer->is_resizable())) { + if (V8_UNLIKELY(!is_shared && array_buffer->is_resizable_by_js())) { // The above steps might have resized the underlying buffer. In that case, // only copy the still-accessible portion of the underlying data. if (first_size > from_byte_length) { @@ -569,7 +569,7 @@ BUILTIN(ArrayBufferPrototypeTransfer) { // Case 2: We can reuse the same BackingStore. 
auto from_backing_store = array_buffer->GetBackingStore(); - if (!from_backing_store->is_resizable() && + if (from_backing_store && !from_backing_store->is_resizable_by_js() && (new_byte_length == array_buffer->GetByteLength() || from_backing_store->CanReallocate())) { // Reallocate covers steps 6-12. diff --git a/deps/v8/src/builtins/builtins-async-generator-gen.cc b/deps/v8/src/builtins/builtins-async-generator-gen.cc index beccd0dfa25d81..26dcabe6c3b320 100644 --- a/deps/v8/src/builtins/builtins-async-generator-gen.cc +++ b/deps/v8/src/builtins/builtins-async-generator-gen.cc @@ -602,7 +602,7 @@ TF_BUILTIN(AsyncGeneratorReject, AsyncGeneratorBuiltinsAssembler) { TrueConstant())); } -TF_BUILTIN(AsyncGeneratorYield, AsyncGeneratorBuiltinsAssembler) { +TF_BUILTIN(AsyncGeneratorYieldWithAwait, AsyncGeneratorBuiltinsAssembler) { const auto generator = Parameter<JSGeneratorObject>(Descriptor::kGenerator); const auto value = Parameter<Object>(Descriptor::kValue); const auto is_caught = Parameter<Oddball>(Descriptor::kIsCaught); @@ -614,13 +614,14 @@ TF_BUILTIN(AsyncGeneratorYield, AsyncGeneratorBuiltinsAssembler) { LoadPromiseFromAsyncGeneratorRequest(request); Await(context, generator, value, outer_promise, - AsyncGeneratorYieldResolveSharedFunConstant(), + AsyncGeneratorYieldWithAwaitResolveSharedFunConstant(), AsyncGeneratorAwaitRejectSharedFunConstant(), is_caught); SetGeneratorAwaiting(generator); Return(UndefinedConstant()); } -TF_BUILTIN(AsyncGeneratorYieldResolveClosure, AsyncGeneratorBuiltinsAssembler) { +TF_BUILTIN(AsyncGeneratorYieldWithAwaitResolveClosure, + AsyncGeneratorBuiltinsAssembler) { const auto context = Parameter<Context>(Descriptor::kContext); const auto value = Parameter<Object>(Descriptor::kValue); const TNode<JSAsyncGeneratorObject> generator = diff --git a/deps/v8/src/builtins/builtins-bigint-gen.h b/deps/v8/src/builtins/builtins-bigint-gen.h index c1c9265e4c729e..8543f5fe999148 100644 --- a/deps/v8/src/builtins/builtins-bigint-gen.h +++ b/deps/v8/src/builtins/builtins-bigint-gen.h @@ -63,32 +63,34 @@ class BigIntBuiltinsAssembler : public CodeStubAssembler { std::make_pair(MachineType::AnyTagged(), y)); } - TNode<BoolT> CppAbsoluteMulAndCanonicalize(TNode<BigInt> result, - TNode<BigInt> x, TNode<BigInt> y) { + TNode<Int32T> CppAbsoluteMulAndCanonicalize(TNode<BigInt> result, + TNode<BigInt> x, + TNode<BigInt> y) { TNode<ExternalReference> mutable_big_int_absolute_mul_and_canonicalize = ExternalConstant( ExternalReference:: mutable_big_int_absolute_mul_and_canonicalize_function()); - TNode<BoolT> success = UncheckedCast<BoolT>(CallCFunction( - mutable_big_int_absolute_mul_and_canonicalize, MachineType::Bool(), + TNode<Int32T> return_code = UncheckedCast<Int32T>(CallCFunction( + mutable_big_int_absolute_mul_and_canonicalize, MachineType::Int32(), std::make_pair(MachineType::AnyTagged(), result), std::make_pair(MachineType::AnyTagged(), x), std::make_pair(MachineType::AnyTagged(), y))); - return success; + return return_code; } - TNode<BoolT> CppAbsoluteDivAndCanonicalize(TNode<BigInt> result, - TNode<BigInt> x, TNode<BigInt> y) { + TNode<Int32T> CppAbsoluteDivAndCanonicalize(TNode<BigInt> result, + TNode<BigInt> x, + TNode<BigInt> y) { TNode<ExternalReference> mutable_big_int_absolute_div_and_canonicalize = ExternalConstant( ExternalReference:: mutable_big_int_absolute_div_and_canonicalize_function()); - TNode<BoolT> success = UncheckedCast<BoolT>(CallCFunction( - mutable_big_int_absolute_div_and_canonicalize, MachineType::Bool(), + TNode<Int32T> return_code = 
UncheckedCast<Int32T>(CallCFunction( + mutable_big_int_absolute_div_and_canonicalize, MachineType::Int32(), std::make_pair(MachineType::AnyTagged(), result), std::make_pair(MachineType::AnyTagged(), x), std::make_pair(MachineType::AnyTagged(), y))); - return success; + return return_code; } void CppBitwiseAndPosPosAndCanonicalize(TNode<BigInt> result, TNode<BigInt> x, diff --git a/deps/v8/src/builtins/builtins-bigint.tq b/deps/v8/src/builtins/builtins-bigint.tq index be5d42aff0de4a..5ef53a54ce399c 100644 --- a/deps/v8/src/builtins/builtins-bigint.tq +++ b/deps/v8/src/builtins/builtins-bigint.tq @@ -14,9 +14,9 @@ extern macro BigIntBuiltinsAssembler::CppAbsoluteAddAndCanonicalize( extern macro BigIntBuiltinsAssembler::CppAbsoluteSubAndCanonicalize( MutableBigInt, BigIntBase, BigIntBase): void; extern macro BigIntBuiltinsAssembler::CppAbsoluteMulAndCanonicalize( - MutableBigInt, BigIntBase, BigIntBase): bool; + MutableBigInt, BigIntBase, BigIntBase): int32; extern macro BigIntBuiltinsAssembler::CppAbsoluteDivAndCanonicalize( - MutableBigInt, BigIntBase, BigIntBase): bool; + MutableBigInt, BigIntBase, BigIntBase): int32; extern macro BigIntBuiltinsAssembler::CppBitwiseAndPosPosAndCanonicalize( MutableBigInt, BigIntBase, BigIntBase): void; extern macro BigIntBuiltinsAssembler::CppBitwiseAndNegNegAndCanonicalize( @@ -236,7 +236,7 @@ macro BigIntMultiplyImpl(implicit context: Context)(x: BigInt, y: BigInt): const result = AllocateEmptyBigIntNoThrow(resultSign, xlength + ylength) otherwise BigIntTooBig; - if (!CppAbsoluteMulAndCanonicalize(result, x, y)) { + if (CppAbsoluteMulAndCanonicalize(result, x, y) == 1) { goto TerminationRequested; } @@ -305,7 +305,7 @@ macro BigIntDivideImpl(implicit context: Context)(x: BigInt, y: BigInt): const result = AllocateEmptyBigIntNoThrow(resultSign, resultLength) otherwise unreachable; - if (!CppAbsoluteDivAndCanonicalize(result, x, y)) { + if (CppAbsoluteDivAndCanonicalize(result, x, y) == 1) { goto TerminationRequested; } diff --git a/deps/v8/src/builtins/builtins-collections-gen.cc b/deps/v8/src/builtins/builtins-collections-gen.cc index f6238e30728766..f6edbb6bc028f4 100644 --- a/deps/v8/src/builtins/builtins-collections-gen.cc +++ b/deps/v8/src/builtins/builtins-collections-gen.cc @@ -22,130 +22,6 @@ namespace internal { template <class T> using TVariable = compiler::TypedCodeAssemblerVariable<T>; -class BaseCollectionsAssembler : public CodeStubAssembler { - public: - explicit BaseCollectionsAssembler(compiler::CodeAssemblerState* state) - : CodeStubAssembler(state) {} - - virtual ~BaseCollectionsAssembler() = default; - - protected: - enum Variant { kMap, kSet, kWeakMap, kWeakSet }; - - // Adds an entry to a collection. For Maps, properly handles extracting the - // key and value from the entry (see LoadKeyValue()). - void AddConstructorEntry(Variant variant, TNode<Context> context, - TNode<Object> collection, TNode<Object> add_function, - TNode<Object> key_value, - Label* if_may_have_side_effects = nullptr, - Label* if_exception = nullptr, - TVariable<Object>* var_exception = nullptr); - - // Adds constructor entries to a collection. Choosing a fast path when - // possible. - void AddConstructorEntries(Variant variant, TNode<Context> context, - TNode<Context> native_context, - TNode<HeapObject> collection, - TNode<Object> initial_entries); - - // Fast path for adding constructor entries. Assumes the entries are a fast - // JS array (see CodeStubAssembler::BranchIfFastJSArray()). 
- void AddConstructorEntriesFromFastJSArray(Variant variant, - TNode<Context> context, - TNode<Context> native_context, - TNode<Object> collection, - TNode<JSArray> fast_jsarray, - Label* if_may_have_side_effects); - - // Adds constructor entries to a collection using the iterator protocol. - void AddConstructorEntriesFromIterable(Variant variant, - TNode<Context> context, - TNode<Context> native_context, - TNode<Object> collection, - TNode<Object> iterable); - - // Constructs a collection instance. Choosing a fast path when possible. - TNode<JSObject> AllocateJSCollection(TNode<Context> context, - TNode<JSFunction> constructor, - TNode<JSReceiver> new_target); - - // Fast path for constructing a collection instance if the constructor - // function has not been modified. - TNode<JSObject> AllocateJSCollectionFast(TNode<JSFunction> constructor); - - // Fallback for constructing a collection instance if the constructor function - // has been modified. - TNode<JSObject> AllocateJSCollectionSlow(TNode<Context> context, - TNode<JSFunction> constructor, - TNode<JSReceiver> new_target); - - // Allocates the backing store for a collection. - virtual TNode<HeapObject> AllocateTable( - Variant variant, TNode<IntPtrT> at_least_space_for) = 0; - - // Main entry point for a collection constructor builtin. - void GenerateConstructor(Variant variant, - Handle<String> constructor_function_name, - TNode<Object> new_target, TNode<IntPtrT> argc, - TNode<Context> context); - - // Retrieves the collection function that adds an entry. `set` for Maps and - // `add` for Sets. - TNode<Object> GetAddFunction(Variant variant, TNode<Context> context, - TNode<Object> collection); - - // Retrieves the collection constructor function. - TNode<JSFunction> GetConstructor(Variant variant, - TNode<Context> native_context); - - // Retrieves the initial collection function that adds an entry. Should only - // be called when it is certain that a collection prototype's map hasn't been - // changed. - TNode<JSFunction> GetInitialAddFunction(Variant variant, - TNode<Context> native_context); - - // Checks whether {collection}'s initial add/set function has been modified - // (depending on {variant}, loaded from {native_context}). - void GotoIfInitialAddFunctionModified(Variant variant, - TNode<NativeContext> native_context, - TNode<HeapObject> collection, - Label* if_modified); - - // Gets root index for the name of the add/set function. - RootIndex GetAddFunctionNameIndex(Variant variant); - - // Retrieves the offset to access the backing table from the collection. - int GetTableOffset(Variant variant); - - // Estimates the number of entries the collection will have after adding the - // entries passed in the constructor. AllocateTable() can use this to avoid - // the time of growing/rehashing when adding the constructor entries. - TNode<IntPtrT> EstimatedInitialSize(TNode<Object> initial_entries, - TNode<BoolT> is_fast_jsarray); - - void GotoIfCannotBeWeakKey(const TNode<Object> obj, - Label* if_cannot_be_weak_key); - - // Determines whether the collection's prototype has been modified. - TNode<BoolT> HasInitialCollectionPrototype(Variant variant, - TNode<Context> native_context, - TNode<Object> collection); - - // Gets the initial prototype map for given collection {variant}. - TNode<Map> GetInitialCollectionPrototype(Variant variant, - TNode<Context> native_context); - - // Loads an element from a fixed array. If the element is the hole, returns - // `undefined`. 
- TNode<Object> LoadAndNormalizeFixedArrayElement(TNode<FixedArray> elements, - TNode<IntPtrT> index); - - // Loads an element from a fixed double array. If the element is the hole, - // returns `undefined`. - TNode<Object> LoadAndNormalizeFixedDoubleArrayElement( - TNode<HeapObject> elements, TNode<IntPtrT> index); -}; - void BaseCollectionsAssembler::AddConstructorEntry( Variant variant, TNode<Context> context, TNode<Object> collection, TNode<Object> add_function, TNode<Object> key_value, @@ -177,6 +53,9 @@ void BaseCollectionsAssembler::AddConstructorEntries( EstimatedInitialSize(initial_entries, use_fast_loop.value()); Label allocate_table(this, &use_fast_loop), exit(this), fast_loop(this), slow_loop(this, Label::kDeferred); + TVARIABLE(JSReceiver, var_iterator_object); + TVARIABLE(Object, var_exception); + Label if_exception(this, Label::kDeferred); Goto(&allocate_table); BIND(&allocate_table); { @@ -189,6 +68,7 @@ void BaseCollectionsAssembler::AddConstructorEntries( } BIND(&fast_loop); { + Label if_exception_during_fast_iteration(this); TNode<JSArray> initial_entries_jsarray = UncheckedCast<JSArray>(initial_entries); #if DEBUG @@ -198,9 +78,13 @@ void BaseCollectionsAssembler::AddConstructorEntries( #endif Label if_may_have_side_effects(this, Label::kDeferred); - AddConstructorEntriesFromFastJSArray(variant, context, native_context, - collection, initial_entries_jsarray, - &if_may_have_side_effects); + { + compiler::ScopedExceptionHandler handler( + this, &if_exception_during_fast_iteration, &var_exception); + AddConstructorEntriesFromFastJSArray(variant, context, native_context, - collection, initial_entries_jsarray, - &if_may_have_side_effects); + collection, initial_entries_jsarray, + &if_may_have_side_effects); + } Goto(&exit); if (variant == kMap || variant == kWeakMap) { @@ -222,13 +106,37 @@ void BaseCollectionsAssembler::AddConstructorEntries( use_fast_loop = Int32FalseConstant(); Goto(&allocate_table); } + BIND(&if_exception_during_fast_iteration); + { + // In case an exception is thrown during collection population, materialize + // the iterator and execute the iterator closing protocol. It might be + // non-trivial in case a "return" callback is added somewhere in the + // iterator's prototype chain. + TNode<NativeContext> native_context = LoadNativeContext(context); + var_iterator_object = CreateArrayIterator( + native_context, UncheckedCast<JSArray>(initial_entries), + IterationKind::kEntries); + Goto(&if_exception); + } } BIND(&slow_loop); { - AddConstructorEntriesFromIterable(variant, context, native_context, - collection, initial_entries); + AddConstructorEntriesFromIterable( + variant, context, native_context, collection, initial_entries, + &if_exception, &var_iterator_object, &var_exception); Goto(&exit); } + BIND(&if_exception); + { + TNode<HeapObject> message = GetPendingMessage(); + SetPendingMessage(TheHoleConstant()); + // iterator.next field is not used by IteratorCloseOnException. 
+ TorqueStructIteratorRecord iterator = {var_iterator_object.value(), {}}; + IteratorCloseOnException(context, iterator); + CallRuntime(Runtime::kReThrowWithMessage, context, var_exception.value(), + message); + Unreachable(); + } BIND(&exit); } @@ -306,20 +214,22 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromFastJSArray( void BaseCollectionsAssembler::AddConstructorEntriesFromIterable( Variant variant, TNode<Context> context, TNode<Context> native_context, - TNode<Object> collection, TNode<Object> iterable) { - Label exit(this), loop(this), if_exception(this, Label::kDeferred); + TNode<Object> collection, TNode<Object> iterable, Label* if_exception, + TVariable<JSReceiver>* var_iterator_object, + TVariable<Object>* var_exception) { + Label exit(this), loop(this); CSA_DCHECK(this, Word32BinaryNot(IsNullOrUndefined(iterable))); TNode<Object> add_func = GetAddFunction(variant, context, collection); IteratorBuiltinsAssembler iterator_assembler(this->state()); TorqueStructIteratorRecord iterator = iterator_assembler.GetIterator(context, iterable); + *var_iterator_object = iterator.object; CSA_DCHECK(this, Word32BinaryNot(IsUndefined(iterator.object))); TNode<Map> fast_iterator_result_map = CAST( LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX)); - TVARIABLE(Object, var_exception); Goto(&loop); BIND(&loop); @@ -329,18 +239,9 @@ void BaseCollectionsAssembler::AddConstructorEntriesFromIterable( TNode<Object> next_value = iterator_assembler.IteratorValue( context, next, fast_iterator_result_map); AddConstructorEntry(variant, context, collection, add_func, next_value, - nullptr, &if_exception, &var_exception); + nullptr, if_exception, var_exception); Goto(&loop); } - BIND(&if_exception); - { - TNode<HeapObject> message = GetPendingMessage(); - SetPendingMessage(TheHoleConstant()); - IteratorCloseOnException(context, iterator); - CallRuntime(Runtime::kReThrowWithMessage, context, var_exception.value(), - message); - Unreachable(); - } BIND(&exit); } @@ -523,16 +424,28 @@ TNode<IntPtrT> BaseCollectionsAssembler::EstimatedInitialSize( [=] { return IntPtrConstant(0); }); } -void BaseCollectionsAssembler::GotoIfCannotBeWeakKey( - const TNode<Object> obj, Label* if_cannot_be_weak_key) { - GotoIf(TaggedIsSmi(obj), if_cannot_be_weak_key); +// https://tc39.es/proposal-symbols-as-weakmap-keys/#sec-canbeheldweakly-abstract-operation +void BaseCollectionsAssembler::GotoIfCannotBeHeldWeakly( + const TNode<Object> obj, Label* if_cannot_be_held_weakly) { + Label check_symbol_key(this); + Label end(this); + GotoIf(TaggedIsSmi(obj), if_cannot_be_held_weakly); TNode<Uint16T> instance_type = LoadMapInstanceType(LoadMap(CAST(obj))); - GotoIfNot(IsJSReceiverInstanceType(instance_type), if_cannot_be_weak_key); + GotoIfNot(IsJSReceiverInstanceType(instance_type), &check_symbol_key); // TODO(v8:12547) Shared structs and arrays should only be able to point // to shared values in weak collections. For now, disallow them as weak // collection keys. 
- GotoIf(IsJSSharedStructInstanceType(instance_type), if_cannot_be_weak_key); - GotoIf(IsJSSharedArrayInstanceType(instance_type), if_cannot_be_weak_key); + GotoIf(IsJSSharedStructInstanceType(instance_type), if_cannot_be_held_weakly); + GotoIf(IsJSSharedArrayInstanceType(instance_type), if_cannot_be_held_weakly); + Goto(&end); + Bind(&check_symbol_key); + GotoIfNot(HasHarmonySymbolAsWeakmapKeyFlag(), if_cannot_be_held_weakly); + GotoIfNot(IsSymbolInstanceType(instance_type), if_cannot_be_held_weakly); + TNode<Uint32T> flags = LoadSymbolFlags(CAST(obj)); + GotoIf(Word32And(flags, Symbol::IsInPublicSymbolTableBit::kMask), + if_cannot_be_held_weakly); + Goto(&end); + Bind(&end); } TNode<Map> BaseCollectionsAssembler::GetInitialCollectionPrototype( @@ -2414,67 +2327,6 @@ TF_BUILTIN(FindOrderedHashSetEntry, CollectionsBuiltinsAssembler) { Return(SmiConstant(-1)); } -class WeakCollectionsBuiltinsAssembler : public BaseCollectionsAssembler { - public: - explicit WeakCollectionsBuiltinsAssembler(compiler::CodeAssemblerState* state) - : BaseCollectionsAssembler(state) {} - - protected: - void AddEntry(TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index, - TNode<Object> key, TNode<Object> value, - TNode<IntPtrT> number_of_elements); - - TNode<HeapObject> AllocateTable(Variant variant, - TNode<IntPtrT> at_least_space_for) override; - - // Generates and sets the identity for a JSRececiver. - TNode<Smi> CreateIdentityHash(TNode<Object> receiver); - TNode<IntPtrT> EntryMask(TNode<IntPtrT> capacity); - - // Builds code that finds the EphemeronHashTable entry for a {key} using the - // comparison code generated by {key_compare}. The key index is returned if - // the {key} is found. - using KeyComparator = - std::function<void(TNode<Object> entry_key, Label* if_same)>; - TNode<IntPtrT> FindKeyIndex(TNode<HeapObject> table, TNode<IntPtrT> key_hash, - TNode<IntPtrT> entry_mask, - const KeyComparator& key_compare); - - // Builds code that finds an EphemeronHashTable entry available for a new - // entry. - TNode<IntPtrT> FindKeyIndexForInsertion(TNode<HeapObject> table, - TNode<IntPtrT> key_hash, - TNode<IntPtrT> entry_mask); - - // Builds code that finds the EphemeronHashTable entry with key that matches - // {key} and returns the entry's key index. If {key} cannot be found, jumps to - // {if_not_found}. 
- TNode<IntPtrT> FindKeyIndexForKey(TNode<HeapObject> table, TNode<Object> key, - TNode<IntPtrT> hash, - TNode<IntPtrT> entry_mask, - Label* if_not_found); - - TNode<Word32T> InsufficientCapacityToAdd(TNode<IntPtrT> capacity, - TNode<IntPtrT> number_of_elements, - TNode<IntPtrT> number_of_deleted); - TNode<IntPtrT> KeyIndexFromEntry(TNode<IntPtrT> entry); - - TNode<IntPtrT> LoadNumberOfElements(TNode<EphemeronHashTable> table, - int offset); - TNode<IntPtrT> LoadNumberOfDeleted(TNode<EphemeronHashTable> table, - int offset = 0); - TNode<EphemeronHashTable> LoadTable(TNode<JSWeakCollection> collection); - TNode<IntPtrT> LoadTableCapacity(TNode<EphemeronHashTable> table); - - void RemoveEntry(TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index, - TNode<IntPtrT> number_of_elements); - TNode<BoolT> ShouldRehash(TNode<IntPtrT> number_of_elements, - TNode<IntPtrT> number_of_deleted); - TNode<Word32T> ShouldShrink(TNode<IntPtrT> capacity, - TNode<IntPtrT> number_of_elements); - TNode<IntPtrT> ValueIndexFromKeyIndex(TNode<IntPtrT> key_index); -}; - void WeakCollectionsBuiltinsAssembler::AddEntry( TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index, TNode<Object> key, TNode<Object> value, TNode<IntPtrT> number_of_elements) { @@ -2490,6 +2342,25 @@ void WeakCollectionsBuiltinsAssembler::AddEntry( SmiFromIntPtr(number_of_elements)); } +TNode<IntPtrT> WeakCollectionsBuiltinsAssembler::GetHash( + const TNode<HeapObject> key, Label* if_no_hash) { + TVARIABLE(IntPtrT, var_hash); + Label if_symbol(this); + Label return_result(this); + GotoIfNot(IsJSReceiver(key), &if_symbol); + var_hash = LoadJSReceiverIdentityHash(CAST(key), if_no_hash); + Goto(&return_result); + Bind(&if_symbol); + CSA_DCHECK(this, IsSymbol(key)); + CSA_DCHECK(this, Word32BinaryNot( + Word32And(LoadSymbolFlags(CAST(key)), + Symbol::IsInPublicSymbolTableBit::kMask))); + var_hash = ChangeInt32ToIntPtr(LoadNameHash(CAST(key), nullptr)); + Goto(&return_result); + Bind(&return_result); + return var_hash.value(); +} + TNode<HeapObject> WeakCollectionsBuiltinsAssembler::AllocateTable( Variant variant, TNode<IntPtrT> at_least_space_for) { // See HashTable::New(). 
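[Editorial note, not part of the patch: the GetHash and GotoIfCannotBeHeldWeakly changes above implement the CanBeHeldWeakly abstract operation from the symbols-as-weakmap-keys proposal, under which objects and non-registered symbols qualify as weak keys while symbols in the global registry do not. A minimal TypeScript sketch of the observable behavior this enables:]

// Sketch of the semantics only. Assumes V8 runs with the corresponding
// harmony flag enabled (presumably --harmony-symbol-as-weakmap-key, going
// by the HasHarmonySymbolAsWeakmapKeyFlag check above); the permissive key
// type is used because older TS lib definitions constrain keys to objects.
const wm = new WeakMap<any, number>();
wm.set({}, 1);              // JSReceiver keys are always allowed
wm.set(Symbol('local'), 2); // non-registered symbols can now be held weakly
try {
  wm.set(Symbol.for('shared'), 3); // registry symbols fail CanBeHeldWeakly
} catch (err) {
  // Symbols created via Symbol.for() live in the public symbol table
  // (IsInPublicSymbolTableBit above), so V8 rejects them with a TypeError.
  console.log(err instanceof TypeError); // true
}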
@@ -2715,18 +2586,17 @@ TF_BUILTIN(WeakMapLookupHashIndex, WeakCollectionsBuiltinsAssembler) { auto table = Parameter<EphemeronHashTable>(Descriptor::kTable); auto key = Parameter<Object>(Descriptor::kKey); - Label if_cannot_be_weak_key(this); + Label if_cannot_be_held_weakly(this); - GotoIfCannotBeWeakKey(key, &if_cannot_be_weak_key); + GotoIfCannotBeHeldWeakly(key, &if_cannot_be_held_weakly); - TNode<IntPtrT> hash = - LoadJSReceiverIdentityHash(CAST(key), &if_cannot_be_weak_key); + TNode<IntPtrT> hash = GetHash(CAST(key), &if_cannot_be_held_weakly); TNode<IntPtrT> capacity = LoadTableCapacity(table); TNode<IntPtrT> key_index = FindKeyIndexForKey( - table, key, hash, EntryMask(capacity), &if_cannot_be_weak_key); + table, key, hash, EntryMask(capacity), &if_cannot_be_held_weakly); Return(SmiTag(ValueIndexFromKeyIndex(key_index))); - BIND(&if_cannot_be_weak_key); + BIND(&if_cannot_be_held_weakly); Return(SmiConstant(-1)); } @@ -2781,23 +2651,22 @@ TF_BUILTIN(WeakCollectionDelete, WeakCollectionsBuiltinsAssembler) { auto collection = Parameter<JSWeakCollection>(Descriptor::kCollection); auto key = Parameter<Object>(Descriptor::kKey); - Label call_runtime(this), if_cannot_be_weak_key(this); + Label call_runtime(this), if_cannot_be_held_weakly(this); - GotoIfCannotBeWeakKey(key, &if_cannot_be_weak_key); + GotoIfCannotBeHeldWeakly(key, &if_cannot_be_held_weakly); - TNode<IntPtrT> hash = - LoadJSReceiverIdentityHash(CAST(key), &if_cannot_be_weak_key); + TNode<IntPtrT> hash = GetHash(CAST(key), &if_cannot_be_held_weakly); TNode<EphemeronHashTable> table = LoadTable(collection); TNode<IntPtrT> capacity = LoadTableCapacity(table); TNode<IntPtrT> key_index = FindKeyIndexForKey( - table, key, hash, EntryMask(capacity), &if_cannot_be_weak_key); + table, key, hash, EntryMask(capacity), &if_cannot_be_held_weakly); TNode<IntPtrT> number_of_elements = LoadNumberOfElements(table, -1); GotoIf(ShouldShrink(capacity, number_of_elements), &call_runtime); RemoveEntry(table, key_index, number_of_elements); Return(TrueConstant()); - BIND(&if_cannot_be_weak_key); + BIND(&if_cannot_be_held_weakly); Return(FalseConstant()); BIND(&call_runtime); @@ -2810,10 +2679,10 @@ TF_BUILTIN(WeakCollectionDelete, WeakCollectionsBuiltinsAssembler) { TF_BUILTIN(WeakCollectionSet, WeakCollectionsBuiltinsAssembler) { auto context = Parameter<Context>(Descriptor::kContext); auto collection = Parameter<JSWeakCollection>(Descriptor::kCollection); - auto key = Parameter<JSReceiver>(Descriptor::kKey); + auto key = Parameter<HeapObject>(Descriptor::kKey); auto value = Parameter<Object>(Descriptor::kValue); - CSA_DCHECK(this, IsJSReceiver(key)); + CSA_DCHECK(this, Word32Or(IsJSReceiver(key), IsSymbol(key))); Label call_runtime(this), if_no_hash(this), if_not_found(this); @@ -2821,7 +2690,7 @@ TF_BUILTIN(WeakCollectionSet, WeakCollectionsBuiltinsAssembler) { TNode<IntPtrT> capacity = LoadTableCapacity(table); TNode<IntPtrT> entry_mask = EntryMask(capacity); - TVARIABLE(IntPtrT, var_hash, LoadJSReceiverIdentityHash(key, &if_no_hash)); + TVARIABLE(IntPtrT, var_hash, GetHash(key, &if_no_hash)); TNode<IntPtrT> key_index = FindKeyIndexForKey(table, key, var_hash.value(), entry_mask, &if_not_found); @@ -2830,6 +2699,7 @@ TF_BUILTIN(WeakCollectionSet, WeakCollectionsBuiltinsAssembler) { BIND(&if_no_hash); { + CSA_DCHECK(this, IsJSReceiver(key)); var_hash = SmiUntag(CreateIdentityHash(key)); Goto(&if_not_found); } @@ -2881,7 +2751,7 @@ TF_BUILTIN(WeakMapPrototypeSet, WeakCollectionsBuiltinsAssembler) { "WeakMap.prototype.set"); Label 
throw_invalid_key(this); - GotoIfCannotBeWeakKey(key, &throw_invalid_key); + GotoIfCannotBeHeldWeakly(key, &throw_invalid_key); Return( CallBuiltin(Builtin::kWeakCollectionSet, context, receiver, key, value)); @@ -2899,7 +2769,7 @@ TF_BUILTIN(WeakSetPrototypeAdd, WeakCollectionsBuiltinsAssembler) { "WeakSet.prototype.add"); Label throw_invalid_value(this); - GotoIfCannotBeWeakKey(value, &throw_invalid_value); + GotoIfCannotBeHeldWeakly(value, &throw_invalid_value); Return(CallBuiltin(Builtin::kWeakCollectionSet, context, receiver, value, TrueConstant())); diff --git a/deps/v8/src/builtins/builtins-collections-gen.h b/deps/v8/src/builtins/builtins-collections-gen.h index a132557e3cd0a4..6dd2381ddd3743 100644 --- a/deps/v8/src/builtins/builtins-collections-gen.h +++ b/deps/v8/src/builtins/builtins-collections-gen.h @@ -20,6 +20,191 @@ void BranchIfIterableWithOriginalValueSetIterator( TNode<Context> context, compiler::CodeAssemblerLabel* if_true, compiler::CodeAssemblerLabel* if_false); +class BaseCollectionsAssembler : public CodeStubAssembler { + public: + explicit BaseCollectionsAssembler(compiler::CodeAssemblerState* state) + : CodeStubAssembler(state) {} + + virtual ~BaseCollectionsAssembler() = default; + + void GotoIfCannotBeHeldWeakly(const TNode<Object> obj, + Label* if_cannot_be_held_weakly); + + protected: + enum Variant { kMap, kSet, kWeakMap, kWeakSet }; + + // Adds an entry to a collection. For Maps, properly handles extracting the + // key and value from the entry (see LoadKeyValue()). + void AddConstructorEntry(Variant variant, TNode<Context> context, + TNode<Object> collection, TNode<Object> add_function, + TNode<Object> key_value, + Label* if_may_have_side_effects = nullptr, + Label* if_exception = nullptr, + TVariable<Object>* var_exception = nullptr); + + // Adds constructor entries to a collection. Choosing a fast path when + // possible. + void AddConstructorEntries(Variant variant, TNode<Context> context, + TNode<Context> native_context, + TNode<HeapObject> collection, + TNode<Object> initial_entries); + + // Fast path for adding constructor entries. Assumes the entries are a fast + // JS array (see CodeStubAssembler::BranchIfFastJSArray()). + void AddConstructorEntriesFromFastJSArray(Variant variant, + TNode<Context> context, + TNode<Context> native_context, + TNode<Object> collection, + TNode<JSArray> fast_jsarray, + Label* if_may_have_side_effects); + + // Adds constructor entries to a collection using the iterator protocol. + void AddConstructorEntriesFromIterable( + Variant variant, TNode<Context> context, TNode<Context> native_context, + TNode<Object> collection, TNode<Object> iterable, Label* if_exception, + TVariable<JSReceiver>* var_iterator, TVariable<Object>* var_exception); + + // Constructs a collection instance. Choosing a fast path when possible. + TNode<JSObject> AllocateJSCollection(TNode<Context> context, + TNode<JSFunction> constructor, + TNode<JSReceiver> new_target); + + // Fast path for constructing a collection instance if the constructor + // function has not been modified. + TNode<JSObject> AllocateJSCollectionFast(TNode<JSFunction> constructor); + + // Fallback for constructing a collection instance if the constructor function + // has been modified. + TNode<JSObject> AllocateJSCollectionSlow(TNode<Context> context, + TNode<JSFunction> constructor, + TNode<JSReceiver> new_target); + + // Allocates the backing store for a collection. 
+ virtual TNode<HeapObject> AllocateTable( + Variant variant, TNode<IntPtrT> at_least_space_for) = 0; + + // Main entry point for a collection constructor builtin. + void GenerateConstructor(Variant variant, + Handle<String> constructor_function_name, + TNode<Object> new_target, TNode<IntPtrT> argc, + TNode<Context> context); + + // Retrieves the collection function that adds an entry. `set` for Maps and + // `add` for Sets. + TNode<Object> GetAddFunction(Variant variant, TNode<Context> context, + TNode<Object> collection); + + // Retrieves the collection constructor function. + TNode<JSFunction> GetConstructor(Variant variant, + TNode<Context> native_context); + + // Retrieves the initial collection function that adds an entry. Should only + // be called when it is certain that a collection prototype's map hasn't been + // changed. + TNode<JSFunction> GetInitialAddFunction(Variant variant, + TNode<Context> native_context); + + // Checks whether {collection}'s initial add/set function has been modified + // (depending on {variant}, loaded from {native_context}). + void GotoIfInitialAddFunctionModified(Variant variant, + TNode<NativeContext> native_context, + TNode<HeapObject> collection, + Label* if_modified); + + // Gets root index for the name of the add/set function. + RootIndex GetAddFunctionNameIndex(Variant variant); + + // Retrieves the offset to access the backing table from the collection. + int GetTableOffset(Variant variant); + + // Estimates the number of entries the collection will have after adding the + // entries passed in the constructor. AllocateTable() can use this to avoid + // the time of growing/rehashing when adding the constructor entries. + TNode<IntPtrT> EstimatedInitialSize(TNode<Object> initial_entries, + TNode<BoolT> is_fast_jsarray); + + // Determines whether the collection's prototype has been modified. + TNode<BoolT> HasInitialCollectionPrototype(Variant variant, + TNode<Context> native_context, + TNode<Object> collection); + + // Gets the initial prototype map for given collection {variant}. + TNode<Map> GetInitialCollectionPrototype(Variant variant, + TNode<Context> native_context); + + // Loads an element from a fixed array. If the element is the hole, returns + // `undefined`. + TNode<Object> LoadAndNormalizeFixedArrayElement(TNode<FixedArray> elements, + TNode<IntPtrT> index); + + // Loads an element from a fixed double array. If the element is the hole, + // returns `undefined`. + TNode<Object> LoadAndNormalizeFixedDoubleArrayElement( + TNode<HeapObject> elements, TNode<IntPtrT> index); +}; + +class WeakCollectionsBuiltinsAssembler : public BaseCollectionsAssembler { + public: + explicit WeakCollectionsBuiltinsAssembler(compiler::CodeAssemblerState* state) + : BaseCollectionsAssembler(state) {} + + protected: + void AddEntry(TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index, + TNode<Object> key, TNode<Object> value, + TNode<IntPtrT> number_of_elements); + + TNode<HeapObject> AllocateTable(Variant variant, + TNode<IntPtrT> at_least_space_for) override; + + TNode<IntPtrT> GetHash(const TNode<HeapObject> key, Label* if_no_hash); + // Generates and sets the identity for a JSReceiver. + TNode<Smi> CreateIdentityHash(TNode<Object> receiver); + TNode<IntPtrT> EntryMask(TNode<IntPtrT> capacity); + + // Builds code that finds the EphemeronHashTable entry for a {key} using the + // comparison code generated by {key_compare}. The key index is returned if + // the {key} is found. 
+ using KeyComparator = + std::function<void(TNode<Object> entry_key, Label* if_same)>; + TNode<IntPtrT> FindKeyIndex(TNode<HeapObject> table, TNode<IntPtrT> key_hash, + TNode<IntPtrT> entry_mask, + const KeyComparator& key_compare); + + // Builds code that finds an EphemeronHashTable entry available for a new + // entry. + TNode<IntPtrT> FindKeyIndexForInsertion(TNode<HeapObject> table, + TNode<IntPtrT> key_hash, + TNode<IntPtrT> entry_mask); + + // Builds code that finds the EphemeronHashTable entry with key that matches + // {key} and returns the entry's key index. If {key} cannot be found, jumps to + // {if_not_found}. + TNode<IntPtrT> FindKeyIndexForKey(TNode<HeapObject> table, TNode<Object> key, + TNode<IntPtrT> hash, + TNode<IntPtrT> entry_mask, + Label* if_not_found); + + TNode<Word32T> InsufficientCapacityToAdd(TNode<IntPtrT> capacity, + TNode<IntPtrT> number_of_elements, + TNode<IntPtrT> number_of_deleted); + TNode<IntPtrT> KeyIndexFromEntry(TNode<IntPtrT> entry); + + TNode<IntPtrT> LoadNumberOfElements(TNode<EphemeronHashTable> table, + int offset); + TNode<IntPtrT> LoadNumberOfDeleted(TNode<EphemeronHashTable> table, + int offset = 0); + TNode<EphemeronHashTable> LoadTable(TNode<JSWeakCollection> collection); + TNode<IntPtrT> LoadTableCapacity(TNode<EphemeronHashTable> table); + + void RemoveEntry(TNode<EphemeronHashTable> table, TNode<IntPtrT> key_index, + TNode<IntPtrT> number_of_elements); + TNode<BoolT> ShouldRehash(TNode<IntPtrT> number_of_elements, + TNode<IntPtrT> number_of_deleted); + TNode<Word32T> ShouldShrink(TNode<IntPtrT> capacity, + TNode<IntPtrT> number_of_elements); + TNode<IntPtrT> ValueIndexFromKeyIndex(TNode<IntPtrT> key_index); +}; + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/builtins/builtins-constructor-gen.cc b/deps/v8/src/builtins/builtins-constructor-gen.cc index bbc12a5b650a79..cb2b79bef7ce72 100644 --- a/deps/v8/src/builtins/builtins-constructor-gen.cc +++ b/deps/v8/src/builtins/builtins-constructor-gen.cc @@ -596,13 +596,16 @@ TNode<HeapObject> ConstructorBuiltinsAssembler::CreateShallowObjectLiteral( static_assert(JSObject::kMaxInstanceSize < kMaxRegularHeapObjectSize); TNode<IntPtrT> instance_size = TimesTaggedSize(LoadMapInstanceSizeInWords(boilerplate_map)); + TNode<IntPtrT> aligned_instance_size = + AlignToAllocationAlignment(instance_size); TNode<IntPtrT> allocation_size = instance_size; bool needs_allocation_memento = v8_flags.allocation_site_pretenuring; if (needs_allocation_memento) { DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL); // Prepare for inner-allocating the AllocationMemento. - allocation_size = - IntPtrAdd(instance_size, IntPtrConstant(AllocationMemento::kSize)); + allocation_size = IntPtrAdd(aligned_instance_size, + IntPtrConstant(ALIGN_TO_ALLOCATION_ALIGNMENT( + AllocationMemento::kSize))); } TNode<HeapObject> copy = @@ -620,7 +623,7 @@ TNode<HeapObject> ConstructorBuiltinsAssembler::CreateShallowObjectLiteral( // Initialize the AllocationMemento before potential GCs due to heap number // allocation when copying the in-object properties. 
if (needs_allocation_memento) { - InitializeAllocationMemento(copy, instance_size, allocation_site); + InitializeAllocationMemento(copy, aligned_instance_size, allocation_site); } { diff --git a/deps/v8/src/builtins/builtins-dataview.cc b/deps/v8/src/builtins/builtins-dataview.cc index 6bdc561361a06a..8f3f789e0c32d4 100644 --- a/deps/v8/src/builtins/builtins-dataview.cc +++ b/deps/v8/src/builtins/builtins-dataview.cc @@ -75,7 +75,7 @@ BUILTIN(DataViewConstructor) { bool length_tracking = false; if (byte_length->IsUndefined(isolate)) { view_byte_length = buffer_byte_length - view_byte_offset; - length_tracking = array_buffer->is_resizable(); + length_tracking = array_buffer->is_resizable_by_js(); } else { // 11. Else, // a. Set byteLengthChecked be ? ToIndex(byteLength). @@ -113,7 +113,7 @@ BUILTIN(DataViewConstructor) { raw.SetEmbedderField(i, Smi::zero()); } raw.set_bit_field(0); - raw.set_is_backed_by_rab(array_buffer->is_resizable() && + raw.set_is_backed_by_rab(array_buffer->is_resizable_by_js() && !array_buffer->is_shared()); raw.set_is_length_tracking(length_tracking); raw.set_byte_length(0); diff --git a/deps/v8/src/builtins/builtins-definitions.h b/deps/v8/src/builtins/builtins-definitions.h index 175acbd49588ed..c656b02e75566c 100644 --- a/deps/v8/src/builtins/builtins-definitions.h +++ b/deps/v8/src/builtins/builtins-definitions.h @@ -198,6 +198,7 @@ namespace internal { \ /* Maglev Compiler */ \ ASM(MaglevOnStackReplacement, OnStackReplacement) \ + ASM(MaglevOutOfLinePrologue, NoContext) \ \ /* Code life-cycle */ \ TFC(CompileLazy, JSTrampoline) \ @@ -619,6 +620,8 @@ namespace internal { /* JSON */ \ CPP(JsonParse) \ CPP(JsonStringify) \ + CPP(JsonRawJson) \ + CPP(JsonIsRawJson) \ \ /* Web snapshots */ \ CPP(WebSnapshotSerialize) \ @@ -1021,7 +1024,7 @@ namespace internal { \ TFS(AsyncGeneratorResolve, kGenerator, kValue, kDone) \ TFS(AsyncGeneratorReject, kGenerator, kValue) \ - TFS(AsyncGeneratorYield, kGenerator, kValue, kIsCaught) \ + TFS(AsyncGeneratorYieldWithAwait, kGenerator, kValue, kIsCaught) \ TFS(AsyncGeneratorReturn, kGenerator, kValue, kIsCaught) \ TFS(AsyncGeneratorResumeNext, kGenerator) \ \ @@ -1046,8 +1049,8 @@ namespace internal { kValue) \ TFJ(AsyncGeneratorAwaitRejectClosure, kJSArgcReceiverSlots + 1, kReceiver, \ kValue) \ - TFJ(AsyncGeneratorYieldResolveClosure, kJSArgcReceiverSlots + 1, kReceiver, \ - kValue) \ + TFJ(AsyncGeneratorYieldWithAwaitResolveClosure, kJSArgcReceiverSlots + 1, \ + kReceiver, kValue) \ TFJ(AsyncGeneratorReturnClosedResolveClosure, kJSArgcReceiverSlots + 1, \ kReceiver, kValue) \ TFJ(AsyncGeneratorReturnClosedRejectClosure, kJSArgcReceiverSlots + 1, \ @@ -1094,7 +1097,8 @@ namespace internal { TFS(CreateDataProperty, kReceiver, kKey, kValue) \ ASM(MemCopyUint8Uint8, CCall) \ ASM(MemMove, CCall) \ - TFC(FindNonDefaultConstructor, FindNonDefaultConstructor) \ + TFC(FindNonDefaultConstructorOrConstruct, \ + FindNonDefaultConstructorOrConstruct) \ \ /* Trace */ \ CPP(IsTraceCategoryEnabled) \ @@ -1750,6 +1754,16 @@ namespace internal { CPP(DisplayNamesPrototypeResolvedOptions) \ /* ecma402 #sec-Intl.DisplayNames.supportedLocalesOf */ \ CPP(DisplayNamesSupportedLocalesOf) \ + /* ecma402 #sec-intl-durationformat-constructor */ \ + CPP(DurationFormatConstructor) \ + /* ecma402 #sec-Intl.DurationFormat.prototype.format */ \ + CPP(DurationFormatPrototypeFormat) \ + /* ecma402 #sec-Intl.DurationFormat.prototype.formatToParts */ \ + CPP(DurationFormatPrototypeFormatToParts) \ + /* ecma402 #sec-Intl.DurationFormat.prototype.resolvedOptions */ 
\ + CPP(DurationFormatPrototypeResolvedOptions) \ + /* ecma402 #sec-Intl.DurationFormat.supportedLocalesOf */ \ + CPP(DurationFormatSupportedLocalesOf) \ /* ecma402 #sec-intl.getcanonicallocales */ \ CPP(IntlGetCanonicalLocales) \ /* ecma402 #sec-intl.supportedvaluesof */ \ diff --git a/deps/v8/src/builtins/builtins-internal-gen.cc b/deps/v8/src/builtins/builtins-internal-gen.cc index eb7790d3fc4d1e..907b41e8da641d 100644 --- a/deps/v8/src/builtins/builtins-internal-gen.cc +++ b/deps/v8/src/builtins/builtins-internal-gen.cc @@ -1305,10 +1305,17 @@ void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) { // architectures. #ifndef V8_TARGET_ARCH_X64 void Builtins::Generate_MaglevOnStackReplacement(MacroAssembler* masm) { - using D = OnStackReplacementDescriptor; + using D = + i::CallInterfaceDescriptorFor<Builtin::kMaglevOnStackReplacement>::type; static_assert(D::kParameterCount == 1); masm->Trap(); } +void Builtins::Generate_MaglevOutOfLinePrologue(MacroAssembler* masm) { + using D = + i::CallInterfaceDescriptorFor<Builtin::kMaglevOutOfLinePrologue>::type; + static_assert(D::kParameterCount == 0); + masm->Trap(); +} #endif // V8_TARGET_ARCH_X64 // ES6 [[Get]] operation. @@ -1502,7 +1509,7 @@ TF_BUILTIN(InstantiateAsmJs, CodeStubAssembler) { TailCallJSCode(code, context, function, new_target, arg_count); } -TF_BUILTIN(FindNonDefaultConstructor, CodeStubAssembler) { +TF_BUILTIN(FindNonDefaultConstructorOrConstruct, CodeStubAssembler) { auto this_function = Parameter<JSFunction>(Descriptor::kThisFunction); auto new_target = Parameter<Object>(Descriptor::kNewTarget); auto context = Parameter<Context>(Descriptor::kContext); @@ -1511,8 +1518,9 @@ TF_BUILTIN(FindNonDefaultConstructor, CodeStubAssembler) { Label found_default_base_ctor(this, &constructor), found_something_else(this, &constructor); - FindNonDefaultConstructor(context, this_function, constructor, - &found_default_base_ctor, &found_something_else); + FindNonDefaultConstructorOrConstruct(context, this_function, constructor, + &found_default_base_ctor, + &found_something_else); BIND(&found_default_base_ctor); { diff --git a/deps/v8/src/builtins/builtins-intl.cc b/deps/v8/src/builtins/builtins-intl.cc index 452e55120742c9..0410c3ef91fe38 100644 --- a/deps/v8/src/builtins/builtins-intl.cc +++ b/deps/v8/src/builtins/builtins-intl.cc @@ -21,6 +21,7 @@ #include "src/objects/js-collator-inl.h" #include "src/objects/js-date-time-format-inl.h" #include "src/objects/js-display-names-inl.h" +#include "src/objects/js-duration-format-inl.h" #include "src/objects/js-list-format-inl.h" #include "src/objects/js-locale-inl.h" #include "src/objects/js-number-format-inl.h" @@ -383,6 +384,51 @@ BUILTIN(DisplayNamesPrototypeOf) { JSDisplayNames::Of(isolate, holder, code_obj)); } +// Intl.DurationFormat +BUILTIN(DurationFormatConstructor) { + HandleScope scope(isolate); + + return DisallowCallConstructor<JSDurationFormat>( + args, isolate, v8::Isolate::UseCounterFeature::kDurationFormat, + "Intl.DurationFormat"); +} + +BUILTIN(DurationFormatPrototypeResolvedOptions) { + HandleScope scope(isolate); + CHECK_RECEIVER(JSDurationFormat, holder, + "Intl.DurationFormat.prototype.resolvedOptions"); + return *JSDurationFormat::ResolvedOptions(isolate, holder); +} + +BUILTIN(DurationFormatSupportedLocalesOf) { + HandleScope scope(isolate); + Handle<Object> locales = args.atOrUndefined(isolate, 1); + Handle<Object> options = args.atOrUndefined(isolate, 2); + + RETURN_RESULT_OR_FAILURE( + isolate, Intl::SupportedLocalesOf( + isolate, 
"Intl.DurationFormat.supportedLocalesOf", + JSDurationFormat::GetAvailableLocales(), locales, options)); +} + +BUILTIN(DurationFormatPrototypeFormat) { + HandleScope scope(isolate); + CHECK_RECEIVER(JSDurationFormat, holder, + "Intl.DurationFormat.prototype.format"); + Handle<Object> value = args.atOrUndefined(isolate, 1); + RETURN_RESULT_OR_FAILURE(isolate, + JSDurationFormat::Format(isolate, holder, value)); +} + +BUILTIN(DurationFormatPrototypeFormatToParts) { + HandleScope scope(isolate); + CHECK_RECEIVER(JSDurationFormat, holder, + "Intl.DurationFormat.prototype.formatToParts"); + Handle<Object> value = args.atOrUndefined(isolate, 1); + RETURN_RESULT_OR_FAILURE( + isolate, JSDurationFormat::FormatToParts(isolate, holder, value)); +} + // Intl.NumberFormat BUILTIN(NumberFormatConstructor) { diff --git a/deps/v8/src/builtins/builtins-json.cc b/deps/v8/src/builtins/builtins-json.cc index 896a45389c4abd..5ac1cd2bfc57e8 100644 --- a/deps/v8/src/builtins/builtins-json.cc +++ b/deps/v8/src/builtins/builtins-json.cc @@ -7,6 +7,7 @@ #include "src/json/json-parser.h" #include "src/json/json-stringifier.h" #include "src/logging/counters.h" +#include "src/objects/js-raw-json.h" #include "src/objects/objects-inl.h" namespace v8 { @@ -37,5 +38,19 @@ BUILTIN(JsonStringify) { JsonStringify(isolate, object, replacer, indent)); } +// https://tc39.es/proposal-json-parse-with-source/#sec-json.rawjson +BUILTIN(JsonRawJson) { + HandleScope scope(isolate); + Handle<Object> text = args.atOrUndefined(isolate, 1); + RETURN_RESULT_OR_FAILURE(isolate, JSRawJson::Create(isolate, text)); +} + +// https://tc39.es/proposal-json-parse-with-source/#sec-json.israwjson +BUILTIN(JsonIsRawJson) { + HandleScope scope(isolate); + Handle<Object> text = args.atOrUndefined(isolate, 1); + return isolate->heap()->ToBoolean(text->IsJSRawJson()); +} + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/builtins/builtins-object.cc b/deps/v8/src/builtins/builtins-object.cc index d636801ad6b576..e6d26ef7c75f33 100644 --- a/deps/v8/src/builtins/builtins-object.cc +++ b/deps/v8/src/builtins/builtins-object.cc @@ -4,10 +4,9 @@ #include "src/builtins/builtins-utils-inl.h" #include "src/builtins/builtins.h" -#include "src/codegen/code-factory.h" #include "src/common/message-template.h" +#include "src/execution/isolate.h" #include "src/heap/heap-inl.h" // For ToBoolean. TODO(jkummerow): Drop. 
-#include "src/logging/counters.h" #include "src/objects/keys.h" #include "src/objects/lookup.h" #include "src/objects/objects-inl.h" @@ -150,6 +149,10 @@ Object ObjectLookupAccessor(Isolate* isolate, Handle<Object> object, return ObjectLookupAccessor(isolate, prototype, key, component); } + case LookupIterator::WASM_OBJECT: + THROW_NEW_ERROR_RETURN_FAILURE( + isolate, NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); + case LookupIterator::INTEGER_INDEXED_EXOTIC: case LookupIterator::DATA: return ReadOnlyRoots(isolate).undefined_value(); diff --git a/deps/v8/src/builtins/builtins-regexp-gen.h b/deps/v8/src/builtins/builtins-regexp-gen.h index ef606463143a6d..f89449356116f4 100644 --- a/deps/v8/src/builtins/builtins-regexp-gen.h +++ b/deps/v8/src/builtins/builtins-regexp-gen.h @@ -138,6 +138,9 @@ class RegExpBuiltinsAssembler : public CodeStubAssembler { TNode<BoolT> FastFlagGetterUnicode(TNode<JSRegExp> regexp) { return FastFlagGetter(regexp, JSRegExp::kUnicode); } + TNode<BoolT> FastFlagGetterUnicodeSets(TNode<JSRegExp> regexp) { + return FastFlagGetter(regexp, JSRegExp::kUnicodeSets); + } TNode<BoolT> SlowFlagGetter(TNode<Context> context, TNode<Object> regexp, JSRegExp::Flag flag); TNode<BoolT> FlagGetter(TNode<Context> context, TNode<Object> regexp, diff --git a/deps/v8/src/builtins/builtins-typed-array-gen.cc b/deps/v8/src/builtins/builtins-typed-array-gen.cc index 805837f722b46d..4d2f0b541b7b50 100644 --- a/deps/v8/src/builtins/builtins-typed-array-gen.cc +++ b/deps/v8/src/builtins/builtins-typed-array-gen.cc @@ -64,8 +64,8 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer( StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kBitFieldOffset, Int32Constant(bitfield_value)); - StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kByteLengthOffset, - UintPtrConstant(0)); + StoreBoundedSizeToObject(buffer, JSArrayBuffer::kRawByteLengthOffset, + UintPtrConstant(0)); StoreSandboxedPointerToObject(buffer, JSArrayBuffer::kBackingStoreOffset, EmptyBackingStoreBufferConstant()); StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kExtensionOffset, @@ -141,7 +141,7 @@ TF_BUILTIN(TypedArrayPrototypeByteLength, TypedArrayBuiltinsAssembler) { // Default to zero if the {receiver}s buffer was detached. TNode<UintPtrT> byte_length = Select<UintPtrT>( IsDetachedBuffer(receiver_buffer), [=] { return UintPtrConstant(0); }, - [=] { return LoadJSArrayBufferViewRawByteLength(receiver_array); }); + [=] { return LoadJSArrayBufferViewByteLength(receiver_array); }); Return(ChangeUintPtrToTagged(byte_length)); } } diff --git a/deps/v8/src/builtins/builtins-weak-refs.cc b/deps/v8/src/builtins/builtins-weak-refs.cc index aee330b4bd4f3c..a944159f247bec 100644 --- a/deps/v8/src/builtins/builtins-weak-refs.cc +++ b/deps/v8/src/builtins/builtins-weak-refs.cc @@ -9,6 +9,7 @@ namespace v8 { namespace internal { +// https://tc39.es/proposal-symbols-as-weakmap-keys/#sec-finalization-registry.prototype.unregister BUILTIN(FinalizationRegistryUnregister) { HandleScope scope(isolate); const char* method_name = "FinalizationRegistry.prototype.unregister"; @@ -24,16 +25,16 @@ BUILTIN(FinalizationRegistryUnregister) { Handle<Object> unregister_token = args.atOrUndefined(isolate, 1); - // 4. If Type(unregisterToken) is not Object, throw a TypeError exception. - if (!unregister_token->IsJSReceiver()) { + // 4. If CanBeHeldWeakly(unregisterToken) is false, throw a TypeError + // exception. 
+ if (!unregister_token->CanBeHeldWeakly()) { THROW_NEW_ERROR_RETURN_FAILURE( - isolate, - NewTypeError(MessageTemplate::kWeakRefsUnregisterTokenMustBeObject, - unregister_token)); + isolate, NewTypeError(MessageTemplate::kInvalidWeakRefsUnregisterToken, + unregister_token)); } bool success = JSFinalizationRegistry::Unregister( - finalization_registry, Handle<JSReceiver>::cast(unregister_token), + finalization_registry, Handle<HeapObject>::cast(unregister_token), isolate); return *isolate->factory()->ToBoolean(success); diff --git a/deps/v8/src/builtins/cast.tq b/deps/v8/src/builtins/cast.tq index 5cb6e0bc92e0ed..0d347e3dd35cdc 100644 --- a/deps/v8/src/builtins/cast.tq +++ b/deps/v8/src/builtins/cast.tq @@ -697,6 +697,21 @@ Cast<JSReceiver|Null>(o: HeapObject): JSReceiver|Null } } +Cast<JSReceiver|Symbol>(implicit context: Context)(o: Object): JSReceiver|Symbol + labels CastError { + typeswitch (o) { + case (o: JSReceiver): { + return o; + } + case (o: Symbol): { + return o; + } + case (Object): { + goto CastError; + } + } +} + Cast<Smi|PromiseReaction>(o: Object): Smi|PromiseReaction labels CastError { typeswitch (o) { case (o: Smi): { diff --git a/deps/v8/src/builtins/finalization-registry.tq b/deps/v8/src/builtins/finalization-registry.tq index 38cae7ed20b9ff..4e4b4be068669f 100644 --- a/deps/v8/src/builtins/finalization-registry.tq +++ b/deps/v8/src/builtins/finalization-registry.tq @@ -1,6 +1,7 @@ // Copyright 2020 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#include "src/builtins/builtins-collections-gen.h" namespace runtime { extern runtime @@ -15,6 +16,9 @@ extern transitioning macro RemoveFinalizationRegistryCellFromUnregisterTokenMap( JSFinalizationRegistry, WeakCell): void; +extern macro WeakCollectionsBuiltinsAssembler::GotoIfCannotBeHeldWeakly(JSAny): + void labels NotWeakKey; + macro SplitOffTail(weakCell: WeakCell): WeakCell|Undefined { const weakCellTail = weakCell.next; weakCell.next = Undefined; @@ -125,6 +129,7 @@ FinalizationRegistryConstructor( return finalizationRegistry; } +// https://tc39.es/proposal-symbols-as-weakmap-keys/#sec-finalization-registry.prototype.register transitioning javascript builtin FinalizationRegistryRegister( js-implicit context: NativeContext, receiver: JSAny)(...arguments): JSAny { @@ -134,33 +139,32 @@ FinalizationRegistryRegister( ThrowTypeError( MessageTemplate::kIncompatibleMethodReceiver, 'FinalizationRegistry.prototype.register', receiver); - // 3. If Type(target) is not Object, throw a TypeError exception. - const target = Cast<JSReceiver>(arguments[0]) otherwise ThrowTypeError( - MessageTemplate::kWeakRefsRegisterTargetMustBeObject); + // 3. If CanBeHeldWeakly(target) is false, throw a TypeError exception. + GotoIfCannotBeHeldWeakly(arguments[0]) + otherwise ThrowTypeError(MessageTemplate::kInvalidWeakRefsRegisterTarget); + + const target = UnsafeCast<(JSReceiver | Symbol)>(arguments[0]); const heldValue = arguments[1]; // 4. If SameValue(target, heldValue), throw a TypeError exception. if (target == heldValue) { ThrowTypeError( MessageTemplate::kWeakRefsRegisterTargetAndHoldingsMustNotBeSame); } - // 5. If Type(unregisterToken) is not Object, + // 5. If CanBeHeldWeakly(unregisterToken) is false, // a. If unregisterToken is not undefined, throw a TypeError exception. // b. Set unregisterToken to empty. 
const unregisterTokenRaw = arguments[2]; - let unregisterToken: JSReceiver|Undefined; - typeswitch (unregisterTokenRaw) { - case (Undefined): { - unregisterToken = Undefined; - } - case (unregisterTokenObj: JSReceiver): { - unregisterToken = unregisterTokenObj; - } - case (JSAny): deferred { - ThrowTypeError( - MessageTemplate::kWeakRefsUnregisterTokenMustBeObject, - unregisterTokenRaw); - } + let unregisterToken: JSReceiver|Undefined|Symbol; + + if (IsUndefined(unregisterTokenRaw)) { + unregisterToken = Undefined; + } else { + GotoIfCannotBeHeldWeakly(unregisterTokenRaw) + otherwise ThrowTypeError( + MessageTemplate::kInvalidWeakRefsUnregisterToken, unregisterTokenRaw); + unregisterToken = UnsafeCast<(JSReceiver | Symbol)>(unregisterTokenRaw); } + // 6. Let cell be the Record { [[WeakRefTarget]] : target, [[HeldValue]]: // heldValue, [[UnregisterToken]]: unregisterToken }. // Allocate the WeakCell object in the old space, because 1) WeakCell weakness diff --git a/deps/v8/src/builtins/ia32/builtins-ia32.cc b/deps/v8/src/builtins/ia32/builtins-ia32.cc index ecb1bd9136279e..5535c68c7edb4e 100644 --- a/deps/v8/src/builtins/ia32/builtins-ia32.cc +++ b/deps/v8/src/builtins/ia32/builtins-ia32.cc @@ -1111,7 +1111,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( { // Restore actual argument count. __ movd(eax, xmm0); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, xmm1); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, xmm1); } __ bind(&compile_lazy); @@ -1640,8 +1640,7 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { // requires the stack to only contain valid frames. __ Drop(2); __ movd(arg_count, saved_arg_count); // Restore actual argument count. - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, - saved_feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, saved_feedback_vector); __ Trap(); } diff --git a/deps/v8/src/builtins/loong64/builtins-loong64.cc b/deps/v8/src/builtins/loong64/builtins-loong64.cc index c147a80d1a3b06..157c45c4325269 100644 --- a/deps/v8/src/builtins/loong64/builtins-loong64.cc +++ b/deps/v8/src/builtins/loong64/builtins-loong64.cc @@ -1022,7 +1022,7 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { // Ensure the flags is not allocated again. // Drop the frame created by the baseline call. 
__ Pop(ra, fp); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ Trap(); } @@ -1275,7 +1275,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ jmp(&after_stack_check_interrupt); __ bind(&flags_need_processing); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ bind(&is_baseline); { @@ -2997,8 +2997,8 @@ void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address, __ St_w(s2, MemOperand(s5, kLevelOffset)); Label profiler_enabled, done_api_call; - __ li(t7, ExternalReference::is_profiling_address(isolate)); - __ Ld_b(t7, MemOperand(t7, 0)); + __ Ld_b(t7, __ ExternalReferenceAsOperand( + ExternalReference::is_profiling_address(isolate), t7)); __ Branch(&profiler_enabled, ne, t7, Operand(zero_reg)); #ifdef V8_RUNTIME_CALL_STATS __ li(t7, ExternalReference::address_of_runtime_stats_flag()); diff --git a/deps/v8/src/builtins/mips64/builtins-mips64.cc b/deps/v8/src/builtins/mips64/builtins-mips64.cc index f164f1d9b05ee1..598ef531a9e607 100644 --- a/deps/v8/src/builtins/mips64/builtins-mips64.cc +++ b/deps/v8/src/builtins/mips64/builtins-mips64.cc @@ -1021,7 +1021,7 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { // Ensure the flags is not allocated again. // Drop the frame created by the baseline call. __ Pop(ra, fp); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ Trap(); } @@ -1270,7 +1270,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ jmp(&after_stack_check_interrupt); __ bind(&flags_need_processing); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ bind(&is_baseline); { // Load the feedback vector from the closure. @@ -3014,8 +3014,8 @@ void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address, __ Sw(s2, MemOperand(s5, kLevelOffset)); Label profiler_enabled, done_api_call; - __ li(t9, ExternalReference::is_profiling_address(isolate)); - __ Lb(t9, MemOperand(t9, 0)); + __ Lb(t9, __ ExternalReferenceAsOperand( + ExternalReference::is_profiling_address(isolate), t9)); __ Branch(&profiler_enabled, ne, t9, Operand(zero_reg)); #ifdef V8_RUNTIME_CALL_STATS __ li(t9, ExternalReference::address_of_runtime_stats_flag()); diff --git a/deps/v8/src/builtins/ppc/builtins-ppc.cc b/deps/v8/src/builtins/ppc/builtins-ppc.cc index 4993dfccde45b6..8decc418fce299 100644 --- a/deps/v8/src/builtins/ppc/builtins-ppc.cc +++ b/deps/v8/src/builtins/ppc/builtins-ppc.cc @@ -433,7 +433,7 @@ void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source, ConstantPoolUnavailableScope constant_pool_unavailable(masm); __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3); } @@ -845,7 +845,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type, // r8: argv __ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used. 
__ push(r0); - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { __ li(kConstantPoolRegister, Operand::Zero()); __ push(kConstantPoolRegister); } @@ -1292,13 +1292,13 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { ASM_CODE_COMMENT_STRING(masm, "Optimized marker check"); // Drop the frame created by the baseline call. - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { __ Pop(r0, fp, kConstantPoolRegister); } else { __ Pop(r0, fp); } __ mtlr(r0); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ Trap(); } @@ -1568,7 +1568,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ jmp(&after_stack_check_interrupt); __ bind(&flags_need_processing); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ bind(&is_baseline); { @@ -3055,7 +3055,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, ConstantPoolUnavailableScope constant_pool_unavailable(masm); __ Move(ip, pending_handler_entrypoint_address); __ LoadU64(ip, MemOperand(ip)); - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { __ Move(kConstantPoolRegister, pending_handler_constant_pool_address); __ LoadU64(kConstantPoolRegister, MemOperand(kConstantPoolRegister)); } diff --git a/deps/v8/src/builtins/promise-any.tq b/deps/v8/src/builtins/promise-any.tq index ffb285a06a8355..7e707e649f11bc 100644 --- a/deps/v8/src/builtins/promise-any.tq +++ b/deps/v8/src/builtins/promise-any.tq @@ -119,7 +119,19 @@ PromiseAnyRejectElementClosure( kPromiseAnyRejectElementRemainingSlot); // 9. Set errors[index] to x. - const newCapacity = IntPtrMax(SmiUntag(remainingElementsCount), index + 1); + + // The max computation below is an optimization to avoid excessive allocations + // in the case of input promises being asynchronously rejected in ascending + // index order. + // + // Note that subtracting 1 from remainingElementsCount is intentional. The + // value of remainingElementsCount is 1 larger than the actual value during + // iteration. So in the case of synchronous rejection, newCapacity is the + // correct size by subtracting 1. In the case of asynchronous rejection this + // is 1 smaller than the correct size, but is not incorrect as it is maxed + // with index + 1. + const newCapacity = + IntPtrMax(SmiUntag(remainingElementsCount) - 1, index + 1); if (newCapacity > errors.length_intptr) deferred { errors = ExtractFixedArray(errors, 0, errors.length_intptr, newCapacity); *ContextSlot( @@ -306,6 +318,7 @@ Reject(JSAny) { PromiseAnyRejectElementContextSlots:: kPromiseAnyRejectElementErrorsSlot); + check(errors.length == index - 1); const error = ConstructAggregateError(errors); // 3. Return ThrowCompletion(error). goto Reject(error); diff --git a/deps/v8/src/builtins/promise-resolve.tq b/deps/v8/src/builtins/promise-resolve.tq index 114b1e922b1f48..c5ad5eefd5c42b 100644 --- a/deps/v8/src/builtins/promise-resolve.tq +++ b/deps/v8/src/builtins/promise-resolve.tq @@ -161,6 +161,12 @@ ResolvePromise(implicit context: Context)( } goto Slow; } label Slow deferred { + // Skip "then" lookup for Wasm objects as they are opaque. + @if(V8_ENABLE_WEBASSEMBLY) + if (Is<WasmObject>(resolution)) { + return FulfillPromise(promise, resolution); + } + // 9. Let then be Get(resolution, "then"). // 10. 
If then is an abrupt completion, then try { diff --git a/deps/v8/src/builtins/reflect.tq b/deps/v8/src/builtins/reflect.tq index 477c586403c022..c0591e7f6c92c2 100644 --- a/deps/v8/src/builtins/reflect.tq +++ b/deps/v8/src/builtins/reflect.tq @@ -38,6 +38,13 @@ transitioning javascript builtin ReflectSetPrototypeOf( const objectJSReceiver = Cast<JSReceiver>(object) otherwise ThrowTypeError( MessageTemplate::kCalledOnNonObject, 'Reflect.setPrototypeOf'); + + // Wasm objects do not support having prototypes. + @if(V8_ENABLE_WEBASSEMBLY) + if (Is<WasmObject>(objectJSReceiver)) { + ThrowTypeError(MessageTemplate::kWasmObjectsAreOpaque); + } + typeswitch (proto) { case (proto: JSReceiver|Null): { return object::ObjectSetPrototypeOfDontThrow(objectJSReceiver, proto); diff --git a/deps/v8/src/builtins/regexp-match-all.tq b/deps/v8/src/builtins/regexp-match-all.tq index 1f9aa1819f497a..48f12218b94889 100644 --- a/deps/v8/src/builtins/regexp-match-all.tq +++ b/deps/v8/src/builtins/regexp-match-all.tq @@ -52,9 +52,10 @@ transitioning macro RegExpPrototypeMatchAllImpl(implicit context: Context)( // 10. Else, let global be false. global = FastFlagGetter(matcherRegExp, Flag::kGlobal); - // 11. If flags contains "u", let fullUnicode be true. + // 11. If flags contains "u" or "v", let fullUnicode be true. // 12. Else, let fullUnicode be false. - unicode = FastFlagGetter(matcherRegExp, Flag::kUnicode); + unicode = FastFlagGetter(matcherRegExp, Flag::kUnicode) || + FastFlagGetter(matcherRegExp, Flag::kUnicodeSets); } case (Object): { // 4. Let C be ? SpeciesConstructor(R, %RegExp%). @@ -81,12 +82,15 @@ transitioning macro RegExpPrototypeMatchAllImpl(implicit context: Context)( const globalIndex: Smi = StringIndexOf(flagsString, globalCharString, 0); global = globalIndex != -1; - // 11. If flags contains "u", let fullUnicode be true. + // 11. If flags contains "u" or "v", let fullUnicode be true. // 12. Else, let fullUnicode be false. 
const unicodeCharString = StringConstant('u'); + const unicodeSetsCharString = StringConstant('v'); const unicodeIndex: Smi = StringIndexOf(flagsString, unicodeCharString, 0); - unicode = unicodeIndex != -1; + const unicodeSetsIndex: Smi = + StringIndexOf(flagsString, unicodeSetsCharString, 0); + unicode = unicodeIndex != -1 || unicodeSetsIndex != -1; } } diff --git a/deps/v8/src/builtins/regexp-match.tq b/deps/v8/src/builtins/regexp-match.tq index 3da132636a9e86..ff2dcf2c33c0fc 100644 --- a/deps/v8/src/builtins/regexp-match.tq +++ b/deps/v8/src/builtins/regexp-match.tq @@ -33,7 +33,8 @@ transitioning macro RegExpPrototypeMatchBody(implicit context: Context)( } dcheck(isGlobal); - const isUnicode: bool = FlagGetter(regexp, Flag::kUnicode, isFastPath); + const isUnicode: bool = FlagGetter(regexp, Flag::kUnicode, isFastPath) || + FlagGetter(regexp, Flag::kUnicodeSets, isFastPath); StoreLastIndex(regexp, 0, isFastPath); diff --git a/deps/v8/src/builtins/regexp-replace.tq b/deps/v8/src/builtins/regexp-replace.tq index d26f8d6949cd16..ecd99af0320598 100644 --- a/deps/v8/src/builtins/regexp-replace.tq +++ b/deps/v8/src/builtins/regexp-replace.tq @@ -132,7 +132,7 @@ transitioning macro RegExpReplaceFastString(implicit context: Context)( const global: bool = fastRegexp.global; if (global) { - unicode = fastRegexp.unicode; + unicode = fastRegexp.unicode || fastRegexp.unicodeSets; fastRegexp.lastIndex = 0; } diff --git a/deps/v8/src/builtins/riscv/builtins-riscv.cc b/deps/v8/src/builtins/riscv/builtins-riscv.cc index 0571568ebb3b6c..7cba83d4f0c704 100644 --- a/deps/v8/src/builtins/riscv/builtins-riscv.cc +++ b/deps/v8/src/builtins/riscv/builtins-riscv.cc @@ -1078,7 +1078,7 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { ASM_CODE_COMMENT_STRING(masm, "Optimized marker check"); // Drop the frame created by the baseline call. __ Pop(ra, fp); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ Trap(); } @@ -1319,7 +1319,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ Branch(&after_stack_check_interrupt); __ bind(&flags_need_processing); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ bind(&is_baseline); { // Load the feedback vector from the closure. diff --git a/deps/v8/src/builtins/s390/builtins-s390.cc b/deps/v8/src/builtins/s390/builtins-s390.cc index e5c5e9d0437ed1..b9bc2bab3384b1 100644 --- a/deps/v8/src/builtins/s390/builtins-s390.cc +++ b/deps/v8/src/builtins/s390/builtins-s390.cc @@ -1335,7 +1335,7 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { // Drop the frame created by the baseline call. 
__ Pop(r14, fp); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ Trap(); } @@ -1599,7 +1599,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ jmp(&after_stack_check_interrupt); __ bind(&flags_need_processing); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector); __ bind(&is_baseline); { diff --git a/deps/v8/src/builtins/typed-array-slice.tq b/deps/v8/src/builtins/typed-array-slice.tq index a1dba47bacf36f..0ac50a3ec7563d 100644 --- a/deps/v8/src/builtins/typed-array-slice.tq +++ b/deps/v8/src/builtins/typed-array-slice.tq @@ -36,8 +36,20 @@ macro FastCopy( otherwise unreachable; const srcPtr: RawPtr = src.data_ptr + Convert<intptr>(startOffset); - dcheck(countBytes <= dest.byte_length); - dcheck(countBytes <= src.byte_length - startOffset); + @if(DEBUG) { + const srcLength = + LoadJSTypedArrayLengthAndCheckDetached(src) otherwise unreachable; + const srcByteLength = GetTypedArrayElementsInfo(src).CalculateByteLength( + srcLength) otherwise unreachable; + + const destLength = + LoadJSTypedArrayLengthAndCheckDetached(dest) otherwise unreachable; + const destByteLength = GetTypedArrayElementsInfo(dest).CalculateByteLength( + destLength) otherwise unreachable; + + dcheck(countBytes <= destByteLength); + dcheck(countBytes <= srcByteLength - startOffset); + } if (IsSharedArrayBuffer(src.buffer)) { // SABs need a relaxed memmove to preserve atomicity. diff --git a/deps/v8/src/builtins/typed-array-sort.tq b/deps/v8/src/builtins/typed-array-sort.tq index 37760ccb5c15f8..5793bf92ba8768 100644 --- a/deps/v8/src/builtins/typed-array-sort.tq +++ b/deps/v8/src/builtins/typed-array-sort.tq @@ -79,32 +79,16 @@ TypedArrayMergeSort(implicit context: Context)( return Undefined; } -// https://tc39.github.io/ecma262/#sec-%typedarray%.prototype.sort -transitioning javascript builtin TypedArrayPrototypeSort( - js-implicit context: NativeContext, - receiver: JSAny)(...arguments): JSTypedArray { - // 1. If comparefn is not undefined and IsCallable(comparefn) is false, - // throw a TypeError exception. - const comparefnObj: JSAny = arguments[0]; - if (comparefnObj != Undefined && !Is<Callable>(comparefnObj)) { - ThrowTypeError(MessageTemplate::kBadSortComparisonFunction, comparefnObj); - } - - // 2. Let obj be the this value. - const obj: JSAny = receiver; - - // 3. Let buffer be ? ValidateTypedArray(obj). - // 4. Let len be IntegerIndexedObjectLength(obj). - let len: uintptr = - ValidateTypedArrayAndGetLength(context, obj, kBuiltinNameSort); - const array: JSTypedArray = UnsafeCast<JSTypedArray>(obj); - +// Shared between TypedArray.prototype.sort and TypedArray.prototype.toSorted. +transitioning macro TypedArraySortCommon(implicit context: Context)( + array: JSTypedArray, len: uintptr, comparefnArg: Undefined|Callable, + isSort: constexpr bool): JSTypedArray { // Arrays of length 1 or less are considered sorted. 
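The new debug-only block in FastCopy above recomputes both byte lengths from the arrays' current lengths instead of trusting cached byte_length fields, since a resizable ArrayBuffer can shrink between the caller's validation and this copy. A rough stand-in for the guarded computation, assuming a simple length-times-element-size model rather than V8's ElementsInfo (the typed-array-sort hunk resumes below):

#include <cstddef>
#include <cstdint>

// Hypothetical stand-in for CalculateByteLength: element count times element
// size, reporting failure on overflow instead of silently wrapping.
bool CalculateByteLength(size_t length, size_t element_size, size_t* out) {
  if (element_size != 0 && length > SIZE_MAX / element_size) return false;
  *out = length * element_size;
  return true;
}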
if (len < 2) return array; // Default sorting is done in C++ using std::sort - if (comparefnObj == Undefined) { - return TypedArraySortFast(context, obj); + if (comparefnArg == Undefined) { + return TypedArraySortFast(context, array); } // Throw rather than crash if the TypedArray's size exceeds max FixedArray @@ -116,7 +100,7 @@ transitioning javascript builtin TypedArrayPrototypeSort( } const comparefn: Callable = - Cast<Callable>(comparefnObj) otherwise unreachable; + Cast<Callable>(comparefnArg) otherwise unreachable; const accessor: TypedArrayAccessor = GetTypedArrayAccessor(array.elements_kind); @@ -134,25 +118,58 @@ transitioning javascript builtin TypedArrayPrototypeSort( TypedArrayMergeSort(work2, 0, len, work1, array, comparefn); - // Reload the length; it's possible the backing ArrayBuffer has been resized - // to be OOB or detached, in which case treat it as length 0. - - try { - const newLen = LoadJSTypedArrayLengthAndCheckDetached(array) - otherwise DetachedOrOutOfBounds; - if (newLen < len) { - len = newLen; + // If this is TypedArray.prototype.sort, reload the length; it's possible the + // backing ArrayBuffer has been resized to be OOB or detached, in which case + // treat it as length 0. + // + // This is not possible in TypedArray.prototype.toSorted as the array being + // sorted is a copy that has not yet escaped to user script. + + let writebackLen = len; + if constexpr (isSort) { + try { + const newLen = LoadJSTypedArrayLengthAndCheckDetached(array) + otherwise DetachedOrOutOfBounds; + if (newLen < writebackLen) { + writebackLen = newLen; + } + } label DetachedOrOutOfBounds { + writebackLen = 0; } - } label DetachedOrOutOfBounds { - len = 0; + } else { + dcheck( + writebackLen == + LoadJSTypedArrayLengthAndCheckDetached(array) otherwise unreachable); } // work1 contains the sorted numbers. Write them back. - for (let i: uintptr = 0; i < len; ++i) { + for (let i: uintptr = 0; i < writebackLen; ++i) { accessor.StoreNumeric( context, array, i, UnsafeCast<Numeric>(work1.objects[i])); } return array; } + +// https://tc39.github.io/ecma262/#sec-%typedarray%.prototype.sort +transitioning javascript builtin TypedArrayPrototypeSort( + js-implicit context: NativeContext, + receiver: JSAny)(...arguments): JSTypedArray { + // 1. If comparefn is not undefined and IsCallable(comparefn) is false, + // throw a TypeError exception. + const comparefnObj: JSAny = arguments[0]; + const comparefn = Cast<(Undefined | Callable)>(comparefnObj) otherwise + ThrowTypeError(MessageTemplate::kBadSortComparisonFunction, comparefnObj); + + // 2. Let obj be the this value. + const obj: JSAny = receiver; + + // 3. Let buffer be ? ValidateTypedArray(obj). + // 4. Let len be IntegerIndexedObjectLength(obj). + const len: uintptr = + ValidateTypedArrayAndGetLength(context, obj, kBuiltinNameSort); + const array: JSTypedArray = UnsafeCast<JSTypedArray>(obj); + const kIsSort: constexpr bool = true; + return TypedArraySortCommon(array, len, comparefn, kIsSort); +} } diff --git a/deps/v8/src/builtins/typed-array-to-sorted.tq b/deps/v8/src/builtins/typed-array-to-sorted.tq new file mode 100644 index 00000000000000..c73821a9fc5b04 --- /dev/null +++ b/deps/v8/src/builtins/typed-array-to-sorted.tq @@ -0,0 +1,59 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
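The refactoring above extracts the sort core into TypedArraySortCommon so the new %TypedArray%.prototype.toSorted (whose file begins above and continues below) can reuse it. Only the sort() path reloads the length after the comparator runs, because a user comparator can detach or resize the source buffer; toSorted sorts a copy that user code has not yet seen. As a loose analogy, with std::vector standing in for a TypedArray:

#include <algorithm>
#include <vector>

// Loose analogy only: toSorted() snapshots the elements, then reuses the same
// in-place sort that sort() uses; the copy cannot be detached or resized by
// the comparator because it has not escaped to user code yet.
std::vector<double> ToSorted(const std::vector<double>& src) {
  std::vector<double> copy = src;       // TypedArrayCreateSameType + memmove
  std::sort(copy.begin(), copy.end());  // shared TypedArraySortCommon path
  return copy;
}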
+ +namespace typed_array { +// https://tc39.es/proposal-change-array-by-copy/#sec-%typedarray%.prototype.toSorted + +const kBuiltinNameToSorted: constexpr string = + '%TypedArray%.prototype.toSorted'; + +transitioning javascript builtin TypedArrayPrototypeToSorted( + js-implicit context: NativeContext, receiver: JSAny)(...arguments): JSAny { + // 1. If comparefn is not undefined and IsCallable(comparefn) is false, + // throw a TypeError exception. + const comparefnObj: JSAny = arguments[0]; + const comparefn = Cast<(Undefined | Callable)>(comparefnObj) otherwise + ThrowTypeError(MessageTemplate::kBadSortComparisonFunction, comparefnObj); + + // 2. Let O be the this value. + const obj: JSAny = receiver; + + // 3. Perform ? ValidateTypedArray(O). + // 4. Let buffer be obj.[[ViewedArrayBuffer]]. + // 5. Let len be O.[[ArrayLength]]. + const len: uintptr = + ValidateTypedArrayAndGetLength(context, obj, kBuiltinNameToSorted); + const array: JSTypedArray = UnsafeCast<JSTypedArray>(obj); + + // 6. Let A be ? TypedArrayCreateSameType(O, « 𝔽(len) »). + const copy = TypedArrayCreateSameType(array, len); + + // 7. NOTE: The following closure performs a numeric comparison rather than + // the string comparison used in 1.1.1.5. + // 8. Let SortCompare be a new Abstract Closure with parameters (x, y) that + // captures comparefn and buffer and performs the following steps when + // called: + // a. Return ? CompareTypedArrayElements(x, y, comparefn, buffer). + // 9. Let sortedList be ? SortIndexedProperties(obj, len, SortCompare, false). + // 10. Let j be 0. + // 11. Repeat, while j < len, + // a. Perform ! Set(A, ! ToString(𝔽(j)), sortedList[j], true). + // b. Set j to j + 1. + // 12. Return A. + + // Perform the sorting by copying the source TypedArray and sorting the copy + // in-place using the same code as TypedArray.prototype.sort + const info = GetTypedArrayElementsInfo(copy); + const countBytes: uintptr = + info.CalculateByteLength(len) otherwise unreachable; + if (IsSharedArrayBuffer(array.buffer)) { + CallCRelaxedMemmove(copy.data_ptr, array.data_ptr, countBytes); + } else { + CallCMemmove(copy.data_ptr, array.data_ptr, countBytes); + } + + const kIsSort: constexpr bool = false; + return TypedArraySortCommon(copy, len, comparefn, kIsSort); +} +} diff --git a/deps/v8/src/builtins/typed-array-with.tq b/deps/v8/src/builtins/typed-array-with.tq index 25b58a37188c7c..c14de29364defc 100644 --- a/deps/v8/src/builtins/typed-array-with.tq +++ b/deps/v8/src/builtins/typed-array-with.tq @@ -7,10 +7,8 @@ const kBuiltinNameWith: constexpr string = '%TypedArray%.prototype.with'; // https://tc39.es/proposal-change-array-by-copy/#sec-%typedarray%.prototype.with transitioning javascript builtin TypedArrayPrototypeWith( - js-implicit context: NativeContext, receiver: JSAny)(...arguments): JSAny { - const index = arguments[0]; - let value: JSAny = arguments[1]; - + js-implicit context: NativeContext, receiver: JSAny)( + index: JSAny, valueArg: JSAny): JSAny { try { // 1. Let O be the this value. // 2. Perform ? ValidateTypedArray(O). @@ -21,12 +19,13 @@ transitioning javascript builtin TypedArrayPrototypeWith( otherwise IsDetachedOrOutOfBounds; const originalLength = attachedArrayAndLength.length; + let value: JSAny; if (IsBigInt64ElementsKind(array.elements_kind)) { // 4. If O.[[ContentType]] is BigInt, set value to ? ToBigInt(value). - value = ToBigInt(context, value); + value = ToBigInt(context, valueArg); } else { // 5. Else, set value to ? ToNumber(value).
- value = ToNumber_Inline(value); + value = ToNumber_Inline(valueArg); } // 6. Let relativeIndex be ? ToIntegerOrInfinity(index). diff --git a/deps/v8/src/builtins/wasm.tq b/deps/v8/src/builtins/wasm.tq index dbf80befe2d7dd..03e117025ce3a5 100644 --- a/deps/v8/src/builtins/wasm.tq +++ b/deps/v8/src/builtins/wasm.tq @@ -746,14 +746,17 @@ macro IsWord16WasmArrayMap(map: Map): bool { } // Non-standard experimental feature. +// Arguments: array, start, count. transitioning javascript builtin ExperimentalWasmConvertArrayToString( - js-implicit context: NativeContext)( - array: JSAny, start: JSAny, count: JSAny): String { + js-implicit context: NativeContext)(...arguments): String { try { - const start = TryNumberToIntptr(start) otherwise goto InvalidArgument; - const count = TryNumberToIntptr(count) otherwise goto InvalidArgument; + if (arguments.length != 3) goto InvalidArgument; + const array = Cast<WasmArray>(arguments[0]) otherwise goto InvalidArgument; + const start = TryNumberToIntptr(arguments[1]) + otherwise goto InvalidArgument; + const count = TryNumberToIntptr(arguments[2]) + otherwise goto InvalidArgument; - const array = Cast<WasmArray>(array) otherwise goto InvalidArgument; if (!IsWord16WasmArrayMap(array.map)) goto InvalidArgument; const arrayContent = torque_internal::unsafe::NewConstSlice<char16>( array, kWasmArrayHeaderSize, Convert<intptr>(array.length)); @@ -768,16 +771,17 @@ transitioning javascript builtin ExperimentalWasmConvertArrayToString( } // Non-standard experimental feature. +// Arguments: string, sampleArray. transitioning javascript builtin ExperimentalWasmConvertStringToArray( - js-implicit context: NativeContext)( - string: JSAny, sampleArray: JSAny): WasmArray { + js-implicit context: NativeContext)(...arguments): WasmArray { try { + if (arguments.length != 2) goto InvalidArgument; + const string = Cast<String>(arguments[0]) otherwise goto InvalidArgument; const sampleArray = - Cast<WasmArray>(sampleArray) otherwise goto InvalidArgument; + Cast<WasmArray>(arguments[1]) otherwise goto InvalidArgument; const arrayMap = sampleArray.map; if (!IsWord16WasmArrayMap(arrayMap)) goto InvalidArgument; - const string = Cast<String>(string) otherwise goto InvalidArgument; const length = string.length; const result = diff --git a/deps/v8/src/builtins/weak-ref.tq b/deps/v8/src/builtins/weak-ref.tq index 56d3fc1c4314bf..051831698534ce 100644 --- a/deps/v8/src/builtins/weak-ref.tq +++ b/deps/v8/src/builtins/weak-ref.tq @@ -2,15 +2,18 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#include 'src/builtins/builtins-collections-gen.h' + namespace runtime { -extern runtime JSWeakRefAddToKeptObjects(implicit context: Context)(JSReceiver): - void; +extern runtime JSWeakRefAddToKeptObjects(implicit context: Context)( + JSReceiver | Symbol): void; } // namespace runtime namespace weakref { +// https://tc39.es/proposal-symbols-as-weakmap-keys/#sec-weak-ref-target transitioning javascript builtin WeakRefConstructor( js-implicit context: NativeContext, receiver: JSAny, newTarget: JSAny, @@ -19,15 +22,17 @@ WeakRefConstructor( if (newTarget == Undefined) { ThrowTypeError(MessageTemplate::kConstructorNotFunction, 'WeakRef'); } - // 2. If Type(target) is not Object, throw a TypeError exception. - const weakTarget = Cast<JSReceiver>(weakTarget) otherwise - ThrowTypeError( - MessageTemplate::kWeakRefsWeakRefConstructorTargetMustBeObject); + + // 2. If CanBeHeldWeakly(weakTarget) is false, throw a TypeError exception. 
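Under the symbols-as-weakmap-keys proposal, the old "target must be an object" check in WeakRef above becomes the spec's CanBeHeldWeakly predicate, whose implementing lines follow below: objects qualify, and so do symbols unless they came from the global symbol registry (Symbol.for). A sketch under a simplified value model; ValueKind and the function name are illustrative, not V8 API:

// Simplified model of the spec's CanBeHeldWeakly(v): objects qualify, as do
// symbols that are not in the global symbol registry (i.e. not Symbol.for).
enum class ValueKind {
  kObject, kUnregisteredSymbol, kRegisteredSymbol, kOtherPrimitive
};

bool CanBeHeldWeakly(ValueKind kind) {
  return kind == ValueKind::kObject || kind == ValueKind::kUnregisteredSymbol;
}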
+ GotoIfCannotBeHeldWeakly(weakTarget) otherwise ThrowTypeError( + MessageTemplate::kInvalidWeakRefsWeakRefConstructorTarget); + // 3. Let weakRef be ? OrdinaryCreateFromConstructor(NewTarget, // "%WeakRefPrototype%", « [[WeakRefTarget]] »). const map = GetDerivedMap(target, UnsafeCast<JSReceiver>(newTarget)); const weakRef = UnsafeCast<JSWeakRef>(AllocateFastOrSlowJSObjectFromMap(map)); // 4. Perform ! AddToKeptObjects(target). + const weakTarget = UnsafeCast<(JSReceiver | Symbol)>(weakTarget); runtime::JSWeakRefAddToKeptObjects(weakTarget); // 5. Set weakRef.[[WeakRefTarget]] to target. weakRef.target = weakTarget; @@ -52,7 +57,8 @@ WeakRefDeref(js-implicit context: NativeContext, receiver: JSAny)(): JSAny { if (target != Undefined) { // JSWeakRefAddToKeptObjects might allocate and cause a GC, but it // won't clear `target` since we hold it here on the stack. - runtime::JSWeakRefAddToKeptObjects(UnsafeCast<JSReceiver>(target)); + runtime::JSWeakRefAddToKeptObjects( + UnsafeCast<(JSReceiver | Symbol)>(target)); } return target; } diff --git a/deps/v8/src/builtins/x64/builtins-x64.cc b/deps/v8/src/builtins/x64/builtins-x64.cc index 03539e1caa29c8..6dfdffcdb84472 100644 --- a/deps/v8/src/builtins/x64/builtins-x64.cc +++ b/deps/v8/src/builtins/x64/builtins-x64.cc @@ -1197,8 +1197,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ int3(); // Should not return. __ bind(&flags_need_processing); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector, - closure); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector, closure); __ bind(&is_baseline); { @@ -1627,8 +1626,8 @@ void Builtins::Generate_BaselineOutOfLinePrologue(MacroAssembler* masm) { // return since we may do a runtime call along the way that requires the // stack to only contain valid frames. __ Drop(1); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( - flags, feedback_vector, closure, JumpMode::kPushAndReturn); + __ OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector, closure, + JumpMode::kPushAndReturn); __ Trap(); } @@ -2697,12 +2696,218 @@ void Builtins::Generate_BaselineOnStackReplacement(MacroAssembler* masm) { } void Builtins::Generate_MaglevOnStackReplacement(MacroAssembler* masm) { - using D = OnStackReplacementDescriptor; + using D = + i::CallInterfaceDescriptorFor<Builtin::kMaglevOnStackReplacement>::type; static_assert(D::kParameterCount == 1); OnStackReplacement(masm, OsrSourceTier::kMaglev, D::MaybeTargetCodeRegister()); } +// Called immediately at the start of Maglev-generated functions, with all +// state (register and stack) unchanged, except: +// +// - the stack slot byte size and +// - the tagged stack slot byte size +// +// are pushed as untagged arguments to the stack. This prologue builtin takes +// care of a few things that each Maglev function needs on entry: +// +// - the deoptimization check +// - tiering support (checking FeedbackVector flags) +// - the stack overflow / interrupt check +// - and finally, setting up the Maglev frame. +// +// If this builtin returns, the Maglev frame is fully set up and we are +// prepared for continued execution. Otherwise, we take one of multiple +// possible non-standard exit paths (deoptimization, tailcalling other code, or +// throwing a stack overflow exception).
+void Builtins::Generate_MaglevOutOfLinePrologue(MacroAssembler* masm) { + using D = + i::CallInterfaceDescriptorFor<Builtin::kMaglevOutOfLinePrologue>::type; + static_assert(D::kParameterCount == 0); + + // This builtin is called by Maglev code prior to any register mutations, and + // the only stack mutation is pushing the arguments for this builtin. In + // other words: + // + // - The register state is the same as when we entered the Maglev code object, + // i.e. set up for a standard JS call. + // - The caller has not yet set up a stack frame. + // - The caller has pushed the (untagged) stack parameters for this builtin. + + static constexpr int kStackParameterCount = 2; + static constexpr int kReturnAddressCount = 1; + static constexpr int kReturnAddressOffset = 0 * kSystemPointerSize; + static constexpr int kTaggedStackSlotBytesOffset = 1 * kSystemPointerSize; + static constexpr int kTotalStackSlotBytesOffset = 2 * kSystemPointerSize; + USE(kReturnAddressOffset); + USE(kTaggedStackSlotBytesOffset); + USE(kTotalStackSlotBytesOffset); + + // Scratch registers. Don't clobber regs related to the calling + // convention (e.g. kJavaScriptCallArgCountRegister). + const Register scratch0 = rcx; + const Register scratch1 = r9; + const Register scratch2 = rbx; + + Label deoptimize, optimize, call_stack_guard, call_stack_guard_return; + + // A modified version of BailoutIfDeoptimized that drops the builtin frame + // before deoptimizing. + { + static constexpr int kCodeStartToCodeDataContainerOffset = + Code::kCodeDataContainerOffset - Code::kHeaderSize; + __ LoadTaggedPointerField(scratch0, + Operand(kJavaScriptCallCodeStartRegister, + kCodeStartToCodeDataContainerOffset)); + __ testl( + FieldOperand(scratch0, CodeDataContainer::kKindSpecificFlagsOffset), + Immediate(1 << Code::kMarkedForDeoptimizationBit)); + __ j(not_zero, &deoptimize); + } + + // Tiering support. + const Register flags = scratch0; + const Register feedback_vector = scratch1; + { + __ LoadTaggedPointerField( + feedback_vector, + FieldOperand(kJSFunctionRegister, JSFunction::kFeedbackCellOffset)); + __ LoadTaggedPointerField( + feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset)); + __ AssertFeedbackVector(feedback_vector); + + __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( + flags, feedback_vector, CodeKind::MAGLEV, &optimize); + } + + // Good to go - set up the MAGLEV stack frame and return. + + // First, tear down to the caller frame. + const Register tagged_stack_slot_bytes = scratch1; + const Register total_stack_slot_bytes = scratch0; + const Register return_address = scratch2; + __ PopReturnAddressTo(return_address); + __ Pop(tagged_stack_slot_bytes); + __ Pop(total_stack_slot_bytes); + + __ EnterFrame(StackFrame::MAGLEV); + + // Save arguments in frame. + // TODO(leszeks): Consider eliding this frame if we don't make any calls + // that could clobber these registers. + __ Push(kContextRegister); + __ Push(kJSFunctionRegister); // Callee's JS function. + __ Push(kJavaScriptCallArgCountRegister); // Actual argument count. + + { + ASM_CODE_COMMENT_STRING(masm, " Stack/interrupt check"); + // Stack check. This folds the checks for both the interrupt stack limit + // check and the real stack limit into one by just checking for the + // interrupt limit. The interrupt limit is either equal to the real stack + // limit or tighter. By ensuring we have space until that limit after + // building the frame we can quickly precheck both at once. + // TODO(leszeks): Include a max call argument size here. 
+ __ Move(kScratchRegister, rsp); + __ subq(kScratchRegister, total_stack_slot_bytes); + __ cmpq(kScratchRegister, + __ StackLimitAsOperand(StackLimitKind::kInterruptStackLimit)); + __ j(below, &call_stack_guard); + __ bind(&call_stack_guard_return); + } + + // Initialize stack slots: + // + // - tagged slots are initialized with smi zero. + // - untagged slots are simply reserved without initialization. + // + // Tagged slots first. + const Register untagged_stack_slot_bytes = total_stack_slot_bytes; + { + Label next, loop_condition, loop_header; + + DCHECK_EQ(total_stack_slot_bytes, untagged_stack_slot_bytes); + __ subq(total_stack_slot_bytes, tagged_stack_slot_bytes); + + const Register smi_zero = rax; + DCHECK(!AreAliased(smi_zero, scratch0, scratch1, scratch2)); + __ Move(smi_zero, Smi::zero()); + + __ jmp(&loop_condition, Label::kNear); + + // TODO(leszeks): Consider filling with xmm + movdqa instead. + // TODO(v8:7700): Consider doing more than one push per loop iteration. + __ bind(&loop_header); + __ pushq(rax); + __ bind(&loop_condition); + __ subq(tagged_stack_slot_bytes, Immediate(kSystemPointerSize)); + __ j(greater_equal, &loop_header, Label::kNear); + + __ bind(&next); + } + + // Untagged slots second. + __ subq(rsp, untagged_stack_slot_bytes); + + // The "all-good" return location. This is the only spot where we actually + // return to the caller. + __ PushReturnAddressFrom(return_address); + __ ret(0); + + __ bind(&deoptimize); + { + // Drop the frame and jump to CompileLazyDeoptimizedCode. This is slightly + // fiddly due to the CET shadow stack (otherwise we could do a conditional + // Jump to the builtin). + __ Drop(kStackParameterCount + kReturnAddressCount); + __ Move(scratch0, + BUILTIN_CODE(masm->isolate(), CompileLazyDeoptimizedCode)); + __ LoadCodeObjectEntry(scratch0, scratch0); + __ PushReturnAddressFrom(scratch0); + __ ret(0); + } + + __ bind(&optimize); + { + __ Drop(kStackParameterCount + kReturnAddressCount); + __ AssertFunction(kJSFunctionRegister); + __ OptimizeCodeOrTailCallOptimizedCodeSlot( + flags, feedback_vector, kJSFunctionRegister, JumpMode::kPushAndReturn); + __ Trap(); + } + + __ bind(&call_stack_guard); + { + ASM_CODE_COMMENT_STRING(masm, "Stack/interrupt call"); + + // Push the MAGLEV code return address now, as if it had been pushed by the + // call to this builtin. + __ PushReturnAddressFrom(return_address); + + { + FrameScope inner_frame_scope(masm, StackFrame::INTERNAL); + __ SmiTag(total_stack_slot_bytes); + __ Push(total_stack_slot_bytes); + __ SmiTag(tagged_stack_slot_bytes); + __ Push(tagged_stack_slot_bytes); + // Save any registers that can be referenced by maglev::RegisterInput. + // TODO(leszeks): Only push those that are used by the graph. + __ Push(kJavaScriptCallNewTargetRegister); + // Push the frame size. + __ Push(total_stack_slot_bytes); + __ CallRuntime(Runtime::kStackGuardWithGap, 1); + __ Pop(kJavaScriptCallNewTargetRegister); + __ Pop(tagged_stack_slot_bytes); + __ SmiUntag(tagged_stack_slot_bytes); + __ Pop(total_stack_slot_bytes); + __ SmiUntag(total_stack_slot_bytes); + } + + __ PopReturnAddressTo(return_address); + __ jmp(&call_stack_guard_return); + } +} + #if V8_ENABLE_WEBASSEMBLY void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) { // The function index was pushed to the stack by the caller as int32. 
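The stack/interrupt check in the Maglev prologue above folds two comparisons into one: the interrupt limit is always at or above the real stack limit, so comparing the frame's low-water mark against the interrupt limit alone covers both cases, and the slow path (Runtime::kStackGuardWithGap) sorts out which condition actually fired. A scalar sketch of the fast-path test, ignoring pointer wraparound for simplicity:

#include <cstdint>

// One compare covers both checks: interrupt_limit >= real_stack_limit, so if
// sp - frame_bytes clears the interrupt limit it clears the real limit too.
bool StackCheckNeedsSlowPath(uintptr_t sp, uintptr_t frame_bytes,
                             uintptr_t interrupt_limit) {
  return sp - frame_bytes < interrupt_limit;
}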
diff --git a/deps/v8/src/codegen/arm/assembler-arm-inl.h b/deps/v8/src/codegen/arm/assembler-arm-inl.h index cb0e5f6b7537b5..b1cd1d5205a1cd 100644 --- a/deps/v8/src/codegen/arm/assembler-arm-inl.h +++ b/deps/v8/src/codegen/arm/assembler-arm-inl.h @@ -66,8 +66,7 @@ void RelocInfo::apply(intptr_t delta) { } Address RelocInfo::target_address() { - DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) || - IsWasmCall(rmode_)); + DCHECK(IsCodeTargetMode(rmode_) || IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } @@ -92,11 +91,7 @@ Address RelocInfo::constant_pool_entry_address() { int RelocInfo::target_address_size() { return kPointerSize; } HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); - } + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); return HeapObject::cast( Object(Assembler::target_address_at(pc_, constant_pool_))); } @@ -105,8 +100,6 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { if (IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)) { return Handle<HeapObject>(reinterpret_cast<Address*>( Assembler::target_address_at(pc_, constant_pool_))); - } else if (IsDataEmbeddedObject(rmode_)) { - return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); } DCHECK(IsRelativeCodeTarget(rmode_)); return origin->relative_code_target_object_handle_at(pc_); @@ -115,15 +108,9 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { void RelocInfo::set_target_object(Heap* heap, HeapObject target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - WriteUnalignedValue(pc_, target.ptr()); - // No need to flush icache since no instructions were changed. 
- } else { - Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), - icache_flush_mode); - } + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); + Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), + icache_flush_mode); if (!host().is_null() && !v8_flags.disable_write_barriers) { WriteBarrierForCode(host(), this, target, write_barrier_mode); } @@ -153,19 +140,6 @@ Address RelocInfo::target_internal_reference_address() { Builtin RelocInfo::target_builtin_at(Assembler* origin) { UNREACHABLE(); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return target_address(); -} - -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) - set_target_address(target, write_barrier_mode, icache_flush_mode); -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); @@ -173,8 +147,8 @@ Address RelocInfo::target_off_heap_target() { void RelocInfo::WipeOut() { DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) || - IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) || - IsInternalReference(rmode_) || IsOffHeapTarget(rmode_)); + IsExternalReference(rmode_) || IsInternalReference(rmode_) || + IsOffHeapTarget(rmode_)); if (IsInternalReference(rmode_)) { Memory<Address>(pc_) = kNullAddress; } else { diff --git a/deps/v8/src/codegen/arm/assembler-arm.cc b/deps/v8/src/codegen/arm/assembler-arm.cc index 31d76c61984f60..b2d7cad0963cd7 100644 --- a/deps/v8/src/codegen/arm/assembler-arm.cc +++ b/deps/v8/src/codegen/arm/assembler-arm.cc @@ -5218,8 +5218,7 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { DCHECK(is_const_pool_blocked() || pending_32_bit_constants_.empty()); CheckBuffer(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } base::WriteUnalignedValue(reinterpret_cast<Address>(pc_), data); @@ -5232,8 +5231,7 @@ void Assembler::dq(uint64_t value, RelocInfo::Mode rmode) { DCHECK(is_const_pool_blocked() || pending_32_bit_constants_.empty()); CheckBuffer(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } base::WriteUnalignedValue(reinterpret_cast<Address>(pc_), value); diff --git a/deps/v8/src/codegen/arm/macro-assembler-arm.cc b/deps/v8/src/codegen/arm/macro-assembler-arm.cc index f88c04333f479f..565d0820bdd0b5 100644 --- a/deps/v8/src/codegen/arm/macro-assembler-arm.cc +++ b/deps/v8/src/codegen/arm/macro-assembler-arm.cc @@ -2078,7 +2078,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( b(ne, flags_need_processing); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, Register feedback_vector) { ASM_CODE_COMMENT(this); DCHECK(!AreAliased(flags, feedback_vector)); diff --git a/deps/v8/src/codegen/arm/macro-assembler-arm.h b/deps/v8/src/codegen/arm/macro-assembler-arm.h index 099b1551bf8b1d..51f79075812a84 100644 --- a/deps/v8/src/codegen/arm/macro-assembler-arm.h +++ b/deps/v8/src/codegen/arm/macro-assembler-arm.h @@ -779,8 +779,8 @@ class 
V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, Register feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, - Register feedback_vector); + void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, + Register feedback_vector); // --------------------------------------------------------------------------- // Runtime calls diff --git a/deps/v8/src/codegen/arm64/assembler-arm64-inl.h b/deps/v8/src/codegen/arm64/assembler-arm64-inl.h index 5df2d876457ddc..37158040ead8c0 100644 --- a/deps/v8/src/codegen/arm64/assembler-arm64-inl.h +++ b/deps/v8/src/codegen/arm64/assembler-arm64-inl.h @@ -537,16 +537,6 @@ Builtin Assembler::target_builtin_at(Address pc) { return static_cast<Builtin>(builtin_id); } -Address Assembler::runtime_entry_at(Address pc) { - Instruction* instr = reinterpret_cast<Instruction*>(pc); - if (instr->IsLdrLiteralX()) { - return Assembler::target_address_at(pc, 0 /* unused */); - } else { - DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch()); - return instr->ImmPCOffset() + options().code_range_base; - } -} - int Assembler::deserialization_special_target_size(Address location) { Instruction* instr = reinterpret_cast<Instruction*>(location); if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) { @@ -630,7 +620,7 @@ int RelocInfo::target_address_size() { Address RelocInfo::target_address() { DCHECK(IsCodeTarget(rmode_) || IsNearBuiltinEntry(rmode_) || - IsRuntimeEntry(rmode_) || IsWasmCall(rmode_)); + IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } @@ -665,13 +655,12 @@ Address RelocInfo::constant_pool_entry_address() { HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); - } else if (IsCompressedEmbeddedObject(rmode_)) { + if (IsCompressedEmbeddedObject(rmode_)) { Tagged_t compressed = Assembler::target_compressed_address_at(pc_, constant_pool_); DCHECK(!HAS_SMI_TAG(compressed)); - Object obj(DecompressTaggedPointer(cage_base, compressed)); + Object obj(V8HeapCompressionScheme::DecompressTaggedPointer(cage_base, + compressed)); // Embedding of compressed Code objects must not happen when external code // space is enabled, because CodeDataContainers must be used instead. DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, @@ -684,9 +673,7 @@ HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { } Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { - if (IsDataEmbeddedObject(rmode_)) { - return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); - } else if (IsEmbeddedObjectMode(rmode_)) { + if (IsEmbeddedObjectMode(rmode_)) { return origin->target_object_handle_at(pc_); } else { DCHECK(IsCodeTarget(rmode_)); @@ -698,12 +685,11 @@ void RelocInfo::set_target_object(Heap* heap, HeapObject target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - WriteUnalignedValue(pc_, target.ptr()); - // No need to flush icache since no instructions were changed. 
- } else if (IsCompressedEmbeddedObject(rmode_)) { + if (IsCompressedEmbeddedObject(rmode_)) { Assembler::set_target_compressed_address_at( - pc_, constant_pool_, CompressTagged(target.ptr()), icache_flush_mode); + pc_, constant_pool_, + V8HeapCompressionScheme::CompressTagged(target.ptr()), + icache_flush_mode); } else { DCHECK(IsFullEmbeddedObject(rmode_)); Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), @@ -741,20 +727,6 @@ Builtin RelocInfo::target_builtin_at(Assembler* origin) { return Assembler::target_builtin_at(pc_); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return target_address(); -} - -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) { - set_target_address(target, write_barrier_mode, icache_flush_mode); - } -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); @@ -762,8 +734,8 @@ Address RelocInfo::target_off_heap_target() { void RelocInfo::WipeOut() { DCHECK(IsEmbeddedObjectMode(rmode_) || IsCodeTarget(rmode_) || - IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) || - IsInternalReference(rmode_) || IsOffHeapTarget(rmode_)); + IsExternalReference(rmode_) || IsInternalReference(rmode_) || + IsOffHeapTarget(rmode_)); if (IsInternalReference(rmode_)) { WriteUnalignedValue<Address>(pc_, kNullAddress); } else if (IsCompressedEmbeddedObject(rmode_)) { diff --git a/deps/v8/src/codegen/arm64/assembler-arm64.cc b/deps/v8/src/codegen/arm64/assembler-arm64.cc index 754c79815ab06f..dc06c743a02d30 100644 --- a/deps/v8/src/codegen/arm64/assembler-arm64.cc +++ b/deps/v8/src/codegen/arm64/assembler-arm64.cc @@ -188,7 +188,6 @@ CPURegList CPURegList::GetCallerSavedV(int size) { const int RelocInfo::kApplyMask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | RelocInfo::ModeMask(RelocInfo::NEAR_BUILTIN_ENTRY) | - RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE); bool RelocInfo::IsCodedSpecially() { @@ -1134,10 +1133,20 @@ void Assembler::smull(const Register& rd, const Register& rn, void Assembler::smulh(const Register& rd, const Register& rn, const Register& rm) { - DCHECK(AreSameSizeAndType(rd, rn, rm)); + DCHECK(rd.Is64Bits()); + DCHECK(rn.Is64Bits()); + DCHECK(rm.Is64Bits()); DataProcessing3Source(rd, rn, rm, xzr, SMULH_x); } +void Assembler::umulh(const Register& rd, const Register& rn, + const Register& rm) { + DCHECK(rd.Is64Bits()); + DCHECK(rn.Is64Bits()); + DCHECK(rm.Is64Bits()); + DataProcessing3Source(rd, rn, rm, xzr, UMULH_x); +} + void Assembler::sdiv(const Register& rd, const Register& rn, const Register& rm) { DCHECK(rd.SizeInBits() == rn.SizeInBits()); @@ -4306,7 +4315,6 @@ void Assembler::GrowBuffer() { void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data, ConstantPoolMode constant_pool_mode) { if ((rmode == RelocInfo::INTERNAL_REFERENCE) || - (rmode == RelocInfo::DATA_EMBEDDED_OBJECT) || (rmode == RelocInfo::CONST_POOL) || (rmode == RelocInfo::VENEER_POOL) || (rmode == RelocInfo::DEOPT_SCRIPT_OFFSET) || (rmode == RelocInfo::DEOPT_INLINING_ID) || @@ -4318,7 +4326,6 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data, RelocInfo::IsDeoptNodeId(rmode) || RelocInfo::IsDeoptPosition(rmode) || RelocInfo::IsInternalReference(rmode) || - RelocInfo::IsDataEmbeddedObject(rmode) || 
RelocInfo::IsLiteralConstant(rmode) || RelocInfo::IsConstPool(rmode) || RelocInfo::IsVeneerPool(rmode)); // These modes do not need an entry in the constant pool. diff --git a/deps/v8/src/codegen/arm64/assembler-arm64.h b/deps/v8/src/codegen/arm64/assembler-arm64.h index 368a2e5f42bf78..68f773a92436c3 100644 --- a/deps/v8/src/codegen/arm64/assembler-arm64.h +++ b/deps/v8/src/codegen/arm64/assembler-arm64.h @@ -273,14 +273,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { // code is moved into the code space. static inline Builtin target_builtin_at(Address pc); - // Returns the target address for a runtime function for the call encoded - // at 'pc'. - // Runtime entries can be temporarily encoded as the offset between the - // runtime function entrypoint and the code range base (stored in the - // code_range_base field), in order to be encodable as we generate the code, - // before it is moved into the code space. - inline Address runtime_entry_at(Address pc); - // This sets the branch destination. 'location' here can be either the pc of // an immediate branch or the address of an entry in the constant pool. // This is for calls and branches within generated code. @@ -751,9 +743,12 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { // 32 x 32 -> 64-bit multiply. void smull(const Register& rd, const Register& rn, const Register& rm); - // Xd = bits<127:64> of Xn * Xm. + // Xd = bits<127:64> of Xn * Xm, signed. void smulh(const Register& rd, const Register& rn, const Register& rm); + // Xd = bits<127:64> of Xn * Xm, unsigned. + void umulh(const Register& rd, const Register& rn, const Register& rm); + // Signed 32 x 32 -> 64-bit multiply and accumulate. void smaddl(const Register& rd, const Register& rn, const Register& rm, const Register& ra); @@ -2070,8 +2065,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { void dd(uint32_t data, RelocInfo::Mode rmode = RelocInfo::NO_INFO) { BlockPoolsScope no_pool_scope(this); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } dc32(data); @@ -2079,8 +2073,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { void dq(uint64_t data, RelocInfo::Mode rmode = RelocInfo::NO_INFO) { BlockPoolsScope no_pool_scope(this); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } dc64(data); @@ -2088,8 +2081,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { void dp(uintptr_t data, RelocInfo::Mode rmode = RelocInfo::NO_INFO) { BlockPoolsScope no_pool_scope(this); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } dc64(data); diff --git a/deps/v8/src/codegen/arm64/macro-assembler-arm64-inl.h b/deps/v8/src/codegen/arm64/macro-assembler-arm64-inl.h index bf34708491058b..0c7e7357534fc9 100644 --- a/deps/v8/src/codegen/arm64/macro-assembler-arm64-inl.h +++ b/deps/v8/src/codegen/arm64/macro-assembler-arm64-inl.h @@ -944,7 +944,7 @@ void TurboAssembler::Smull(const Register& rd, const Register& rn, smull(rd, rn, rm); } -void MacroAssembler::Smulh(const Register& rd, const Register& rn, +void TurboAssembler::Smulh(const Register& rd, const Register& rn, const Register& rm) { 
DCHECK(allow_macro_instructions()); DCHECK(!rd.IsZero()); @@ -958,6 +958,13 @@ void TurboAssembler::Umull(const Register& rd, const Register& rn, umaddl(rd, rn, rm, xzr); } +void TurboAssembler::Umulh(const Register& rd, const Register& rn, + const Register& rm) { + DCHECK(allow_macro_instructions()); + DCHECK(!rd.IsZero()); + umulh(rd, rn, rm); +} + void TurboAssembler::Sxtb(const Register& rd, const Register& rn) { DCHECK(allow_macro_instructions()); DCHECK(!rd.IsZero()); diff --git a/deps/v8/src/codegen/arm64/macro-assembler-arm64.cc b/deps/v8/src/codegen/arm64/macro-assembler-arm64.cc index 0112f35129c89d..25834f25823cd1 100644 --- a/deps/v8/src/codegen/arm64/macro-assembler-arm64.cc +++ b/deps/v8/src/codegen/arm64/macro-assembler-arm64.cc @@ -1440,7 +1440,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( TestAndBranchIfAnySet(flags, kFlagsMask, flags_need_processing); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, Register feedback_vector) { ASM_CODE_COMMENT(this); DCHECK(!AreAliased(flags, feedback_vector)); @@ -3291,16 +3291,6 @@ void MacroAssembler::RecordWriteField(Register object, int offset, Bind(&done); } -void TurboAssembler::EncodeSandboxedPointer(const Register& value) { - ASM_CODE_COMMENT(this); -#ifdef V8_ENABLE_SANDBOX - Sub(value, value, kPtrComprCageBaseRegister); - Mov(value, Operand(value, LSL, kSandboxedPointerShift)); -#else - UNREACHABLE(); -#endif -} - void TurboAssembler::DecodeSandboxedPointer(const Register& value) { ASM_CODE_COMMENT(this); #ifdef V8_ENABLE_SANDBOX @@ -3313,19 +3303,27 @@ void TurboAssembler::DecodeSandboxedPointer(const Register& value) { void TurboAssembler::LoadSandboxedPointerField( const Register& destination, const MemOperand& field_operand) { +#ifdef V8_ENABLE_SANDBOX ASM_CODE_COMMENT(this); Ldr(destination, field_operand); DecodeSandboxedPointer(destination); +#else + UNREACHABLE(); +#endif } void TurboAssembler::StoreSandboxedPointerField( const Register& value, const MemOperand& dst_field_operand) { +#ifdef V8_ENABLE_SANDBOX ASM_CODE_COMMENT(this); UseScratchRegisterScope temps(this); Register scratch = temps.AcquireX(); - Mov(scratch, value); - EncodeSandboxedPointer(scratch); + Sub(scratch, value, kPtrComprCageBaseRegister); + Mov(scratch, Operand(scratch, LSL, kSandboxedPointerShift)); Str(scratch, dst_field_operand); +#else + UNREACHABLE(); +#endif } void TurboAssembler::LoadExternalPointerField(Register destination, diff --git a/deps/v8/src/codegen/arm64/macro-assembler-arm64.h b/deps/v8/src/codegen/arm64/macro-assembler-arm64.h index 4c3715b69cf060..99121e3f4b9de3 100644 --- a/deps/v8/src/codegen/arm64/macro-assembler-arm64.h +++ b/deps/v8/src/codegen/arm64/macro-assembler-arm64.h @@ -1056,7 +1056,9 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { inline void Lsl(const Register& rd, const Register& rn, unsigned shift); inline void Lsl(const Register& rd, const Register& rn, const Register& rm); inline void Umull(const Register& rd, const Register& rn, const Register& rm); + inline void Umulh(const Register& rd, const Register& rn, const Register& rm); inline void Smull(const Register& rd, const Register& rn, const Register& rm); + inline void Smulh(const Register& rd, const Register& rn, const Register& rm); inline void Sxtb(const Register& rd, const Register& rn); inline void Sxth(const Register& rd, const Register& rn); @@ -1452,13 +1454,9 @@ class V8_EXPORT_PRIVATE 
TurboAssembler : public TurboAssemblerBase { // Transform a SandboxedPointer from/to its encoded form, which is used when // the pointer is stored on the heap and ensures that the pointer will always // point into the sandbox. - void EncodeSandboxedPointer(const Register& value); void DecodeSandboxedPointer(const Register& value); - - // Load and decode a SandboxedPointer from the heap. void LoadSandboxedPointerField(const Register& destination, const MemOperand& field_operand); - // Encode and store a SandboxedPointer to the heap. void StoreSandboxedPointerField(const Register& value, const MemOperand& dst_field_operand); @@ -1630,7 +1628,6 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { const Register& ra); inline void Smsubl(const Register& rd, const Register& rn, const Register& rm, const Register& ra); - inline void Smulh(const Register& rd, const Register& rn, const Register& rm); inline void Stnp(const CPURegister& rt, const CPURegister& rt2, const MemOperand& dst); inline void Umaddl(const Register& rd, const Register& rn, const Register& rm, @@ -1842,8 +1839,8 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, Register feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, - Register feedback_vector); + void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, + Register feedback_vector); // Helpers ------------------------------------------------------------------ diff --git a/deps/v8/src/codegen/assembler.h b/deps/v8/src/codegen/assembler.h index 8a8164d0739ace..1073a9d4f63432 100644 --- a/deps/v8/src/codegen/assembler.h +++ b/deps/v8/src/codegen/assembler.h @@ -245,7 +245,7 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced { } bool is_constant_pool_available() const { - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { // We need to disable constant pool here for embedded builtins // because the metadata section is not adjacent to instructions return constant_pool_available_ && !options().isolate_independent_code; } @@ -374,7 +374,7 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced { byte* pc_; void set_constant_pool_available(bool available) { - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { constant_pool_available_ = available; } else { // Embedded constant pool not supported on this architecture. diff --git a/deps/v8/src/codegen/background-merge-task.h b/deps/v8/src/codegen/background-merge-task.h index 0e7cffbaf6e90e..643a6a84efadbb 100644 --- a/deps/v8/src/codegen/background-merge-task.h +++ b/deps/v8/src/codegen/background-merge-task.h @@ -35,7 +35,7 @@ class V8_EXPORT_PRIVATE BackgroundMergeTask { // Step 2: on the background thread, update pointers in the new Script's // object graph to point to corresponding objects from the cached Script where - // appropriate. May only be called if HasCachedScript returned true. + // appropriate. May only be called if HasPendingBackgroundWork returned true.
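Stepping back to the arm64 assembler hunks above: the new umulh instruction and the Umulh/Smulh TurboAssembler wrappers produce bits 127:64 of a 64x64-bit product. Reference semantics for both, assuming a compiler with the __int128 extension (GCC/Clang); the function names are illustrative, not V8 API:

#include <cstdint>

// Reference semantics for smulh/umulh: the high 64 bits of the full
// 128-bit product of two 64-bit operands.
int64_t Smulh(int64_t a, int64_t b) {
  return static_cast<int64_t>((static_cast<__int128>(a) * b) >> 64);
}
uint64_t Umulh(uint64_t a, uint64_t b) {
  return static_cast<uint64_t>((static_cast<unsigned __int128>(a) * b) >> 64);
}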
void BeginMergeInBackground(LocalIsolate* isolate, Handle<Script> new_script); // Step 3: on the main thread again, complete the merge so that all relevant @@ -45,10 +45,11 @@ class V8_EXPORT_PRIVATE BackgroundMergeTask { Handle<SharedFunctionInfo> CompleteMergeInForeground( Isolate* isolate, Handle<Script> new_script); - bool HasCachedScript() const { return !cached_script_.is_null(); } + bool HasPendingBackgroundWork() const { + return state_ == kPendingBackgroundWork; + } bool HasPendingForegroundWork() const { - return !used_new_sfis_.empty() || - !new_compiled_data_for_cached_sfis_.empty(); + return state_ == kPendingForegroundWork; } private: @@ -81,6 +82,14 @@ class V8_EXPORT_PRIVATE BackgroundMergeTask { Handle<FeedbackMetadata> feedback_metadata; }; std::vector<NewCompiledDataForCachedSfi> new_compiled_data_for_cached_sfis_; + + enum State { + kNotStarted, + kPendingBackgroundWork, + kPendingForegroundWork, + kDone, + }; + State state_ = kNotStarted; }; } // namespace internal diff --git a/deps/v8/src/codegen/bailout-reason.h b/deps/v8/src/codegen/bailout-reason.h index 35056c4137de1d..cdd9e123185d14 100644 --- a/deps/v8/src/codegen/bailout-reason.h +++ b/deps/v8/src/codegen/bailout-reason.h @@ -28,6 +28,7 @@ namespace internal { V(kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, \ "The function_data field should be a BytecodeArray on interpreter entry") \ V(kInputStringTooLong, "Input string too long") \ + V(kInputDoesNotFitSmi, "Input number is too large to fit in a Smi") \ V(kInvalidBytecode, "Invalid bytecode") \ V(kInvalidBytecodeAdvance, "Cannot advance current bytecode, ") \ V(kInvalidHandleScopeLevel, "Invalid HandleScope level") \ diff --git a/deps/v8/src/codegen/code-stub-assembler.cc b/deps/v8/src/codegen/code-stub-assembler.cc index 65010070276a4d..a069126f94daff 100644 --- a/deps/v8/src/codegen/code-stub-assembler.cc +++ b/deps/v8/src/codegen/code-stub-assembler.cc @@ -836,6 +836,15 @@ TNode<IntPtrT> CodeStubAssembler::TryIntPtrSub(TNode<IntPtrT> a, return Projection<0>(pair); } +TNode<IntPtrT> CodeStubAssembler::TryIntPtrMul(TNode<IntPtrT> a, + TNode<IntPtrT> b, + Label* if_overflow) { + TNode<PairT<IntPtrT, BoolT>> pair = IntPtrMulWithOverflow(a, b); + TNode<BoolT> overflow = Projection<1>(pair); + GotoIf(overflow, if_overflow); + return Projection<0>(pair); +} + TNode<Int32T> CodeStubAssembler::TryInt32Mul(TNode<Int32T> a, TNode<Int32T> b, Label* if_overflow) { TNode<PairT<Int32T, BoolT>> pair = Int32MulWithOverflow(a, b); @@ -1263,6 +1272,7 @@ TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes, BIND(&next); } + adjusted_size = AlignToAllocationAlignment(adjusted_size.value()); TNode<IntPtrT> new_top = IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value()); @@ -1542,6 +1552,33 @@ TNode<RawPtrT> CodeStubAssembler::EmptyBackingStoreBufferConstant() { #endif // V8_ENABLE_SANDBOX } +TNode<UintPtrT> CodeStubAssembler::LoadBoundedSizeFromObject( + TNode<HeapObject> object, TNode<IntPtrT> field_offset) { +#ifdef V8_ENABLE_SANDBOX + TNode<Uint64T> raw_value = LoadObjectField<Uint64T>(object, field_offset); + TNode<Uint64T> shift_amount = Uint64Constant(kBoundedSizeShift); + TNode<Uint64T> decoded_value = Word64Shr(raw_value, shift_amount); + return ReinterpretCast<UintPtrT>(decoded_value); +#else + return LoadObjectField<UintPtrT>(object, field_offset); +#endif // V8_ENABLE_SANDBOX +} + +void CodeStubAssembler::StoreBoundedSizeToObject(TNode<HeapObject> object, + TNode<IntPtrT> offset, + TNode<UintPtrT> value) { +#ifdef 
V8_ENABLE_SANDBOX + CSA_DCHECK(this, UintPtrLessThan( + value, IntPtrConstant(kMaxSafeBufferSizeForSandbox))); + TNode<Uint64T> raw_value = ReinterpretCast<Uint64T>(value); + TNode<Uint64T> shift_amount = Uint64Constant(kBoundedSizeShift); + TNode<Uint64T> encoded_value = Word64Shl(raw_value, shift_amount); + StoreObjectFieldNoWriteBarrier<Uint64T>(object, offset, encoded_value); +#else + StoreObjectFieldNoWriteBarrier<UintPtrT>(object, offset, value); +#endif // V8_ENABLE_SANDBOX +} + #ifdef V8_ENABLE_SANDBOX TNode<RawPtrT> CodeStubAssembler::ExternalPointerTableAddress( ExternalPointerTag tag) { @@ -1996,7 +2033,7 @@ TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash( } TNode<Uint32T> CodeStubAssembler::LoadNameHashAssumeComputed(TNode<Name> name) { - TNode<Uint32T> hash_field = LoadNameRawHashField(name); + TNode<Uint32T> hash_field = LoadNameRawHash(name); CSA_DCHECK(this, IsClearWord32(hash_field, Name::kHashNotComputedMask)); return DecodeWord32<Name::HashBits>(hash_field); } @@ -2011,6 +2048,42 @@ TNode<Uint32T> CodeStubAssembler::LoadNameHash(TNode<Name> name, return DecodeWord32<Name::HashBits>(raw_hash_field); } +TNode<Uint32T> CodeStubAssembler::LoadNameRawHash(TNode<Name> name) { + TVARIABLE(Uint32T, var_raw_hash); + + Label if_forwarding_index(this, Label::kDeferred), done(this); + + TNode<Uint32T> raw_hash_field = LoadNameRawHashField(name); + GotoIf(IsSetWord32(raw_hash_field, Name::kHashNotComputedMask), + &if_forwarding_index); + + var_raw_hash = raw_hash_field; + Goto(&done); + + BIND(&if_forwarding_index); + { + CSA_DCHECK(this, + IsEqualInWord32<Name::HashFieldTypeBits>( + raw_hash_field, Name::HashFieldType::kForwardingIndex)); + TNode<ExternalReference> function = + ExternalConstant(ExternalReference::raw_hash_from_forward_table()); + const TNode<ExternalReference> isolate_ptr = + ExternalConstant(ExternalReference::isolate_address(isolate())); + TNode<Uint32T> result = UncheckedCast<Uint32T>(CallCFunction( + function, MachineType::Uint32(), + std::make_pair(MachineType::Pointer(), isolate_ptr), + std::make_pair( + MachineType::Int32(), + DecodeWord32<Name::ForwardingIndexValueBits>(raw_hash_field)))); + + var_raw_hash = result; + Goto(&done); + } + + BIND(&done); + return var_raw_hash.value(); +} + TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(TNode<String> string) { return SmiFromIntPtr(LoadStringLengthAsWord(string)); } @@ -3975,7 +4048,7 @@ TNode<JSArray> CodeStubAssembler::AllocateJSArray( int base_size = array_header_size; if (allocation_site) { DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL); - base_size += AllocationMemento::kSize; + base_size += ALIGN_TO_ALLOCATION_ALIGNMENT(AllocationMemento::kSize); } TNode<IntPtrT> size = IntPtrConstant(base_size); @@ -3999,6 +4072,32 @@ TNode<FixedArrayBase> InnerAllocateElements(CodeStubAssembler* csa, } // namespace +TNode<IntPtrT> CodeStubAssembler::AlignToAllocationAlignment( + TNode<IntPtrT> value) { + if (!V8_COMPRESS_POINTERS_8GB_BOOL) return value; + + Label not_aligned(this), is_aligned(this); + TVARIABLE(IntPtrT, result, value); + + Branch(WordIsAligned(value, kObjectAlignment8GbHeap), &is_aligned, + ¬_aligned); + + BIND(¬_aligned); + { + if (kObjectAlignment8GbHeap == 2 * kTaggedSize) { + result = IntPtrAdd(value, IntPtrConstant(kTaggedSize)); + } else { + result = + WordAnd(IntPtrAdd(value, IntPtrConstant(kObjectAlignment8GbHeapMask)), + IntPtrConstant(~kObjectAlignment8GbHeapMask)); + } + Goto(&is_aligned); + } + + BIND(&is_aligned); + return result.value(); +} + std::pair<TNode<JSArray>, 
TNode<FixedArrayBase>> CodeStubAssembler::AllocateUninitializedJSArrayWithElements( ElementsKind kind, TNode<Map> array_map, TNode<Smi> length, @@ -4039,17 +4138,18 @@ CodeStubAssembler::AllocateUninitializedJSArrayWithElements( BIND(&nonempty); { - int base_size = array_header_size; + int base_size = ALIGN_TO_ALLOCATION_ALIGNMENT(array_header_size); if (allocation_site) { DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL); - base_size += AllocationMemento::kSize; + base_size += ALIGN_TO_ALLOCATION_ALIGNMENT(AllocationMemento::kSize); } const int elements_offset = base_size; // Compute space for elements base_size += FixedArray::kHeaderSize; - TNode<IntPtrT> size = ElementOffsetFromIndex(capacity, kind, base_size); + TNode<IntPtrT> size = AlignToAllocationAlignment( + ElementOffsetFromIndex(capacity, kind, base_size)); // For very large arrays in which the requested allocation exceeds the // maximal size of a regular heap object, we cannot use the allocation @@ -4125,8 +4225,10 @@ TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArray( if (allocation_site) { DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL); - InitializeAllocationMemento(array, IntPtrConstant(JSArray::kHeaderSize), - *allocation_site); + InitializeAllocationMemento( + array, + IntPtrConstant(ALIGN_TO_ALLOCATION_ALIGNMENT(JSArray::kHeaderSize)), + *allocation_site); } return CAST(array); @@ -6760,6 +6862,9 @@ TNode<BoolT> CodeStubAssembler::IsUniqueName(TNode<HeapObject> object) { // characters, or is outside MAX_SAFE_INTEGER/size_t range). Note that for // non-TypedArray receivers, there are additional strings that must be treated // as named property keys, namely the range [0xFFFFFFFF, MAX_SAFE_INTEGER]. +// TODO(pthier): Handle forwarding indices correctly. The forwarded hash could +// be an integer index. Consider using 1 bit of the forward index to indicate +// the type of the forwarded hash. TNode<BoolT> CodeStubAssembler::IsUniqueNameNoIndex(TNode<HeapObject> object) { TNode<Uint16T> instance_type = LoadInstanceType(object); return Select<BoolT>( @@ -6784,7 +6889,7 @@ TNode<BoolT> CodeStubAssembler::IsUniqueNameNoCachedIndex( return Select<BoolT>( IsInternalizedStringInstanceType(instance_type), [=] { - return IsSetWord32(LoadNameRawHashField(CAST(object)), + return IsSetWord32(LoadNameRawHash(CAST(object)), Name::kDoesNotContainCachedArrayIndexMask); }, [=] { return IsSymbolInstanceType(instance_type); }); @@ -6799,6 +6904,26 @@ TNode<BoolT> CodeStubAssembler::IsBigInt(TNode<HeapObject> object) { return IsBigIntInstanceType(LoadInstanceType(object)); } +void CodeStubAssembler::GotoIfLargeBigInt(TNode<BigInt> bigint, + Label* true_label) { + // Small BigInts are BigInts in the range [-2^63 + 1, 2^63 - 1] so that they + // can fit in 64-bit registers. Excluding -2^63 from the range makes the check + // simpler and faster. The other BigInts are seen as "large". + // TODO(panq): We might need to reevaluate the range of small BigInts.
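The body of GotoIfLargeBigInt follows below. Restated in plain C++ (a BigInt here is a sign plus 64-bit magnitude digits), the predicate it implements is a sketch of the comment above, with 2^63 .. 2^64-1 deliberately treated as large:

#include <cstddef>
#include <cstdint>

// Mirrors GotoIfLargeBigInt: zero digits is zero (small); more than one digit
// cannot fit in 64 bits (large); a single digit is small only while its top
// bit is clear, which excludes magnitudes 2^63 .. 2^64-1 (including -2^63).
bool IsLargeBigInt(size_t digit_count, uint64_t least_significant_digit) {
  if (digit_count == 0) return false;
  if (digit_count > 1) return true;
  return (least_significant_digit >> 63) != 0;
}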
+ DCHECK(Is64()); + Label false_label(this); + TNode<Uint32T> length = + DecodeWord32<BigIntBase::LengthBits>(LoadBigIntBitfield(bigint)); + GotoIf(Word32Equal(length, Uint32Constant(0)), &false_label); + GotoIfNot(Word32Equal(length, Uint32Constant(1)), true_label); + Branch(WordEqual(UintPtrConstant(0), + WordAnd(LoadBigIntDigit(bigint, 0), + UintPtrConstant(static_cast<uintptr_t>( + 1ULL << (sizeof(uintptr_t) * 8 - 1))))), + &false_label, true_label); + Bind(&false_label); +} + TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType( TNode<Int32T> instance_type) { return Int32LessThanOrEqual(instance_type, @@ -8555,14 +8680,18 @@ void CodeStubAssembler::NameDictionaryLookup( std::is_same<Dictionary, GlobalDictionary>::value || std::is_same<Dictionary, NameToIndexHashTable>::value, "Unexpected NameDictionary"); - DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep()); + DCHECK_IMPLIES(var_name_index != nullptr, + MachineType::PointerRepresentation() == var_name_index->rep()); DCHECK_IMPLIES(mode == kFindInsertionIndex, if_found == nullptr); Comment("NameDictionaryLookup"); CSA_DCHECK(this, IsUniqueName(unique_name)); + Label if_not_computed(this, Label::kDeferred); + TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary)); TNode<IntPtrT> mask = IntPtrSub(capacity, IntPtrConstant(1)); - TNode<UintPtrT> hash = ChangeUint32ToWord(LoadNameHash(unique_name)); + TNode<UintPtrT> hash = + ChangeUint32ToWord(LoadNameHash(unique_name, &if_not_computed)); // See Dictionary::FirstProbe(). TNode<IntPtrT> count = IntPtrConstant(0); @@ -8570,11 +8699,13 @@ void CodeStubAssembler::NameDictionaryLookup( TNode<Oddball> undefined = UndefinedConstant(); // Appease the variable merging algorithm for "Goto(&loop)" below. - *var_name_index = IntPtrConstant(0); + if (var_name_index) *var_name_index = IntPtrConstant(0); TVARIABLE(IntPtrT, var_count, count); TVARIABLE(IntPtrT, var_entry, initial_entry); - Label loop(this, {&var_count, &var_entry, var_name_index}); + VariableList loop_vars({&var_count, &var_entry}, zone()); + if (var_name_index) loop_vars.push_back(var_name_index); + Label loop(this, loop_vars); Goto(&loop); BIND(&loop); { @@ -8582,7 +8713,7 @@ void CodeStubAssembler::NameDictionaryLookup( TNode<IntPtrT> entry = var_entry.value(); TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry); - *var_name_index = index; + if (var_name_index) *var_name_index = index; TNode<HeapObject> current = CAST(UnsafeLoadFixedArrayElement(dictionary, index)); @@ -8607,6 +8738,52 @@ void CodeStubAssembler::NameDictionaryLookup( var_entry = entry; Goto(&loop); } + + BIND(&if_not_computed); + { + // Strings will only have the forwarding index with experimental shared + // memory features turned on. To minimize affecting the fast path, the + // forwarding index branch defers both fetching the actual hash value and + // the dictionary lookup to the runtime. + ExternalReference func_ref; + if constexpr (std::is_same<Dictionary, NameDictionary>::value) { + func_ref = + mode == kFindExisting + ? ExternalReference::name_dictionary_lookup_forwarded_string() + : ExternalReference:: + name_dictionary_find_insertion_entry_forwarded_string(); + } else if constexpr (std::is_same<Dictionary, GlobalDictionary>::value) { + func_ref = + mode == kFindExisting + ? ExternalReference::global_dictionary_lookup_forwarded_string() + : ExternalReference:: + global_dictionary_find_insertion_entry_forwarded_string(); + } else { + func_ref = + mode == kFindExisting + ? 
ExternalReference:: + name_to_index_hashtable_lookup_forwarded_string() + : ExternalReference:: + name_to_index_hashtable_find_insertion_entry_forwarded_string(); + } + const TNode<ExternalReference> function = ExternalConstant(func_ref); + const TNode<ExternalReference> isolate_ptr = + ExternalConstant(ExternalReference::isolate_address(isolate())); + TNode<IntPtrT> entry = UncheckedCast<IntPtrT>(CallCFunction( + function, MachineType::IntPtr(), + std::make_pair(MachineType::Pointer(), isolate_ptr), + std::make_pair(MachineType::TaggedPointer(), dictionary), + std::make_pair(MachineType::TaggedPointer(), unique_name))); + if (var_name_index) *var_name_index = EntryToIndex<Dictionary>(entry); + if (mode == kFindExisting) { + GotoIf(IntPtrEqual(entry, + IntPtrConstant(InternalIndex::NotFound().raw_value())), + if_not_found); + Goto(if_found); + } else { + Goto(if_not_found); + } + } } // Instantiate template methods to workaround GCC compilation issue. @@ -11570,7 +11747,8 @@ void CodeStubAssembler::TrapAllocationMemento(TNode<JSObject> object, TNode<ExternalReference> new_space_top_address = ExternalConstant( ExternalReference::new_space_allocation_top_address(isolate())); - const int kMementoMapOffset = JSArray::kHeaderSize; + const int kMementoMapOffset = + ALIGN_TO_ALLOCATION_ALIGNMENT(JSArray::kHeaderSize); const int kMementoLastWordOffset = kMementoMapOffset + AllocationMemento::kSize - kTaggedSize; @@ -13730,7 +13908,7 @@ TNode<HeapObject> CodeStubAssembler::GetSuperConstructor( return LoadMapPrototype(map); } -void CodeStubAssembler::FindNonDefaultConstructor( +void CodeStubAssembler::FindNonDefaultConstructorOrConstruct( TNode<Context> context, TNode<JSFunction> this_function, TVariable<Object>& constructor, Label* found_default_base_ctor, Label* found_something_else) { @@ -13758,10 +13936,16 @@ void CodeStubAssembler::FindNonDefaultConstructor( GotoIfNot(IsJSFunction(CAST(constructor.value())), found_something_else); // If there are class fields, bail out. TODO(v8:13091): Handle them here. - TNode<Oddball> has_class_fields = - HasProperty(context, constructor.value(), ClassFieldsSymbolConstant(), - kHasProperty); - GotoIf(IsTrue(has_class_fields), found_something_else); + const TNode<SharedFunctionInfo> shared_function_info = + LoadObjectField<SharedFunctionInfo>( + CAST(constructor.value()), JSFunction::kSharedFunctionInfoOffset); + const TNode<Uint32T> has_class_fields = + DecodeWord32<SharedFunctionInfo::RequiresInstanceMembersInitializerBit>( + LoadObjectField<Uint32T>(shared_function_info, + SharedFunctionInfo::kFlagsOffset)); + + GotoIf(Word32NotEqual(has_class_fields, Int32Constant(0)), + found_something_else); // If there are private methods, bail out. TODO(v8:13091): Handle them here. 
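The class-fields check above trades a dynamic HasProperty lookup for a single bit test on the SharedFunctionInfo flags word. The bit-packing pattern it relies on can be sketched like this (a minimal stand-in for V8's BitField template; the field position here is made up):

#include <cstdint>

// Minimal bit-field helper: a field of `size` bits starting at bit `shift`
// of a packed 32-bit flags word.
template <typename T, int shift, int size>
struct BitField {
  static constexpr uint32_t kMask = ((uint32_t{1} << size) - 1) << shift;
  static constexpr uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << shift;
  }
  static constexpr T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> shift);
  }
};

// Hypothetical layout: a one-bit flag at bit 3 of the flags word.
using RequiresInstanceMembersInitializerBit = BitField<bool, 3, 1>;

static_assert(RequiresInstanceMembersInitializerBit::decode(
                  RequiresInstanceMembersInitializerBit::encode(true)),
              "flag must round-trip through encode/decode");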
TNode<Context> function_context = @@ -14145,7 +14329,8 @@ TNode<JSObject> CodeStubAssembler::AllocateJSIteratorResultForEntry( StoreFixedArrayElement(elements, 1, value); TNode<Map> array_map = CAST(LoadContextElement( native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX)); - TNode<HeapObject> array = Allocate(JSArray::kHeaderSize); + TNode<HeapObject> array = + Allocate(ALIGN_TO_ALLOCATION_ALIGNMENT(JSArray::kHeaderSize)); StoreMapNoWriteBarrier(array, array_map); StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset, RootIndex::kEmptyFixedArray); @@ -14190,6 +14375,18 @@ void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached( ThrowIfArrayBufferIsDetached(context, buffer, method_name); } +TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferByteLength( + TNode<JSArrayBuffer> array_buffer) { + return LoadBoundedSizeFromObject(array_buffer, + JSArrayBuffer::kRawByteLengthOffset); +} + +TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferMaxByteLength( + TNode<JSArrayBuffer> array_buffer) { + return LoadBoundedSizeFromObject(array_buffer, + JSArrayBuffer::kRawMaxByteLengthOffset); +} + TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStorePtr( TNode<JSArrayBuffer> array_buffer) { return LoadSandboxedPointerFromObject(array_buffer, @@ -14202,16 +14399,38 @@ TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer( JSArrayBufferView::kBufferOffset); } -TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewRawByteLength( +TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength( TNode<JSArrayBufferView> array_buffer_view) { - return LoadObjectField<UintPtrT>(array_buffer_view, - JSArrayBufferView::kByteLengthOffset); + return LoadBoundedSizeFromObject(array_buffer_view, + JSArrayBufferView::kRawByteLengthOffset); +} + +void CodeStubAssembler::StoreJSArrayBufferViewByteLength( + TNode<JSArrayBufferView> array_buffer_view, TNode<UintPtrT> value) { + StoreBoundedSizeToObject(array_buffer_view, + JSArrayBufferView::kRawByteLengthOffset, value); } TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset( TNode<JSArrayBufferView> array_buffer_view) { - return LoadObjectField<UintPtrT>(array_buffer_view, - JSArrayBufferView::kByteOffsetOffset); + return LoadBoundedSizeFromObject(array_buffer_view, + JSArrayBufferView::kRawByteOffsetOffset); +} + +void CodeStubAssembler::StoreJSArrayBufferViewByteOffset( + TNode<JSArrayBufferView> array_buffer_view, TNode<UintPtrT> value) { + StoreBoundedSizeToObject(array_buffer_view, + JSArrayBufferView::kRawByteOffsetOffset, value); +} + +TNode<UintPtrT> CodeStubAssembler::LoadJSTypedArrayLength( + TNode<JSTypedArray> typed_array) { + return LoadBoundedSizeFromObject(typed_array, JSTypedArray::kRawLengthOffset); +} + +void CodeStubAssembler::StoreJSTypedArrayLength(TNode<JSTypedArray> typed_array, + TNode<UintPtrT> value) { + StoreBoundedSizeToObject(typed_array, JSTypedArray::kRawLengthOffset, value); } TNode<UintPtrT> CodeStubAssembler::LoadJSTypedArrayLengthAndCheckDetached( @@ -14309,7 +14528,7 @@ CodeStubAssembler::LoadVariableLengthJSArrayBufferViewByteLength( // Check if the backing RAB has shrunk so that the buffer is out of // bounds. 
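In effect, the check that follows (and its twin in IsJSArrayBufferViewDetachedOrOutOfBounds further down) evaluates buffer_byte_length >= array_byte_offset + array_byte_length in unsigned word-sized arithmetic. A plain C++ rendering of the in-bounds predicate (illustrative only; like the CSA code, it assumes offset + length does not wrap):

#include <cstdint>

// A view covering [offset, offset + length) is still in bounds of a
// resizable buffer that currently holds buffer_length bytes iff:
bool ViewInBounds(uintptr_t buffer_length, uintptr_t offset,
                  uintptr_t length) {
  return buffer_length >= offset + length;
}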
TNode<UintPtrT> array_byte_length = - LoadJSArrayBufferViewRawByteLength(array); + LoadJSArrayBufferViewByteLength(array); GotoIfNot(UintPtrGreaterThanOrEqual( buffer_byte_length, UintPtrAdd(array_byte_offset, array_byte_length)), @@ -14354,7 +14573,7 @@ void CodeStubAssembler::IsJSArrayBufferViewDetachedOrOutOfBounds( // Check if the backing RAB has shrunk so that the buffer is out of // bounds. TNode<UintPtrT> array_byte_length = - LoadJSArrayBufferViewRawByteLength(array_buffer_view); + LoadJSArrayBufferViewByteLength(array_buffer_view); Branch(UintPtrGreaterThanOrEqual( buffer_byte_length, UintPtrAdd(array_byte_offset, array_byte_length)), diff --git a/deps/v8/src/codegen/code-stub-assembler.h b/deps/v8/src/codegen/code-stub-assembler.h index b53b8465baa79f..6d654b5d5fa3f9 100644 --- a/deps/v8/src/codegen/code-stub-assembler.h +++ b/deps/v8/src/codegen/code-stub-assembler.h @@ -71,9 +71,9 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol }; V(AsyncGeneratorReturnResolveSharedFun, \ async_generator_return_resolve_shared_fun, \ AsyncGeneratorReturnResolveSharedFun) \ - V(AsyncGeneratorYieldResolveSharedFun, \ - async_generator_yield_resolve_shared_fun, \ - AsyncGeneratorYieldResolveSharedFun) \ + V(AsyncGeneratorYieldWithAwaitResolveSharedFun, \ + async_generator_yield_with_await_resolve_shared_fun, \ + AsyncGeneratorYieldWithAwaitResolveSharedFun) \ V(AsyncIteratorValueUnwrapSharedFun, async_iterator_value_unwrap_shared_fun, \ AsyncIteratorValueUnwrapSharedFun) \ V(IsConcatSpreadableProtector, is_concat_spreadable_protector, \ @@ -628,6 +628,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler Label* if_overflow); TNode<IntPtrT> TryIntPtrSub(TNode<IntPtrT> a, TNode<IntPtrT> b, Label* if_overflow); + TNode<IntPtrT> TryIntPtrMul(TNode<IntPtrT> a, TNode<IntPtrT> b, + Label* if_overflow); TNode<Int32T> TryInt32Mul(TNode<Int32T> a, TNode<Int32T> b, Label* if_overflow); TNode<Smi> TrySmiAdd(TNode<Smi> a, TNode<Smi> b, Label* if_overflow); @@ -772,11 +774,18 @@ class V8_EXPORT_PRIVATE CodeStubAssembler void GotoIfNumber(TNode<Object> value, Label* is_number); TNode<Number> SmiToNumber(TNode<Smi> v) { return v; } + // BigInt operations. + void GotoIfLargeBigInt(TNode<BigInt> bigint, Label* true_label); + TNode<Number> BitwiseOp(TNode<Word32T> left32, TNode<Word32T> right32, Operation bitwise_op); TNode<Number> BitwiseSmiOp(TNode<Smi> left32, TNode<Smi> right32, Operation bitwise_op); + // Align the value to kObjectAlignment8GbHeap if V8_COMPRESS_POINTERS_8GB is + // defined. + TNode<IntPtrT> AlignToAllocationAlignment(TNode<IntPtrT> value); + // Allocate an object of the given size. TNode<HeapObject> AllocateInNewSpace( TNode<IntPtrT> size, AllocationFlags flags = AllocationFlag::kNone); @@ -1078,10 +1087,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler void GotoIfForceSlowPath(Label* if_true); // - // Caged pointer related functionality. + // Sandboxed pointer related functionality. // - // Load a caged pointer value from an object. + // Load a sandboxed pointer value from an object. TNode<RawPtrT> LoadSandboxedPointerFromObject(TNode<HeapObject> object, int offset) { return LoadSandboxedPointerFromObject(object, IntPtrConstant(offset)); } @@ -1090,7 +1099,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler TNode<RawPtrT> LoadSandboxedPointerFromObject(TNode<HeapObject> object, TNode<IntPtrT> offset); - // Stored a caged pointer value to an object. + // Store a sandboxed pointer value to an object.
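On AlignToAllocationAlignment and the ALIGN_TO_ALLOCATION_ALIGNMENT uses earlier in this diff: both round a size up to the heap's allocation alignment so objects and mementos keep the required boundary under V8_COMPRESS_POINTERS_8GB. The underlying computation is the standard power-of-two align-up, sketched here with an assumed 8-byte constant rather than V8's build-dependent one:

#include <cstdint>

constexpr uintptr_t kAllocationAlignment = 8;  // assumed; configuration-dependent in V8

// Round `value` up to the next multiple of kAllocationAlignment.
// Only valid because the alignment is a power of two.
constexpr uintptr_t AlignUp(uintptr_t value) {
  return (value + kAllocationAlignment - 1) & ~(kAllocationAlignment - 1);
}

static_assert(AlignUp(0) == 0, "already aligned");
static_assert(AlignUp(9) == 16, "rounds up, never down");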
void StoreSandboxedPointerToObject(TNode<HeapObject> object, int offset, TNode<RawPtrT> pointer) { StoreSandboxedPointerToObject(object, IntPtrConstant(offset), pointer); } @@ -1102,6 +1111,27 @@ class V8_EXPORT_PRIVATE CodeStubAssembler TNode<RawPtrT> EmptyBackingStoreBufferConstant(); + // + // Bounded size related functionality. + // + + // Load a bounded size value from an object. + TNode<UintPtrT> LoadBoundedSizeFromObject(TNode<HeapObject> object, + int offset) { + return LoadBoundedSizeFromObject(object, IntPtrConstant(offset)); + } + + TNode<UintPtrT> LoadBoundedSizeFromObject(TNode<HeapObject> object, + TNode<IntPtrT> offset); + + // Store a bounded size value to an object. + void StoreBoundedSizeToObject(TNode<HeapObject> object, int offset, + TNode<UintPtrT> value) { + StoreBoundedSizeToObject(object, IntPtrConstant(offset), value); + } + + void StoreBoundedSizeToObject(TNode<HeapObject> object, TNode<IntPtrT> offset, + TNode<UintPtrT> value); // // ExternalPointerT-related functionality. // @@ -1451,6 +1481,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler Label* if_hash_not_computed = nullptr); TNode<Uint32T> LoadNameHashAssumeComputed(TNode<Name> name); + // Load the Name::RawHash() value of a name as a uint32 value. Follows + // through the forwarding table. + TNode<Uint32T> LoadNameRawHash(TNode<Name> name); + // Load length field of a String object as Smi value. TNode<Smi> LoadStringLengthAsSmi(TNode<String> string); // Load length field of a String object as intptr_t value. @@ -2047,11 +2081,11 @@ class V8_EXPORT_PRIVATE CodeStubAssembler Label* if_bailout); TNode<Object> GetConstructor(TNode<Map> map); - void FindNonDefaultConstructor(TNode<Context> context, - TNode<JSFunction> this_function, - TVariable<Object>& constructor, - Label* found_default_base_ctor, - Label* found_something_else); + void FindNonDefaultConstructorOrConstruct(TNode<Context> context, + TNode<JSFunction> this_function, + TVariable<Object>& constructor, + Label* found_default_base_ctor, + Label* found_something_else); TNode<Map> GetInstanceTypeMap(InstanceType instance_type); @@ -2720,6 +2754,12 @@ class V8_EXPORT_PRIVATE CodeStubAssembler ExternalReference::address_of_shared_string_table_flag()); } + TNode<BoolT> HasHarmonySymbolAsWeakmapKeyFlag() { + return LoadRuntimeFlag( + ExternalReference:: + address_of_FLAG_harmony_symbol_as_weakmap_key()); + } + // True iff |object| is a Smi or a HeapNumber or a BigInt.
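For context on the bounded-size accessors declared above: with the sandbox enabled, byte lengths are not stored as raw words. They are kept in an encoding whose decode step structurally caps the representable value, so memory corrupted inside the sandbox can never yield an absurdly large length. One such scheme — a shift encoding, roughly in the spirit of V8's, though the constants here are made up — looks like this:

#include <cstdint>

constexpr int kBoundedSizeShift = 24;  // hypothetical; see V8's src/sandbox for the real constants

// The caller guarantees size < 2^(64 - kBoundedSizeShift), so the shift
// cannot lose bits.
constexpr uint64_t EncodeBoundedSize(uint64_t size) {
  return size << kBoundedSizeShift;
}

// A plain logical right shift: whatever bits an attacker writes into the
// field, the decoded value is always < 2^(64 - kBoundedSizeShift).
constexpr uint64_t DecodeBoundedSize(uint64_t raw) {
  return raw >> kBoundedSizeShift;
}

static_assert(DecodeBoundedSize(EncodeBoundedSize(12345)) == 12345, "round-trip");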
TNode<BoolT> IsNumeric(TNode<Object> object); @@ -3738,6 +3778,10 @@ class V8_EXPORT_PRIVATE CodeStubAssembler TNode<BoolT> IsSideEffectFreeDebuggingActive(); // JSArrayBuffer helpers + TNode<UintPtrT> LoadJSArrayBufferByteLength( + TNode<JSArrayBuffer> array_buffer); + TNode<UintPtrT> LoadJSArrayBufferMaxByteLength( + TNode<JSArrayBuffer> array_buffer); TNode<RawPtrT> LoadJSArrayBufferBackingStorePtr( TNode<JSArrayBuffer> array_buffer); void ThrowIfArrayBufferIsDetached(TNode<Context> context, @@ -3747,16 +3791,22 @@ class V8_EXPORT_PRIVATE CodeStubAssembler // JSArrayBufferView helpers TNode<JSArrayBuffer> LoadJSArrayBufferViewBuffer( TNode<JSArrayBufferView> array_buffer_view); - TNode<UintPtrT> LoadJSArrayBufferViewRawByteLength( + TNode<UintPtrT> LoadJSArrayBufferViewByteLength( TNode<JSArrayBufferView> array_buffer_view); - + void StoreJSArrayBufferViewByteLength( + TNode<JSArrayBufferView> array_buffer_view, TNode<UintPtrT> value); TNode<UintPtrT> LoadJSArrayBufferViewByteOffset( TNode<JSArrayBufferView> array_buffer_view); + void StoreJSArrayBufferViewByteOffset( + TNode<JSArrayBufferView> array_buffer_view, TNode<UintPtrT> value); void ThrowIfArrayBufferViewBufferIsDetached( TNode<Context> context, TNode<JSArrayBufferView> array_buffer_view, const char* method_name); // JSTypedArray helpers + TNode<UintPtrT> LoadJSTypedArrayLength(TNode<JSTypedArray> typed_array); + void StoreJSTypedArrayLength(TNode<JSTypedArray> typed_array, + TNode<UintPtrT> value); TNode<UintPtrT> LoadJSTypedArrayLengthAndCheckDetached( TNode<JSTypedArray> typed_array, Label* detached); // Helper for length tracking JSTypedArrays and JSTypedArrays backed by diff --git a/deps/v8/src/codegen/compiler.cc b/deps/v8/src/codegen/compiler.cc index a0636afc787c30..5431deb83e743d 100644 --- a/deps/v8/src/codegen/compiler.cc +++ b/deps/v8/src/codegen/compiler.cc @@ -176,13 +176,14 @@ class CompilerTracer : public AllStatic { function->DebugNameCStr().get(), osr_offset.ToInt(), ToString(mode)); } - static void TraceCompilationStats(Isolate* isolate, - OptimizedCompilationInfo* info, - double ms_creategraph, double ms_optimize, - double ms_codegen) { + static void TraceFinishTurbofanCompile(Isolate* isolate, + OptimizedCompilationInfo* info, + double ms_creategraph, + double ms_optimize, + double ms_codegen) { if (!v8_flags.trace_opt || !info->IsOptimizing()) return; CodeTracer::Scope scope(isolate->GetCodeTracer()); - PrintTracePrefix(scope, "optimizing", info); + PrintTracePrefix(scope, "completed compiling", info); if (info->is_osr()) PrintF(scope.file(), " OSR"); PrintF(scope.file(), " - took %0.3f, %0.3f, %0.3f ms", ms_creategraph, ms_optimize, ms_codegen); @@ -194,7 +195,7 @@ class CompilerTracer : public AllStatic { double ms_timetaken) { if (!v8_flags.trace_baseline) return; CodeTracer::Scope scope(isolate->GetCodeTracer()); - PrintTracePrefix(scope, "compiling", shared, CodeKind::BASELINE); + PrintTracePrefix(scope, "completed compiling", shared, CodeKind::BASELINE); PrintF(scope.file(), " - took %0.3f ms", ms_timetaken); PrintTraceSuffix(scope); } @@ -525,7 +526,7 @@ void TurbofanCompilationJob::RecordCompilationStats(ConcurrencyMode mode, double ms_creategraph = time_taken_to_prepare_.InMillisecondsF(); double ms_optimize = time_taken_to_execute_.InMillisecondsF(); double ms_codegen = time_taken_to_finalize_.InMillisecondsF(); - CompilerTracer::TraceCompilationStats( + CompilerTracer::TraceFinishTurbofanCompile( isolate, compilation_info(), ms_creategraph, ms_optimize, ms_codegen); if (v8_flags.trace_opt_stats) 
{ static double compilation_time = 0.0; @@ -1074,8 +1075,9 @@ bool CompileTurbofan_Concurrent(Isolate* isolate, TimerEventScope<TimerEventRecompileSynchronous> timer(isolate); RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeConcurrentPrepare); - TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), - "V8.OptimizeConcurrentPrepare"); + TRACE_EVENT_WITH_FLOW0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), + "V8.OptimizeConcurrentPrepare", job.get(), + TRACE_EVENT_FLAG_FLOW_OUT); if (!PrepareJobWithHandleScope(job.get(), isolate, compilation_info, ConcurrencyMode::kConcurrent)) { @@ -1209,6 +1211,10 @@ MaybeHandle<CodeT> CompileMaglev(Isolate* isolate, Handle<JSFunction> function, auto job = maglev::MaglevCompilationJob::New(isolate, function); { + TRACE_EVENT_WITH_FLOW0( + TRACE_DISABLED_BY_DEFAULT("v8.compile"), + IsSynchronous(mode) ? "V8.MaglevPrepare" : "V8.MaglevConcurrentPrepare", + job.get(), TRACE_EVENT_FLAG_FLOW_OUT); CompilerTracer::TraceStartMaglevCompile(isolate, function, mode); CompilationJob::Status status = job->PrepareJob(isolate); CHECK_EQ(status, CompilationJob::SUCCEEDED); // TODO(v8:7700): Use status. @@ -1959,13 +1965,18 @@ void BackgroundMergeTask::SetUpOnMainThread(Isolate* isolate, Handle<String> source_text, const ScriptDetails& script_details, LanguageMode language_mode) { + DCHECK_EQ(state_, kNotStarted); + HandleScope handle_scope(isolate); CompilationCacheScript::LookupResult lookup_result = isolate->compilation_cache()->LookupScript(source_text, script_details, language_mode); Handle<Script> script; - if (!lookup_result.script().ToHandle(&script)) return; + if (!lookup_result.script().ToHandle(&script)) { + state_ = kDone; + return; + } // Any data sent to the background thread will need to be a persistent handle. persistent_handles_ = std::make_unique<PersistentHandles>(isolate); @@ -1976,15 +1987,19 @@ void BackgroundMergeTask::SetUpOnMainThread(Isolate* isolate, // from the cache, assuming the top-level SFI is still compiled by then. // Thus, there is no need to keep the Script pointer for background merging. // Do nothing in this case. + state_ = kDone; } else { DCHECK(lookup_result.toplevel_sfi().is_null()); // A background merge is required. + state_ = kPendingBackgroundWork; cached_script_ = persistent_handles_->NewHandle(*script); } } void BackgroundMergeTask::BeginMergeInBackground(LocalIsolate* isolate, Handle<Script> new_script) { + DCHECK_EQ(state_, kPendingBackgroundWork); + LocalHeap* local_heap = isolate->heap(); local_heap->AttachPersistentHandles(std::move(persistent_handles_)); LocalHandleScope handle_scope(local_heap); @@ -2057,10 +2072,14 @@ void BackgroundMergeTask::BeginMergeInBackground(LocalIsolate* isolate, if (forwarder.HasAnythingToForward()) { forwarder.IterateAndForwardPointers(); } + + state_ = kPendingForegroundWork; } Handle<SharedFunctionInfo> BackgroundMergeTask::CompleteMergeInForeground( Isolate* isolate, Handle<Script> new_script) { + DCHECK_EQ(state_, kPendingForegroundWork); + HandleScope handle_scope(isolate); ConstantPoolPointerForwarder forwarder(isolate, isolate->main_thread_local_heap()); @@ -2119,10 +2138,7 @@ Handle<SharedFunctionInfo> BackgroundMergeTask::CompleteMergeInForeground( SharedFunctionInfo::cast(maybe_toplevel_sfi.GetHeapObjectAssumeWeak()), isolate); - // Abandon the persistent handles from the background thread, so that - // future calls to HasPendingForegroundWork return false. 
- used_new_sfis_.clear(); - new_compiled_data_for_cached_sfis_.clear(); + state_ = kDone; return handle_scope.CloseAndEscape(result); } @@ -2303,14 +2319,14 @@ void BackgroundCompileTask::SourceTextAvailable( bool BackgroundDeserializeTask::ShouldMergeWithExistingScript() const { DCHECK(v8_flags.merge_background_deserialized_script_with_compilation_cache); - return background_merge_task_.HasCachedScript() && + return background_merge_task_.HasPendingBackgroundWork() && off_thread_data_.HasResult(); } bool BackgroundCompileTask::ShouldMergeWithExistingScript() const { DCHECK(v8_flags.stress_background_compile); DCHECK(!script_.is_null()); - return background_merge_task_.HasCachedScript() && + return background_merge_task_.HasPendingBackgroundWork() && jobs_to_retry_finalization_on_main_thread_.empty(); } @@ -2676,31 +2692,6 @@ bool Compiler::CompileBaseline(Isolate* isolate, Handle<JSFunction> function, return true; } -// static -bool Compiler::CompileMaglev(Isolate* isolate, Handle<JSFunction> function, - ConcurrencyMode mode, - IsCompiledScope* is_compiled_scope) { -#ifdef V8_ENABLE_MAGLEV - // Bytecode must be available for maglev compilation. - DCHECK(is_compiled_scope->is_compiled()); - // TODO(v8:7700): Support concurrent compilation. - DCHECK(IsSynchronous(mode)); - - // Maglev code needs a feedback vector. - JSFunction::EnsureFeedbackVector(isolate, function, is_compiled_scope); - - MaybeHandle<CodeT> maybe_code = Maglev::Compile(isolate, function); - Handle<CodeT> code; - if (!maybe_code.ToHandle(&code)) return false; - - DCHECK_EQ(code->kind(), CodeKind::MAGLEV); - function->set_code(*code); - return true; -#else - return false; -#endif // V8_ENABLE_MAGLEV -} - // static MaybeHandle<SharedFunctionInfo> Compiler::CompileToplevel( ParseInfo* parse_info, Handle<Script> script, Isolate* isolate, @@ -2758,10 +2749,11 @@ void Compiler::CompileOptimized(Isolate* isolate, Handle<JSFunction> function, // static MaybeHandle<SharedFunctionInfo> Compiler::CompileForLiveEdit( - ParseInfo* parse_info, Handle<Script> script, Isolate* isolate) { + ParseInfo* parse_info, Handle<Script> script, + MaybeHandle<ScopeInfo> outer_scope_info, Isolate* isolate) { IsCompiledScope is_compiled_scope; - return Compiler::CompileToplevel(parse_info, script, isolate, - &is_compiled_scope); + return v8::internal::CompileToplevel(parse_info, script, outer_scope_info, + isolate, &is_compiled_scope); } // static @@ -3290,8 +3282,6 @@ MaybeHandle<SharedFunctionInfo> CompileScriptOnMainThread( if (!maybe_script.ToHandle(&script)) { script = NewScript(isolate, &parse_info, source, script_details, natives); } - DCHECK_IMPLIES(parse_info.flags().collect_type_profile(), - script->IsUserJavaScript()); DCHECK_EQ(parse_info.flags().is_repl_mode(), script->is_repl_mode()); return Compiler::CompileToplevel(&parse_info, script, isolate, @@ -3939,8 +3929,9 @@ void Compiler::FinalizeTurbofanCompilationJob(TurbofanCompilationJob* job, TimerEventScope<TimerEventRecompileSynchronous> timer(isolate); RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeConcurrentFinalize); - TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), - "V8.OptimizeConcurrentFinalize"); + TRACE_EVENT_WITH_FLOW0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), + "V8.OptimizeConcurrentFinalize", job, + TRACE_EVENT_FLAG_FLOW_IN); Handle<JSFunction> function = compilation_info->closure(); Handle<SharedFunctionInfo> shared = compilation_info->shared_info(); @@ -4020,9 +4011,9 @@ void Compiler::FinalizeMaglevCompilationJob(maglev::MaglevCompilationJob* job, // Note the 
finalized Code object has already been installed on the // function by MaglevCompilationJob::FinalizeJobImpl. - const bool kIsContextSpecializing = false; OptimizedCodeCache::Insert(isolate, *function, BytecodeOffset::None(), - function->code(), kIsContextSpecializing); + function->code(), + job->specialize_to_function_context()); // Reset ticks just after installation since ticks accumulated in lower // tiers use a different (lower) budget than ticks collected in Maglev diff --git a/deps/v8/src/codegen/compiler.h b/deps/v8/src/codegen/compiler.h index f5f3c5beb842b8..7a0d18a4524a3e 100644 --- a/deps/v8/src/codegen/compiler.h +++ b/deps/v8/src/codegen/compiler.h @@ -89,10 +89,6 @@ class V8_EXPORT_PRIVATE Compiler : public AllStatic { ClearExceptionFlag flag, IsCompiledScope* is_compiled_scope); - static bool CompileMaglev(Isolate* isolate, Handle<JSFunction> function, - ConcurrencyMode mode, - IsCompiledScope* is_compiled_scope); - static void CompileOptimized(Isolate* isolate, Handle<JSFunction> function, ConcurrencyMode mode, CodeKind code_kind); @@ -105,7 +101,7 @@ class V8_EXPORT_PRIVATE Compiler : public AllStatic { V8_WARN_UNUSED_RESULT static MaybeHandle<SharedFunctionInfo> CompileForLiveEdit(ParseInfo* parse_info, Handle<Script> script, - Isolate* isolate); + MaybeHandle<ScopeInfo> outer_scope_info, Isolate* isolate); // Collect source positions for a function that has already been compiled to // bytecode, but for which source positions were not collected (e.g. because diff --git a/deps/v8/src/codegen/external-reference.cc b/deps/v8/src/codegen/external-reference.cc index 542be964001ddc..f0c0cd32bbf1dc 100644 --- a/deps/v8/src/codegen/external-reference.cc +++ b/deps/v8/src/codegen/external-reference.cc @@ -585,6 +585,11 @@ ExternalReference ExternalReference::address_of_log_or_trace_osr() { return ExternalReference(&v8_flags.log_or_trace_osr); } +ExternalReference +ExternalReference::address_of_FLAG_harmony_symbol_as_weakmap_key() { + return ExternalReference(&FLAG_harmony_symbol_as_weakmap_key); +} + ExternalReference ExternalReference::address_of_builtin_subclassing_flag() { return ExternalReference(&v8_flags.builtin_subclassing); } @@ -1013,6 +1018,50 @@ static uint32_t ComputeSeededIntegerHash(Isolate* isolate, int32_t key) { } FUNCTION_REFERENCE(compute_integer_hash, ComputeSeededIntegerHash) + +enum LookupMode { kFindExisting, kFindInsertionEntry }; +template <typename Dictionary, LookupMode mode> +static size_t NameDictionaryLookupForwardedString(Isolate* isolate, + Address raw_dict, + Address raw_key) { + // This function cannot allocate, but there is a HandleScope because it needs + // to pass Handle<Name> to the dictionary methods. + DisallowGarbageCollection no_gc; + HandleScope handle_scope(isolate); + + Handle<String> key(String::cast(Object(raw_key)), isolate); + // This function should only be used as the slow path for forwarded strings. + DCHECK(Name::IsForwardingIndex(key->raw_hash_field())); + + Dictionary dict = Dictionary::cast(Object(raw_dict)); + ReadOnlyRoots roots(isolate); + uint32_t hash = key->hash(); + InternalIndex entry = mode == kFindExisting + ? 
dict.FindEntry(isolate, roots, key, hash) + : dict.FindInsertionEntry(isolate, roots, hash); + return entry.raw_value(); +} + +FUNCTION_REFERENCE( + name_dictionary_lookup_forwarded_string, + (NameDictionaryLookupForwardedString<NameDictionary, kFindExisting>)) +FUNCTION_REFERENCE( + name_dictionary_find_insertion_entry_forwarded_string, + (NameDictionaryLookupForwardedString<NameDictionary, kFindInsertionEntry>)) +FUNCTION_REFERENCE( + global_dictionary_lookup_forwarded_string, + (NameDictionaryLookupForwardedString<GlobalDictionary, kFindExisting>)) +FUNCTION_REFERENCE(global_dictionary_find_insertion_entry_forwarded_string, + (NameDictionaryLookupForwardedString<GlobalDictionary, + kFindInsertionEntry>)) +FUNCTION_REFERENCE( + name_to_index_hashtable_lookup_forwarded_string, + (NameDictionaryLookupForwardedString<NameToIndexHashTable, kFindExisting>)) +FUNCTION_REFERENCE( + name_to_index_hashtable_find_insertion_entry_forwarded_string, + (NameDictionaryLookupForwardedString<NameToIndexHashTable, + kFindInsertionEntry>)) + FUNCTION_REFERENCE(copy_fast_number_jsarray_elements_to_typed_array, CopyFastNumberJSArrayElementsToTypedArray) FUNCTION_REFERENCE(copy_typed_array_elements_to_typed_array, @@ -1022,6 +1071,8 @@ FUNCTION_REFERENCE(try_string_to_index_or_lookup_existing, StringTable::TryStringToIndexOrLookupExisting) FUNCTION_REFERENCE(string_from_forward_table, StringForwardingTable::GetForwardStringAddress) +FUNCTION_REFERENCE(raw_hash_from_forward_table, + StringForwardingTable::GetRawHashStatic) FUNCTION_REFERENCE(string_to_array_index_function, String::ToArrayIndex) FUNCTION_REFERENCE(array_indexof_includes_smi_or_object, ArrayIndexOfIncludesSmiOrObject) diff --git a/deps/v8/src/codegen/external-reference.h b/deps/v8/src/codegen/external-reference.h index b6df8547f51d90..8e14a8b208e968 100644 --- a/deps/v8/src/codegen/external-reference.h +++ b/deps/v8/src/codegen/external-reference.h @@ -98,7 +98,9 @@ class StatsCounter; V(abort_with_reason, "abort_with_reason") \ V(address_of_log_or_trace_osr, "v8_flags.log_or_trace_osr") \ V(address_of_FLAG_harmony_regexp_unicode_sets, \ - "v8_flags.harmony_regexp_unicdoe_sets") \ + "v8_flags.harmony_regexp_unicode_sets") \ + V(address_of_FLAG_harmony_symbol_as_weakmap_key, \ + "v8_flags.harmony_symbol_as_weakmap_key") \ V(address_of_builtin_subclassing_flag, "v8_flags.builtin_subclassing") \ V(address_of_double_abs_constant, "double_absolute_constant") \ V(address_of_double_neg_constant, "double_negate_constant") \ @@ -208,6 +210,19 @@ class StatsCounter; V(try_string_to_index_or_lookup_existing, \ "try_string_to_index_or_lookup_existing") \ V(string_from_forward_table, "string_from_forward_table") \ + V(raw_hash_from_forward_table, "raw_hash_from_forward_table") \ + V(name_dictionary_lookup_forwarded_string, \ + "name_dictionary_lookup_forwarded_string") \ + V(name_dictionary_find_insertion_entry_forwarded_string, \ + "name_dictionary_find_insertion_entry_forwarded_string") \ + V(global_dictionary_lookup_forwarded_string, \ + "global_dictionary_lookup_forwarded_string") \ + V(global_dictionary_find_insertion_entry_forwarded_string, \ + "global_dictionary_find_insertion_entry_forwarded_string") \ + V(name_to_index_hashtable_lookup_forwarded_string, \ + "name_to_index_hashtable_lookup_forwarded_string") \ + V(name_to_index_hashtable_find_insertion_entry_forwarded_string, \ + "name_to_index_hashtable_find_insertion_entry_forwarded_string") \ IF_WASM(V, wasm_call_trap_callback_for_testing, \ "wasm::call_trap_callback_for_testing") \ IF_WASM(V, 
wasm_f32_ceil, "wasm::f32_ceil_wrapper") \ diff --git a/deps/v8/src/codegen/ia32/assembler-ia32-inl.h b/deps/v8/src/codegen/ia32/assembler-ia32-inl.h index e1d001ed89fe41..f03b032ec2fad3 100644 --- a/deps/v8/src/codegen/ia32/assembler-ia32-inl.h +++ b/deps/v8/src/codegen/ia32/assembler-ia32-inl.h @@ -53,10 +53,8 @@ bool CpuFeatures::SupportsOptimizer() { return true; } void RelocInfo::apply(intptr_t delta) { DCHECK_EQ(kApplyMask, (RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | - RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) | - RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY))); - if (IsRuntimeEntry(rmode_) || IsCodeTarget(rmode_) || - IsOffHeapTarget(rmode_)) { + RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET))); + if (IsCodeTarget(rmode_) || IsOffHeapTarget(rmode_)) { base::WriteUnalignedValue(pc_, base::ReadUnalignedValue<int32_t>(pc_) - delta); } else if (IsInternalReference(rmode_)) { @@ -67,7 +65,7 @@ void RelocInfo::apply(intptr_t delta) { } Address RelocInfo::target_address() { - DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_)); + DCHECK(IsCodeTarget(rmode_) || IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } @@ -81,22 +79,19 @@ Address RelocInfo::constant_pool_entry_address() { UNREACHABLE(); } int RelocInfo::target_address_size() { return Assembler::kSpecialTargetSize; } HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); } Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); } void RelocInfo::set_target_object(Heap* heap, HeapObject target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); WriteUnalignedValue(pc_, target.ptr()); if (icache_flush_mode != SKIP_ICACHE_FLUSH) { FlushInstructionCache(pc_, sizeof(Address)); @@ -132,20 +127,6 @@ Address RelocInfo::target_internal_reference_address() { Builtin RelocInfo::target_builtin_at(Assembler* origin) { UNREACHABLE(); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return ReadUnalignedValue<Address>(pc_); -} - -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) { - set_target_address(target, write_barrier_mode, icache_flush_mode); - } -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); @@ -155,8 +136,7 @@ void RelocInfo::WipeOut() { if (IsFullEmbeddedObject(rmode_) || IsExternalReference(rmode_) || IsInternalReference(rmode_)) { WriteUnalignedValue(pc_, kNullAddress); - } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || - IsOffHeapTarget(rmode_)) { + } else if (IsCodeTarget(rmode_) || IsOffHeapTarget(rmode_)) { // Effectively write zero into the 
relocation. Assembler::set_target_address_at(pc_, constant_pool_, pc_ + sizeof(int32_t)); diff --git a/deps/v8/src/codegen/ia32/assembler-ia32.cc b/deps/v8/src/codegen/ia32/assembler-ia32.cc index b55a57d45cf4cf..ab9a895e9d6f16 100644 --- a/deps/v8/src/codegen/ia32/assembler-ia32.cc +++ b/deps/v8/src/codegen/ia32/assembler-ia32.cc @@ -204,8 +204,7 @@ void Displacement::init(Label* L, Type type) { const int RelocInfo::kApplyMask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | - RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) | - RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); + RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET); bool RelocInfo::IsCodedSpecially() { // The deserializer needs to know whether a pointer is specially coded. Being @@ -1625,11 +1624,7 @@ void Assembler::call(Address entry, RelocInfo::Mode rmode) { EnsureSpace ensure_space(this); DCHECK(!RelocInfo::IsCodeTarget(rmode)); EMIT(0xE8); - if (RelocInfo::IsRuntimeEntry(rmode)) { - emit(entry, rmode); - } else { - emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode); - } + emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode); } void Assembler::wasm_call(Address entry, RelocInfo::Mode rmode) { @@ -1702,7 +1697,7 @@ void Assembler::jmp(Address entry, RelocInfo::Mode rmode) { EnsureSpace ensure_space(this); DCHECK(!RelocInfo::IsCodeTarget(rmode)); EMIT(0xE9); - if (RelocInfo::IsRuntimeEntry(rmode) || RelocInfo::IsWasmCall(rmode)) { + if (RelocInfo::IsWasmCall(rmode)) { emit(entry, rmode); } else { emit(entry - (reinterpret_cast<Address>(pc_) + sizeof(int32_t)), rmode); @@ -1772,11 +1767,7 @@ void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode) { // 0000 1111 1000 tttn #32-bit disp. EMIT(0x0F); EMIT(0x80 | cc); - if (RelocInfo::IsRuntimeEntry(rmode)) { - emit(reinterpret_cast<uint32_t>(entry), rmode); - } else { - emit(entry - (pc_ + sizeof(int32_t)), rmode); - } + emit(entry - (pc_ + sizeof(int32_t)), rmode); } void Assembler::j(Condition cc, Handle<Code> code, RelocInfo::Mode rmode) { @@ -3395,8 +3386,7 @@ void Assembler::db(uint8_t data) { void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { EnsureSpace ensure_space(this); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } emit(data); @@ -3404,10 +3394,7 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { void Assembler::dq(uint64_t data, RelocInfo::Mode rmode) { EnsureSpace ensure_space(this); - if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode)); - RecordRelocInfo(rmode); - } + DCHECK(RelocInfo::IsNoInfo(rmode)); emit_q(data); } diff --git a/deps/v8/src/codegen/ia32/assembler-ia32.h b/deps/v8/src/codegen/ia32/assembler-ia32.h index bd7ea311c8705e..2cbc775ef5c8f6 100644 --- a/deps/v8/src/codegen/ia32/assembler-ia32.h +++ b/deps/v8/src/codegen/ia32/assembler-ia32.h @@ -1670,6 +1670,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { void emit_sse_operand(XMMRegister dst, Register src); Address addr_at(int pos) { + DCHECK_GE(pos, 0); + DCHECK_LT(pos, pc_offset()); return reinterpret_cast<Address>(buffer_start_ + pos); } diff --git a/deps/v8/src/codegen/ia32/macro-assembler-ia32.cc b/deps/v8/src/codegen/ia32/macro-assembler-ia32.cc index eeb33d2aedc077..e5bd88e80ad1ca 100644 --- a/deps/v8/src/codegen/ia32/macro-assembler-ia32.cc +++ 
b/deps/v8/src/codegen/ia32/macro-assembler-ia32.cc @@ -848,7 +848,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( j(not_zero, flags_need_processing); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, XMMRegister saved_feedback_vector) { ASM_CODE_COMMENT(this); Label maybe_has_optimized_code, maybe_needs_logging; diff --git a/deps/v8/src/codegen/ia32/macro-assembler-ia32.h b/deps/v8/src/codegen/ia32/macro-assembler-ia32.h index bc71da2dd0b6d3..a55beb1a4e1f23 100644 --- a/deps/v8/src/codegen/ia32/macro-assembler-ia32.h +++ b/deps/v8/src/codegen/ia32/macro-assembler-ia32.h @@ -564,7 +564,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, XMMRegister saved_feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( + void OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, XMMRegister saved_feedback_vector); // Abort execution if argument is not a smi, enabled via --debug-code. diff --git a/deps/v8/src/codegen/interface-descriptors.h b/deps/v8/src/codegen/interface-descriptors.h index e8b8a53a9905f2..da5bc9072c6e6f 100644 --- a/deps/v8/src/codegen/interface-descriptors.h +++ b/deps/v8/src/codegen/interface-descriptors.h @@ -72,7 +72,7 @@ namespace internal { V(CopyDataPropertiesWithExcludedPropertiesOnStack) \ V(CppBuiltinAdaptor) \ V(FastNewObject) \ - V(FindNonDefaultConstructor) \ + V(FindNonDefaultConstructorOrConstruct) \ V(ForInPrepare) \ V(GetIteratorStackParameter) \ V(GetProperty) \ @@ -1817,9 +1817,9 @@ class InterpreterCEntry2Descriptor static constexpr auto registers(); }; -class FindNonDefaultConstructorDescriptor +class FindNonDefaultConstructorOrConstructDescriptor : public StaticCallInterfaceDescriptor< - FindNonDefaultConstructorDescriptor> { + FindNonDefaultConstructorOrConstructDescriptor> { public: DEFINE_RESULT_AND_PARAMETERS(2, kThisFunction, kNewTarget) DEFINE_RESULT_AND_PARAMETER_TYPES( @@ -1827,7 +1827,7 @@ class FindNonDefaultConstructorDescriptor MachineType::AnyTagged(), // result 2 (constructor_or_instance) MachineType::AnyTagged(), // kThisFunction MachineType::AnyTagged()) // kNewTarget - DECLARE_DESCRIPTOR(FindNonDefaultConstructorDescriptor) + DECLARE_DESCRIPTOR(FindNonDefaultConstructorOrConstructDescriptor) }; class ForInPrepareDescriptor diff --git a/deps/v8/src/codegen/loong64/assembler-loong64-inl.h b/deps/v8/src/codegen/loong64/assembler-loong64-inl.h index 6a77068d0b3d8d..0fe72de2ff2b84 100644 --- a/deps/v8/src/codegen/loong64/assembler-loong64-inl.h +++ b/deps/v8/src/codegen/loong64/assembler-loong64-inl.h @@ -41,8 +41,7 @@ void RelocInfo::apply(intptr_t delta) { } Address RelocInfo::target_address() { - DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) || - IsWasmCall(rmode_)); + DCHECK(IsCodeTargetMode(rmode_) || IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } @@ -87,19 +86,13 @@ void Assembler::deserialization_set_target_internal_reference_at( } HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); - } + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); return HeapObject::cast( 
Object(Assembler::target_address_at(pc_, constant_pool_))); } Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { - if (IsDataEmbeddedObject(rmode_)) { - return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); - } else if (IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)) { + if (IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)) { return Handle<HeapObject>(reinterpret_cast<Address*>( Assembler::target_address_at(pc_, constant_pool_))); } else { @@ -111,15 +104,9 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { void RelocInfo::set_target_object(Heap* heap, HeapObject target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - WriteUnalignedValue(pc_, target.ptr()); - // No need to flush icache since no instructions were changed. - } else { - Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), - icache_flush_mode); - } + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); + Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), + icache_flush_mode); if (!host().is_null() && !v8_flags.disable_write_barriers) { WriteBarrierForCode(host(), this, target, write_barrier_mode); } @@ -161,19 +148,6 @@ Handle<Code> Assembler::relative_code_target_object_handle_at( Builtin RelocInfo::target_builtin_at(Assembler* origin) { UNREACHABLE(); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return target_address(); -} - -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) - set_target_address(target, write_barrier_mode, icache_flush_mode); -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); @@ -181,8 +155,8 @@ Address RelocInfo::target_off_heap_target() { void RelocInfo::WipeOut() { DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) || - IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) || - IsInternalReference(rmode_) || IsOffHeapTarget(rmode_)); + IsExternalReference(rmode_) || IsInternalReference(rmode_) || + IsOffHeapTarget(rmode_)); if (IsInternalReference(rmode_)) { Memory<Address>(pc_) = kNullAddress; } else { diff --git a/deps/v8/src/codegen/loong64/assembler-loong64.cc b/deps/v8/src/codegen/loong64/assembler-loong64.cc index da8e626f3da4c2..b636538f779aae 100644 --- a/deps/v8/src/codegen/loong64/assembler-loong64.cc +++ b/deps/v8/src/codegen/loong64/assembler-loong64.cc @@ -2154,8 +2154,7 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { CheckBuffer(); } if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uint32_t*>(pc_) = data; @@ -2167,8 +2166,7 @@ void Assembler::dq(uint64_t data, RelocInfo::Mode rmode) { CheckBuffer(); } if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uint64_t*>(pc_) = data; diff --git a/deps/v8/src/codegen/loong64/macro-assembler-loong64.cc 
b/deps/v8/src/codegen/loong64/macro-assembler-loong64.cc index 2822a0e205ef0d..30fd4934a920f8 100644 --- a/deps/v8/src/codegen/loong64/macro-assembler-loong64.cc +++ b/deps/v8/src/codegen/loong64/macro-assembler-loong64.cc @@ -476,6 +476,19 @@ void TurboAssembler::Mulh_d(Register rd, Register rj, const Operand& rk) { } } +void TurboAssembler::Mulh_du(Register rd, Register rj, const Operand& rk) { + if (rk.is_reg()) { + mulh_du(rd, rj, rk.rm()); + } else { + // li handles the relocation. + UseScratchRegisterScope temps(this); + Register scratch = temps.Acquire(); + DCHECK(rj != scratch); + li(scratch, rk); + mulh_du(rd, rj, scratch); + } +} + void TurboAssembler::Div_w(Register rd, Register rj, const Operand& rk) { if (rk.is_reg()) { div_w(rd, rj, rk.rm()); @@ -2508,6 +2521,35 @@ void TurboAssembler::LoadRootRegisterOffset(Register destination, } } +MemOperand TurboAssembler::ExternalReferenceAsOperand( + ExternalReference reference, Register scratch) { + if (root_array_available_ && options().enable_root_relative_access) { + int64_t offset = + RootRegisterOffsetForExternalReference(isolate(), reference); + if (is_int32(offset)) { + return MemOperand(kRootRegister, static_cast<int32_t>(offset)); + } + } + if (root_array_available_ && options().isolate_independent_code) { + if (IsAddressableThroughRootRegister(isolate(), reference)) { + // Some external references can be efficiently loaded as an offset from + // kRootRegister. + intptr_t offset = + RootRegisterOffsetForExternalReference(isolate(), reference); + CHECK(is_int32(offset)); + return MemOperand(kRootRegister, static_cast<int32_t>(offset)); + } else { + // Otherwise, do a memory load from the external reference table. + Ld_d(scratch, MemOperand(kRootRegister, + RootRegisterOffsetForExternalReferenceTableEntry( + isolate(), reference))); + return MemOperand(scratch, 0); + } + } + li(scratch, reference); + return MemOperand(scratch, 0); +} + void TurboAssembler::Jump(Register target, Condition cond, Register rj, const Operand& rk) { BlockTrampolinePoolScope block_trampoline_pool(this); @@ -3292,6 +3334,38 @@ void TurboAssembler::MulOverflow_w(Register dst, Register left, xor_(overflow, overflow, scratch2); } +void TurboAssembler::MulOverflow_d(Register dst, Register left, + const Operand& right, Register overflow) { + ASM_CODE_COMMENT(this); + BlockTrampolinePoolScope block_trampoline_pool(this); + UseScratchRegisterScope temps(this); + Register scratch = temps.Acquire(); + Register scratch2 = temps.Acquire(); + Register right_reg = no_reg; + if (!right.is_reg()) { + li(scratch, Operand(right)); + right_reg = scratch; + } else { + right_reg = right.rm(); + } + + DCHECK(left != scratch2 && right_reg != scratch2 && dst != scratch2 && + overflow != scratch2); + DCHECK(overflow != left && overflow != right_reg); + + if (dst == left || dst == right_reg) { + Mul_d(scratch2, left, right_reg); + Mulh_d(overflow, left, right_reg); + mov(dst, scratch2); + } else { + Mul_d(dst, left, right_reg); + Mulh_d(overflow, left, right_reg); + } + + srai_d(scratch2, dst, 63); + xor_(overflow, overflow, scratch2); +} + void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments, SaveFPRegsMode save_doubles) { ASM_CODE_COMMENT(this); @@ -4270,7 +4344,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Branch(flags_need_processing, ne, scratch, Operand(zero_reg)); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, 
Register feedback_vector) { ASM_CODE_COMMENT(this); DCHECK(!AreAliased(flags, feedback_vector)); diff --git a/deps/v8/src/codegen/loong64/macro-assembler-loong64.h b/deps/v8/src/codegen/loong64/macro-assembler-loong64.h index ac0d4b36764c9e..57395c903ab65b 100644 --- a/deps/v8/src/codegen/loong64/macro-assembler-loong64.h +++ b/deps/v8/src/codegen/loong64/macro-assembler-loong64.h @@ -160,6 +160,15 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { void LoadRootRegisterOffset(Register destination, intptr_t offset) final; void LoadRootRelative(Register destination, int32_t offset) final; + // Operand pointing to an external reference. + // May emit code to set up the scratch register. The operand is + // only guaranteed to be correct as long as the scratch register + // isn't changed. + // If the operand is used more than once, use a scratch register + // that is guaranteed not to be clobbered. + MemOperand ExternalReferenceAsOperand(ExternalReference reference, + Register scratch); + inline void Move(Register output, MemOperand operand) { Ld_d(output, operand); } @@ -384,6 +393,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { DEFINE_INSTRUCTION(Mulh_wu) DEFINE_INSTRUCTION(Mul_d) DEFINE_INSTRUCTION(Mulh_d) + DEFINE_INSTRUCTION(Mulh_du) DEFINE_INSTRUCTION2(Div_w) DEFINE_INSTRUCTION2(Div_d) DEFINE_INSTRUCTION2(Div_wu) @@ -636,9 +646,11 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { // overflow occured, otherwise it is zero or positive void SubOverflow_d(Register dst, Register left, const Operand& right, Register overflow); - // MulOverflow_w sets overflow register to zero if no overflow occured + // MulOverflow_{w/d} set overflow register to zero if no overflow occurred void MulOverflow_w(Register dst, Register left, const Operand& right, Register overflow); + void MulOverflow_d(Register dst, Register left, const Operand& right, + Register overflow); // TODO(LOONG_dev): LOONG64 Remove this constant // Number of instructions needed for calculation of switch table entry address @@ -1054,8 +1066,8 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, Register feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, - Register feedback_vector); + void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, + Register feedback_vector); template <typename Field> void DecodeField(Register dst, Register src) { diff --git a/deps/v8/src/codegen/machine-type.cc b/deps/v8/src/codegen/machine-type.cc index bde4742b8d8f00..a633a1020b1211 100644 --- a/deps/v8/src/codegen/machine-type.cc +++ b/deps/v8/src/codegen/machine-type.cc @@ -79,6 +79,10 @@ std::ostream& operator<<(std::ostream& os, MachineSemantic type) { return os << "kTypeInt64"; case MachineSemantic::kUint64: return os << "kTypeUint64"; + case MachineSemantic::kSignedBigInt64: + return os << "kTypeSignedBigInt64"; + case MachineSemantic::kUnsignedBigInt64: + return os << "kTypeUnsignedBigInt64"; case MachineSemantic::kNumber: return os << "kTypeNumber"; case MachineSemantic::kAny: diff --git a/deps/v8/src/codegen/machine-type.h b/deps/v8/src/codegen/machine-type.h index 7dcca6db11fc36..29d7de75838e5e 100644 --- a/deps/v8/src/codegen/machine-type.h +++ b/deps/v8/src/codegen/machine-type.h @@ -86,6 +86,8 @@ enum class MachineSemantic : uint8_t { kUint32, kInt64, kUint64, + kSignedBigInt64, + kUnsignedBigInt64,
kNumber, kAny }; @@ -152,10 +154,6 @@ class MachineType { constexpr bool IsCompressedPointer() const { return representation() == MachineRepresentation::kCompressedPointer; } - constexpr static MachineRepresentation TaggedRepresentation() { - return (kTaggedSize == 4) ? MachineRepresentation::kWord32 - : MachineRepresentation::kWord64; - } constexpr static MachineRepresentation PointerRepresentation() { return (kSystemPointerSize == 4) ? MachineRepresentation::kWord32 : MachineRepresentation::kWord64; @@ -193,6 +191,14 @@ class MachineType { return MachineType(MachineRepresentation::kWord64, MachineSemantic::kUint64); } + constexpr static MachineType SignedBigInt64() { + return MachineType(MachineRepresentation::kWord64, + MachineSemantic::kSignedBigInt64); + } + constexpr static MachineType UnsignedBigInt64() { + return MachineType(MachineRepresentation::kWord64, + MachineSemantic::kUnsignedBigInt64); + } constexpr static MachineType Float32() { return MachineType(MachineRepresentation::kFloat32, MachineSemantic::kNumber); @@ -234,7 +240,7 @@ class MachineType { } constexpr static MachineType SandboxedPointer() { return MachineType(MachineRepresentation::kSandboxedPointer, - MachineSemantic::kNone); + MachineSemantic::kInt64); } constexpr static MachineType Bool() { return MachineType(MachineRepresentation::kBit, MachineSemantic::kBool); diff --git a/deps/v8/src/codegen/macro-assembler.h b/deps/v8/src/codegen/macro-assembler.h index aaf30dea62a81e..61b26a320f276c 100644 --- a/deps/v8/src/codegen/macro-assembler.h +++ b/deps/v8/src/codegen/macro-assembler.h @@ -136,10 +136,10 @@ class V8_NODISCARD FrameAndConstantPoolScope { : masm_(masm), type_(type), old_has_frame_(masm->has_frame()), - old_constant_pool_available_(v8_flags.enable_embedded_constant_pool && + old_constant_pool_available_(V8_EMBEDDED_CONSTANT_POOL_BOOL && masm->is_constant_pool_available()) { masm->set_has_frame(true); - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { masm->set_constant_pool_available(true); } if (type_ != StackFrame::MANUAL && type_ != StackFrame::NO_FRAME_TYPE) { @@ -150,7 +150,7 @@ class V8_NODISCARD FrameAndConstantPoolScope { ~FrameAndConstantPoolScope() { masm_->LeaveFrame(type_); masm_->set_has_frame(old_has_frame_); - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { masm_->set_constant_pool_available(old_constant_pool_available_); } } @@ -169,14 +169,14 @@ class V8_NODISCARD ConstantPoolUnavailableScope { public: explicit ConstantPoolUnavailableScope(Assembler* assembler) : assembler_(assembler), - old_constant_pool_available_(v8_flags.enable_embedded_constant_pool && + old_constant_pool_available_(V8_EMBEDDED_CONSTANT_POOL_BOOL && assembler->is_constant_pool_available()) { - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { assembler->set_constant_pool_available(false); } } ~ConstantPoolUnavailableScope() { - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { assembler_->set_constant_pool_available(old_constant_pool_available_); } } diff --git a/deps/v8/src/codegen/mips64/assembler-mips64-inl.h b/deps/v8/src/codegen/mips64/assembler-mips64-inl.h index 7ae17d08476cd0..33c1bfcf2d8694 100644 --- a/deps/v8/src/codegen/mips64/assembler-mips64-inl.h +++ b/deps/v8/src/codegen/mips64/assembler-mips64-inl.h @@ -69,7 +69,7 @@ void RelocInfo::apply(intptr_t delta) { } Address RelocInfo::target_address() { - DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || 
IsWasmCall(rmode_)); + DCHECK(IsCodeTarget(rmode_) || IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } @@ -136,37 +136,23 @@ void Assembler::deserialization_set_target_internal_reference_at( } HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); - } + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); return HeapObject::cast( Object(Assembler::target_address_at(pc_, constant_pool_))); } Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { - if (IsDataEmbeddedObject(rmode_)) { - return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); - } else { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); - return Handle<HeapObject>(reinterpret_cast<Address*>( - Assembler::target_address_at(pc_, constant_pool_))); - } + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); + return Handle<HeapObject>(reinterpret_cast<Address*>( + Assembler::target_address_at(pc_, constant_pool_))); } void RelocInfo::set_target_object(Heap* heap, HeapObject target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { - DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_) || - IsDataEmbeddedObject(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - WriteUnalignedValue(pc_, target.ptr()); - // No need to flush icache since no instructions were changed. - } else { - Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), - icache_flush_mode); - } + DCHECK(IsCodeTarget(rmode_) || IsFullEmbeddedObject(rmode_)); + Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), + icache_flush_mode); if (!host().is_null() && !v8_flags.disable_write_barriers) { WriteBarrierForCode(host(), this, target, write_barrier_mode); } @@ -205,19 +191,6 @@ Address RelocInfo::target_internal_reference_address() { Builtin RelocInfo::target_builtin_at(Assembler* origin) { UNREACHABLE(); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return target_address(); -} - -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) - set_target_address(target, write_barrier_mode, icache_flush_mode); -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); @@ -225,9 +198,8 @@ Address RelocInfo::target_off_heap_target() { void RelocInfo::WipeOut() { DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) || - IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) || - IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_) || - IsOffHeapTarget(rmode_)); + IsExternalReference(rmode_) || IsInternalReference(rmode_) || + IsInternalReferenceEncoded(rmode_) || IsOffHeapTarget(rmode_)); if (IsInternalReference(rmode_)) { Memory<Address>(pc_) = kNullAddress; } else if (IsInternalReferenceEncoded(rmode_)) { diff --git a/deps/v8/src/codegen/mips64/assembler-mips64.cc b/deps/v8/src/codegen/mips64/assembler-mips64.cc index 3ddb972a4c70e4..758a3d1664efcc 100644 --- a/deps/v8/src/codegen/mips64/assembler-mips64.cc +++ b/deps/v8/src/codegen/mips64/assembler-mips64.cc @@ -3775,8 +3775,7 @@ void Assembler::db(uint8_t data) { 
void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { CheckForEmitInForbiddenSlot(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uint32_t*>(pc_) = data; @@ -3786,8 +3785,7 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { void Assembler::dq(uint64_t data, RelocInfo::Mode rmode) { CheckForEmitInForbiddenSlot(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uint64_t*>(pc_) = data; diff --git a/deps/v8/src/codegen/mips64/macro-assembler-mips64.cc b/deps/v8/src/codegen/mips64/macro-assembler-mips64.cc index 24e6fee51aee2b..896b685c1ec8ad 100644 --- a/deps/v8/src/codegen/mips64/macro-assembler-mips64.cc +++ b/deps/v8/src/codegen/mips64/macro-assembler-mips64.cc @@ -513,6 +513,29 @@ void TurboAssembler::Dmulh(Register rd, Register rs, const Operand& rt) { } } +void TurboAssembler::Dmulhu(Register rd, Register rs, const Operand& rt) { + if (rt.is_reg()) { + if (kArchVariant == kMips64r6) { + dmuhu(rd, rs, rt.rm()); + } else { + dmultu(rs, rt.rm()); + mfhi(rd); + } + } else { + // li handles the relocation. + UseScratchRegisterScope temps(this); + Register scratch = temps.Acquire(); + DCHECK(rs != scratch); + li(scratch, rt); + if (kArchVariant == kMips64r6) { + dmuhu(rd, rs, scratch); + } else { + dmultu(rs, scratch); + mfhi(rd); + } + } +} + void TurboAssembler::Mult(Register rs, const Operand& rt) { if (rt.is_reg()) { mult(rs, rt.rm()); @@ -4227,6 +4250,35 @@ void TurboAssembler::LoadRootRegisterOffset(Register destination, } } +MemOperand TurboAssembler::ExternalReferenceAsOperand( + ExternalReference reference, Register scratch) { + if (root_array_available_ && options().enable_root_relative_access) { + int64_t offset = + RootRegisterOffsetForExternalReference(isolate(), reference); + if (is_int32(offset)) { + return MemOperand(kRootRegister, static_cast<int32_t>(offset)); + } + } + if (root_array_available_ && options().isolate_independent_code) { + if (IsAddressableThroughRootRegister(isolate(), reference)) { + // Some external references can be efficiently loaded as an offset from + // kRootRegister. + intptr_t offset = + RootRegisterOffsetForExternalReference(isolate(), reference); + CHECK(is_int32(offset)); + return MemOperand(kRootRegister, static_cast<int32_t>(offset)); + } else { + // Otherwise, do a memory load from the external reference table. 
+ Ld(scratch, MemOperand(kRootRegister, + RootRegisterOffsetForExternalReferenceTableEntry( + isolate(), reference))); + return MemOperand(scratch, 0); + } + } + li(scratch, reference); + return MemOperand(scratch, 0); +} + void TurboAssembler::Jump(Register target, Condition cond, Register rs, const Operand& rt, BranchDelaySlot bd) { BlockTrampolinePoolScope block_trampoline_pool(this); @@ -5192,6 +5244,36 @@ void TurboAssembler::MulOverflow(Register dst, Register left, xor_(overflow, overflow, scratch); } +void TurboAssembler::DMulOverflow(Register dst, Register left, + const Operand& right, Register overflow) { + ASM_CODE_COMMENT(this); + BlockTrampolinePoolScope block_trampoline_pool(this); + Register right_reg = no_reg; + Register scratch = t8; + if (!right.is_reg()) { + li(at, Operand(right)); + right_reg = at; + } else { + right_reg = right.rm(); + } + + DCHECK(left != scratch && right_reg != scratch && dst != scratch && + overflow != scratch); + DCHECK(overflow != left && overflow != right_reg); + + if (dst == left || dst == right_reg) { + Dmul(scratch, left, right_reg); + Dmulh(overflow, left, right_reg); + mov(dst, scratch); + } else { + Dmul(dst, left, right_reg); + Dmulh(overflow, left, right_reg); + } + + dsra32(scratch, dst, 31); + xor_(overflow, overflow, scratch); +} + void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments, SaveFPRegsMode save_doubles) { ASM_CODE_COMMENT(this); @@ -6315,7 +6397,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Branch(flags_need_processing, ne, scratch, Operand(zero_reg)); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, Register feedback_vector) { ASM_CODE_COMMENT(this); Label maybe_has_optimized_code, maybe_needs_logging; diff --git a/deps/v8/src/codegen/mips64/macro-assembler-mips64.h b/deps/v8/src/codegen/mips64/macro-assembler-mips64.h index b927ac112fdf6c..77897e1efd3709 100644 --- a/deps/v8/src/codegen/mips64/macro-assembler-mips64.h +++ b/deps/v8/src/codegen/mips64/macro-assembler-mips64.h @@ -220,6 +220,15 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { void LoadRootRegisterOffset(Register destination, intptr_t offset) final; void LoadRootRelative(Register destination, int32_t offset) final; + // Operand pointing to an external reference. + // May emit code to set up the scratch register. The operand is + // only guaranteed to be correct as long as the scratch register + // isn't changed. + // If the operand is used more than once, use a scratch register + // that is guaranteed not to be clobbered. + MemOperand ExternalReferenceAsOperand(ExternalReference reference, + Register scratch); + inline void Move(Register output, MemOperand operand) { Ld(output, operand); } // Jump, Call, and Ret pseudo instructions implementing inter-working. 
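The doc comment above pins down the contract of the new mips64 `ExternalReferenceAsOperand` helper. As a minimal usage sketch (a hypothetical call site, not part of this patch; `reference` and `dest` stand in for a real `ExternalReference` and destination register):

```cpp
// Hypothetical call site inside a TurboAssembler member (illustration
// only). The helper either returns MemOperand(kRootRegister, offset)
// without emitting code, or materializes an address into 'scratch' and
// returns MemOperand(scratch, 0); either way, the operand stays valid
// only while 'scratch' keeps its value.
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
MemOperand slot = ExternalReferenceAsOperand(reference, scratch);
Ld(dest, slot);  // use it once; re-derive 'slot' if scratch is reused
```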
@@ -448,6 +457,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { DEFINE_INSTRUCTION(Mulhu) DEFINE_INSTRUCTION(Dmul) DEFINE_INSTRUCTION(Dmulh) + DEFINE_INSTRUCTION(Dmulhu) DEFINE_INSTRUCTION2(Mult) DEFINE_INSTRUCTION2(Dmult) DEFINE_INSTRUCTION2(Multu) @@ -767,9 +777,11 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { // overflow occurred, otherwise it is zero or positive void DsubOverflow(Register dst, Register left, const Operand& right, Register overflow); - // MulOverflow sets overflow register to zero if no overflow occured + // [D]MulOverflow sets overflow register to zero if no overflow occurred void MulOverflow(Register dst, Register left, const Operand& right, Register overflow); + void DMulOverflow(Register dst, Register left, const Operand& right, + Register overflow); // Number of instructions needed for calculation of switch table entry address #ifdef _MIPS_ARCH_MIPS64R6 @@ -1243,8 +1255,8 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, Register feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, - Register feedback_vector); + void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, + Register feedback_vector); template <typename Field> void DecodeField(Register dst, Register src) { diff --git a/deps/v8/src/codegen/ppc/assembler-ppc-inl.h b/deps/v8/src/codegen/ppc/assembler-ppc-inl.h index 8997bab36ed8ab..898f1325209a5e 100644 --- a/deps/v8/src/codegen/ppc/assembler-ppc-inl.h +++ b/deps/v8/src/codegen/ppc/assembler-ppc-inl.h @@ -80,14 +80,14 @@ Address RelocInfo::target_internal_reference_address() { } Address RelocInfo::target_address() { - DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_)); + DCHECK(IsCodeTarget(rmode_) || IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } Address RelocInfo::target_address_address() { DCHECK(HasTargetAddressAddress()); - if (v8_flags.enable_embedded_constant_pool && + if (V8_EMBEDDED_CONSTANT_POOL_BOOL && Assembler::IsConstantPoolLoadStart(pc_)) { // We return the PC for embedded constant pool since this function is used // by the serializer and expects the address to reside within the code @@ -108,7 +108,7 @@ Address RelocInfo::target_address_address() { } Address RelocInfo::constant_pool_entry_address() { - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { DCHECK(constant_pool_); ConstantPoolEntry::Access access; if (Assembler::IsConstantPoolLoadStart(pc_, &access)) @@ -147,10 +147,8 @@ Handle<Object> Assembler::code_target_object_handle_at(Address pc, HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); - } else if (IsCompressedEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(DecompressTaggedAny( + if (IsCompressedEmbeddedObject(rmode_)) { + return HeapObject::cast(Object(V8HeapCompressionScheme::DecompressTaggedAny( cage_base, Assembler::target_compressed_address_at(pc_, constant_pool_)))); } else { @@ -166,9 +164,7 @@ Handle<HeapObject> Assembler::compressed_embedded_object_handle_at( Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if
(IsDataEmbeddedObject(rmode_)) { - return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); - } else if (IsCodeTarget(rmode_)) { + if (IsCodeTarget(rmode_)) { return Handle<HeapObject>::cast( origin->code_target_object_handle_at(pc_, constant_pool_)); } else { @@ -184,12 +180,11 @@ void RelocInfo::set_target_object(Heap* heap, HeapObject target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - WriteUnalignedValue(pc_, target.ptr()); - // No need to flush icache since no instructions were changed. - } else if (IsCompressedEmbeddedObject(rmode_)) { + if (IsCompressedEmbeddedObject(rmode_)) { Assembler::set_target_compressed_address_at( - pc_, constant_pool_, CompressTagged(target.ptr()), icache_flush_mode); + pc_, constant_pool_, + V8HeapCompressionScheme::CompressTagged(target.ptr()), + icache_flush_mode); } else { DCHECK(IsFullEmbeddedObject(rmode_)); Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), @@ -214,19 +209,6 @@ void RelocInfo::set_target_external_reference( Builtin RelocInfo::target_builtin_at(Assembler* origin) { UNREACHABLE(); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return target_address(); -} - -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) - set_target_address(target, write_barrier_mode, icache_flush_mode); -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); @@ -234,9 +216,8 @@ Address RelocInfo::target_off_heap_target() { void RelocInfo::WipeOut() { DCHECK(IsEmbeddedObjectMode(rmode_) || IsCodeTarget(rmode_) || - IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) || - IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_) || - IsOffHeapTarget(rmode_)); + IsExternalReference(rmode_) || IsInternalReference(rmode_) || + IsInternalReferenceEncoded(rmode_) || IsOffHeapTarget(rmode_)); if (IsInternalReference(rmode_)) { // Jump table entry Memory<Address>(pc_) = kNullAddress; @@ -269,7 +250,7 @@ void Assembler::UntrackBranch() { // Fetch the 32bit value from the FIXED_SEQUENCE lis/ori Address Assembler::target_address_at(Address pc, Address constant_pool) { - if (v8_flags.enable_embedded_constant_pool && constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL && constant_pool) { ConstantPoolEntry::Access access; if (IsConstantPoolLoadStart(pc, &access)) return Memory<Address>(target_constant_pool_address_at( @@ -444,7 +425,7 @@ void Assembler::deserialization_set_target_internal_reference_at( void Assembler::set_target_address_at(Address pc, Address constant_pool, Address target, ICacheFlushMode icache_flush_mode) { - if (v8_flags.enable_embedded_constant_pool && constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL && constant_pool) { ConstantPoolEntry::Access access; if (IsConstantPoolLoadStart(pc, &access)) { Memory<Address>(target_constant_pool_address_at( diff --git a/deps/v8/src/codegen/ppc/assembler-ppc.cc b/deps/v8/src/codegen/ppc/assembler-ppc.cc index 796bd09d554c62..43fda58a0c176f 100644 --- a/deps/v8/src/codegen/ppc/assembler-ppc.cc +++ b/deps/v8/src/codegen/ppc/assembler-ppc.cc @@ -167,8 +167,7 @@ bool RelocInfo::IsCodedSpecially() { } bool RelocInfo::IsInConstantPool() { - if 
(v8_flags.enable_embedded_constant_pool && - constant_pool_ != kNullAddress) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL && constant_pool_ != kNullAddress) { return Assembler::IsConstantPoolLoadStart(pc_); } return false; @@ -839,6 +838,16 @@ void Assembler::mulli(Register dst, Register src, const Operand& imm) { d_form(MULLI, dst, src, imm.immediate(), true); } +// Multiply hi doubleword +void Assembler::mulhd(Register dst, Register src1, Register src2, RCBit r) { + xo_form(EXT2 | MULHD, dst, src1, src2, LeaveOE, r); +} + +// Multiply hi doubleword unsigned +void Assembler::mulhdu(Register dst, Register src1, Register src2, RCBit r) { + xo_form(EXT2 | MULHDU, dst, src1, src2, LeaveOE, r); +} + // Multiply hi word void Assembler::mulhw(Register dst, Register src1, Register src2, RCBit r) { xo_form(EXT2 | MULHWX, dst, src1, src2, LeaveOE, r); @@ -1310,8 +1319,7 @@ int Assembler::instructions_required_for_mov(Register dst, bool Assembler::use_constant_pool_for_mov(Register dst, const Operand& src, bool canOptimize) const { - if (!v8_flags.enable_embedded_constant_pool || - !is_constant_pool_available()) { + if (!V8_EMBEDDED_CONSTANT_POOL_BOOL || !is_constant_pool_available()) { // If there is no constant pool available, we must use a mov // immediate sequence. return false; @@ -2155,8 +2163,7 @@ void Assembler::db(uint8_t data) { void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { CheckBuffer(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uint32_t*>(pc_) = data; @@ -2166,8 +2173,7 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { void Assembler::dq(uint64_t value, RelocInfo::Mode rmode) { CheckBuffer(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uint64_t*>(pc_) = value; @@ -2177,8 +2183,7 @@ void Assembler::dq(uint64_t value, RelocInfo::Mode rmode) { void Assembler::dp(uintptr_t data, RelocInfo::Mode rmode) { CheckBuffer(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uintptr_t*>(pc_) = data; diff --git a/deps/v8/src/codegen/ppc/assembler-ppc.h b/deps/v8/src/codegen/ppc/assembler-ppc.h index 6563287d0a82e7..21a439a85e0483 100644 --- a/deps/v8/src/codegen/ppc/assembler-ppc.h +++ b/deps/v8/src/codegen/ppc/assembler-ppc.h @@ -309,10 +309,9 @@ class Assembler : public AssemblerBase { static constexpr int kMovInstructionsNoConstantPool = 2; static constexpr int kTaggedLoadInstructions = 1; #endif - static constexpr int kMovInstructions = - v8_flags.enable_embedded_constant_pool.value() - ? kMovInstructionsConstantPool - : kMovInstructionsNoConstantPool; + static constexpr int kMovInstructions = V8_EMBEDDED_CONSTANT_POOL_BOOL + ? 
kMovInstructionsConstantPool + : kMovInstructionsNoConstantPool; static inline int encode_crbit(const CRegister& cr, enum CRBit crbit) { return ((cr.code() * CRWIDTH) + crbit); @@ -895,6 +894,8 @@ class Assembler : public AssemblerBase { void mulhw(Register dst, Register src1, Register src2, RCBit r = LeaveRC); void mulhwu(Register dst, Register src1, Register src2, RCBit r = LeaveRC); + void mulhd(Register dst, Register src1, Register src2, RCBit r = LeaveRC); + void mulhdu(Register dst, Register src1, Register src2, RCBit r = LeaveRC); void mulli(Register dst, Register src, const Operand& imm); void divw(Register dst, Register src1, Register src2, OEBit o = LeaveOE, diff --git a/deps/v8/src/codegen/ppc/macro-assembler-ppc.cc b/deps/v8/src/codegen/ppc/macro-assembler-ppc.cc index 7fe0819e10ffbc..3f5040c9ffb8ad 100644 --- a/deps/v8/src/codegen/ppc/macro-assembler-ppc.cc +++ b/deps/v8/src/codegen/ppc/macro-assembler-ppc.cc @@ -898,7 +898,7 @@ void MacroAssembler::RecordWrite(Register object, Register slot_address, void TurboAssembler::PushCommonFrame(Register marker_reg) { int fp_delta = 0; mflr(r0); - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { if (marker_reg.is_valid()) { Push(r0, fp, kConstantPoolRegister, marker_reg); fp_delta = 2; @@ -921,7 +921,7 @@ void TurboAssembler::PushCommonFrame(Register marker_reg) { void TurboAssembler::PushStandardFrame(Register function_reg) { int fp_delta = 0; mflr(r0); - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { if (function_reg.is_valid()) { Push(r0, fp, kConstantPoolRegister, cp, function_reg); fp_delta = 3; @@ -943,7 +943,7 @@ void TurboAssembler::PushStandardFrame(Register function_reg) { } void TurboAssembler::RestoreFrameStateForTailCall() { - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { LoadU64(kConstantPoolRegister, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset)); set_constant_pool_available(false); @@ -1230,7 +1230,7 @@ void TurboAssembler::StubPrologue(StackFrame::Type type) { mov(r11, Operand(StackFrame::TypeToMarker(type))); PushCommonFrame(r11); } - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { LoadConstantPoolPointerRegister(); set_constant_pool_available(true); } @@ -1238,7 +1238,7 @@ void TurboAssembler::StubPrologue(StackFrame::Type type) { void TurboAssembler::Prologue() { PushStandardFrame(r4); - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { // base contains prologue address LoadConstantPoolPointerRegister(); set_constant_pool_available(true); @@ -1288,8 +1288,7 @@ void TurboAssembler::DropArgumentsAndPushNewReceiver(Register argc, void TurboAssembler::EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) { - if (v8_flags.enable_embedded_constant_pool && - load_constant_pool_pointer_reg) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL && load_constant_pool_pointer_reg) { // Push type explicitly so we can leverage the constant pool. // This path cannot rely on ip containing code entry. 
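The substitution recurring through these ppc files swaps the runtime flag `v8_flags.enable_embedded_constant_pool` for the compile-time constant `V8_EMBEDDED_CONSTANT_POOL_BOOL`, so the untaken side of each branch can be folded away. A minimal sketch of the pattern; the macro definition shown here is an assumption about globals.h, which this patch does not show:

```cpp
// Assumed shape of the constant (the real definition lives in
// globals.h): a true/false literal chosen per target, usable both in
// #if directives and in ordinary C++ conditions.
#ifdef V8_EMBEDDED_CONSTANT_POOL
#define V8_EMBEDDED_CONSTANT_POOL_BOOL true
#else
#define V8_EMBEDDED_CONSTANT_POOL_BOOL false
#endif

void PushPrologue() {
  if (V8_EMBEDDED_CONSTANT_POOL_BOOL) {
    // Dead-stripped at compile time on targets without an embedded
    // constant pool, whereas the old v8_flags check survived to runtime.
  }
}
```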
PushCommonFrame(); @@ -1322,7 +1321,7 @@ int TurboAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) { int frame_ends; LoadU64(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); LoadU64(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { LoadU64(kConstantPoolRegister, MemOperand(fp, StandardFrameConstants::kConstantPoolOffset)); } @@ -1373,7 +1372,7 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space, li(r8, Operand::Zero()); StoreU64(r8, MemOperand(fp, ExitFrameConstants::kSPOffset)); } - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { StoreU64(kConstantPoolRegister, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset)); } @@ -2153,7 +2152,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( bne(flags_need_processing, cr0); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, Register feedback_vector) { DCHECK(!AreAliased(flags, feedback_vector)); Label maybe_has_optimized_code, maybe_needs_logging; @@ -2654,7 +2653,7 @@ void TurboAssembler::LoadSmiLiteral(Register dst, Smi smi) { void TurboAssembler::LoadDoubleLiteral(DoubleRegister result, base::Double value, Register scratch) { - if (v8_flags.enable_embedded_constant_pool && is_constant_pool_available() && + if (V8_EMBEDDED_CONSTANT_POOL_BOOL && is_constant_pool_available() && !(scratch == r0 && ConstantPoolAccessIsInOverflow())) { ConstantPoolEntry::Access access = ConstantPoolAddEntry(value); if (access == ConstantPoolEntry::OVERFLOWED) { @@ -3715,6 +3714,10 @@ void TurboAssembler::StoreF32LE(DoubleRegister dst, const MemOperand& mem, V(I16x8Eq, vcmpequh) \ V(I16x8GtS, vcmpgtsh) \ V(I16x8GtU, vcmpgtuh) \ + V(I16x8AddSatS, vaddshs) \ + V(I16x8SubSatS, vsubshs) \ + V(I16x8AddSatU, vadduhs) \ + V(I16x8SubSatU, vsubuhs) \ V(I8x16Add, vaddubm) \ V(I8x16Sub, vsububm) \ V(I8x16MinS, vminsb) \ @@ -3723,7 +3726,11 @@ void TurboAssembler::StoreF32LE(DoubleRegister dst, const MemOperand& mem, V(I8x16MaxU, vmaxub) \ V(I8x16Eq, vcmpequb) \ V(I8x16GtS, vcmpgtsb) \ - V(I8x16GtU, vcmpgtub) + V(I8x16GtU, vcmpgtub) \ + V(I8x16AddSatS, vaddsbs) \ + V(I8x16SubSatS, vsubsbs) \ + V(I8x16AddSatU, vaddubs) \ + V(I8x16SubSatU, vsububs) #define EMIT_SIMD_BINOP(name, op) \ void TurboAssembler::name(Simd128Register dst, Simd128Register src1, \ @@ -3734,6 +3741,62 @@ SIMD_BINOP_LIST(EMIT_SIMD_BINOP) #undef EMIT_SIMD_BINOP #undef SIMD_BINOP_LIST +#define SIMD_SHIFT_LIST(V) \ + V(I64x2Shl, vsld) \ + V(I64x2ShrS, vsrad) \ + V(I64x2ShrU, vsrd) \ + V(I32x4Shl, vslw) \ + V(I32x4ShrS, vsraw) \ + V(I32x4ShrU, vsrw) \ + V(I16x8Shl, vslh) \ + V(I16x8ShrS, vsrah) \ + V(I16x8ShrU, vsrh) \ + V(I8x16Shl, vslb) \ + V(I8x16ShrS, vsrab) \ + V(I8x16ShrU, vsrb) + +#define EMIT_SIMD_SHIFT(name, op) \ + void TurboAssembler::name(Simd128Register dst, Simd128Register src1, \ + Register src2, Simd128Register scratch) { \ + mtvsrd(scratch, src2); \ + vspltb(scratch, scratch, Operand(7)); \ + op(dst, src1, scratch); \ + } \ + void TurboAssembler::name(Simd128Register dst, Simd128Register src1, \ + const Operand& src2, Register scratch1, \ + Simd128Register scratch2) { \ + mov(scratch1, src2); \ + name(dst, src1, scratch1, scratch2); \ + } +SIMD_SHIFT_LIST(EMIT_SIMD_SHIFT) +#undef EMIT_SIMD_SHIFT +#undef SIMD_SHIFT_LIST + +#define SIMD_UNOP_LIST(V) \ + V(F64x2Abs, xvabsdp) \ + V(F64x2Neg, 
xvnegdp) \ + V(F64x2Sqrt, xvsqrtdp) \ + V(F64x2Ceil, xvrdpip) \ + V(F64x2Floor, xvrdpim) \ + V(F64x2Trunc, xvrdpiz) \ + V(F32x4Abs, xvabssp) \ + V(F32x4Neg, xvnegsp) \ + V(F32x4Sqrt, xvsqrtsp) \ + V(F32x4Ceil, xvrspip) \ + V(F32x4Floor, xvrspim) \ + V(F32x4Trunc, xvrspiz) \ + V(I64x2Neg, vnegd) \ + V(I32x4Neg, vnegw) \ + V(I8x16Popcnt, vpopcntb) + +#define EMIT_SIMD_UNOP(name, op) \ + void TurboAssembler::name(Simd128Register dst, Simd128Register src) { \ + op(dst, src); \ + } +SIMD_UNOP_LIST(EMIT_SIMD_UNOP) +#undef EMIT_SIMD_UNOP +#undef SIMD_UNOP_LIST + void TurboAssembler::LoadSimd128(Simd128Register dst, const MemOperand& mem, Register scratch) { GenerateMemoryOperationRR(dst, mem, lxvx); @@ -4110,6 +4173,51 @@ void TurboAssembler::I8x16GeU(Simd128Register dst, Simd128Register src1, vor(dst, dst, scratch); } +void TurboAssembler::I64x2Abs(Simd128Register dst, Simd128Register src, + Simd128Register scratch) { + constexpr int shift_bits = 63; + xxspltib(scratch, Operand(shift_bits)); + vsrad(scratch, src, scratch); + vxor(dst, src, scratch); + vsubudm(dst, dst, scratch); +} +void TurboAssembler::I32x4Abs(Simd128Register dst, Simd128Register src, + Simd128Register scratch) { + constexpr int shift_bits = 31; + xxspltib(scratch, Operand(shift_bits)); + vsraw(scratch, src, scratch); + vxor(dst, src, scratch); + vsubuwm(dst, dst, scratch); +} +void TurboAssembler::I16x8Abs(Simd128Register dst, Simd128Register src, + Simd128Register scratch) { + constexpr int shift_bits = 15; + xxspltib(scratch, Operand(shift_bits)); + vsrah(scratch, src, scratch); + vxor(dst, src, scratch); + vsubuhm(dst, dst, scratch); +} +void TurboAssembler::I16x8Neg(Simd128Register dst, Simd128Register src, + Simd128Register scratch) { + vspltish(scratch, Operand(1)); + vnor(dst, src, src); + vadduhm(dst, scratch, dst); +} +void TurboAssembler::I8x16Abs(Simd128Register dst, Simd128Register src, + Simd128Register scratch) { + constexpr int shift_bits = 7; + xxspltib(scratch, Operand(shift_bits)); + vsrab(scratch, src, scratch); + vxor(dst, src, scratch); + vsububm(dst, dst, scratch); +} +void TurboAssembler::I8x16Neg(Simd128Register dst, Simd128Register src, + Simd128Register scratch) { + xxspltib(scratch, Operand(1)); + vnor(dst, src, src); + vaddubm(dst, scratch, dst); +} + Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2, Register reg3, Register reg4, Register reg5, Register reg6) { diff --git a/deps/v8/src/codegen/ppc/macro-assembler-ppc.h b/deps/v8/src/codegen/ppc/macro-assembler-ppc.h index 0d0ef1f9bc1007..78f4346c7e13d3 100644 --- a/deps/v8/src/codegen/ppc/macro-assembler-ppc.h +++ b/deps/v8/src/codegen/ppc/macro-assembler-ppc.h @@ -1121,6 +1121,10 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { V(I16x8Eq) \ V(I16x8GtS) \ V(I16x8GtU) \ + V(I16x8AddSatS) \ + V(I16x8SubSatS) \ + V(I16x8AddSatU) \ + V(I16x8SubSatU) \ V(I8x16Add) \ V(I8x16Sub) \ V(I8x16MinS) \ @@ -1129,7 +1133,11 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { V(I8x16MaxU) \ V(I8x16Eq) \ V(I8x16GtS) \ - V(I8x16GtU) + V(I8x16GtU) \ + V(I8x16AddSatS) \ + V(I8x16SubSatS) \ + V(I8x16AddSatU) \ + V(I8x16SubSatU) #define PROTOTYPE_SIMD_BINOP(name) \ void name(Simd128Register dst, Simd128Register src1, Simd128Register src2); @@ -1137,6 +1145,74 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { #undef PROTOTYPE_SIMD_BINOP #undef SIMD_BINOP_LIST +#define SIMD_BINOP_WITH_SCRATCH_LIST(V) \ + V(F64x2Ne) \ + V(F32x4Ne) \ + V(I64x2Ne) \ + V(I64x2GeS) \ + V(I32x4Ne) \ + V(I32x4GeS) \ 
+ V(I32x4GeU) \ + V(I16x8Ne) \ + V(I16x8GeS) \ + V(I16x8GeU) \ + V(I8x16Ne) \ + V(I8x16GeS) \ + V(I8x16GeU) + +#define PROTOTYPE_SIMD_BINOP_WITH_SCRATCH(name) \ + void name(Simd128Register dst, Simd128Register src1, Simd128Register src2, \ + Simd128Register scratch); + SIMD_BINOP_WITH_SCRATCH_LIST(PROTOTYPE_SIMD_BINOP_WITH_SCRATCH) +#undef PROTOTYPE_SIMD_BINOP_WITH_SCRATCH +#undef SIMD_BINOP_WITH_SCRATCH_LIST + +#define SIMD_SHIFT_LIST(V) \ + V(I64x2Shl) \ + V(I64x2ShrS) \ + V(I64x2ShrU) \ + V(I32x4Shl) \ + V(I32x4ShrS) \ + V(I32x4ShrU) \ + V(I16x8Shl) \ + V(I16x8ShrS) \ + V(I16x8ShrU) \ + V(I8x16Shl) \ + V(I8x16ShrS) \ + V(I8x16ShrU) + +#define PROTOTYPE_SIMD_SHIFT(name) \ + void name(Simd128Register dst, Simd128Register src1, Register src2, \ + Simd128Register scratch); \ + void name(Simd128Register dst, Simd128Register src1, const Operand& src2, \ + Register scratch1, Simd128Register scratch2); + SIMD_SHIFT_LIST(PROTOTYPE_SIMD_SHIFT) +#undef PROTOTYPE_SIMD_SHIFT +#undef SIMD_SHIFT_LIST + +#define SIMD_UNOP_LIST(V) \ + V(F64x2Abs) \ + V(F64x2Neg) \ + V(F64x2Sqrt) \ + V(F64x2Ceil) \ + V(F64x2Floor) \ + V(F64x2Trunc) \ + V(F32x4Abs) \ + V(F32x4Neg) \ + V(F32x4Sqrt) \ + V(F32x4Ceil) \ + V(F32x4Floor) \ + V(F32x4Trunc) \ + V(I64x2Neg) \ + V(I32x4Neg) \ + V(I8x16Popcnt) + +#define PROTOTYPE_SIMD_UNOP(name) \ + void name(Simd128Register dst, Simd128Register src); + SIMD_UNOP_LIST(PROTOTYPE_SIMD_UNOP) +#undef PROTOTYPE_SIMD_UNOP +#undef SIMD_UNOP_LIST + void LoadSimd128(Simd128Register dst, const MemOperand& mem, Register scratch); void StoreSimd128(Simd128Register src, const MemOperand& mem, @@ -1196,31 +1272,17 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { Simd128Register scratch1, Simd128Register scratch2); void F64x2Max(Simd128Register dst, Simd128Register src1, Simd128Register src2, Simd128Register scratch1, Simd128Register scratch2); - void F64x2Ne(Simd128Register dst, Simd128Register src1, Simd128Register src2, - Simd128Register scratch); - void F32x4Ne(Simd128Register dst, Simd128Register src1, Simd128Register src2, - Simd128Register scratch); - void I64x2Ne(Simd128Register dst, Simd128Register src1, Simd128Register src2, - Simd128Register scratch); - void I64x2GeS(Simd128Register dst, Simd128Register src1, Simd128Register src2, - Simd128Register scratch); - void I32x4Ne(Simd128Register dst, Simd128Register src1, Simd128Register src2, - Simd128Register scratch); - void I32x4GeS(Simd128Register dst, Simd128Register src1, Simd128Register src2, + void I64x2Abs(Simd128Register dst, Simd128Register src, Simd128Register scratch); - void I32x4GeU(Simd128Register dst, Simd128Register src1, Simd128Register src2, + void I32x4Abs(Simd128Register dst, Simd128Register src, Simd128Register scratch); - void I16x8Ne(Simd128Register dst, Simd128Register src1, Simd128Register src2, - Simd128Register scratch); - void I16x8GeS(Simd128Register dst, Simd128Register src1, Simd128Register src2, + void I16x8Abs(Simd128Register dst, Simd128Register src, Simd128Register scratch); - void I16x8GeU(Simd128Register dst, Simd128Register src1, Simd128Register src2, + void I16x8Neg(Simd128Register dst, Simd128Register src, Simd128Register scratch); - void I8x16Ne(Simd128Register dst, Simd128Register src1, Simd128Register src2, - Simd128Register scratch); - void I8x16GeS(Simd128Register dst, Simd128Register src1, Simd128Register src2, + void I8x16Abs(Simd128Register dst, Simd128Register src, Simd128Register scratch); - void I8x16GeU(Simd128Register dst, Simd128Register src1, Simd128Register 
src2, + void I8x16Neg(Simd128Register dst, Simd128Register src, Simd128Register scratch); private: @@ -1404,8 +1466,8 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, Register feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, - Register feedback_vector); + void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, + Register feedback_vector); // --------------------------------------------------------------------------- // Runtime calls diff --git a/deps/v8/src/codegen/ppc/register-ppc.h b/deps/v8/src/codegen/ppc/register-ppc.h index bbcaa525e49120..b85e7868e13dc7 100644 --- a/deps/v8/src/codegen/ppc/register-ppc.h +++ b/deps/v8/src/codegen/ppc/register-ppc.h @@ -17,7 +17,7 @@ namespace internal { V(r16) V(r17) V(r18) V(r19) V(r20) V(r21) V(r22) V(r23) \ V(r24) V(r25) V(r26) V(r27) V(r28) V(r29) V(r30) V(fp) -#if V8_EMBEDDED_CONSTANT_POOL +#if V8_EMBEDDED_CONSTANT_POOL_BOOL #define ALLOCATABLE_GENERAL_REGISTERS(V) \ V(r3) V(r4) V(r5) V(r6) V(r7) \ V(r8) V(r9) V(r10) V(r14) V(r15) \ diff --git a/deps/v8/src/codegen/reloc-info.cc b/deps/v8/src/codegen/reloc-info.cc index 12c2f9641a23d6..a2b1204ff72e56 100644 --- a/deps/v8/src/codegen/reloc-info.cc +++ b/deps/v8/src/codegen/reloc-info.cc @@ -345,7 +345,7 @@ void RelocInfo::set_target_address(Address target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { DCHECK(IsCodeTargetMode(rmode_) || IsNearBuiltinEntry(rmode_) || - IsRuntimeEntry(rmode_) || IsWasmCall(rmode_)); + IsWasmCall(rmode_)); Assembler::set_target_address_at(pc_, constant_pool_, target, icache_flush_mode); if (!host().is_null() && IsCodeTargetMode(rmode_) && @@ -369,13 +369,13 @@ bool RelocInfo::HasTargetAddressAddress() const { static constexpr int kTargetAddressAddressModeMask = ModeMask(CODE_TARGET) | ModeMask(FULL_EMBEDDED_OBJECT) | ModeMask(COMPRESSED_EMBEDDED_OBJECT) | ModeMask(EXTERNAL_REFERENCE) | - ModeMask(OFF_HEAP_TARGET) | ModeMask(RUNTIME_ENTRY) | - ModeMask(WASM_CALL) | ModeMask(WASM_STUB_CALL); + ModeMask(OFF_HEAP_TARGET) | ModeMask(WASM_CALL) | + ModeMask(WASM_STUB_CALL); #else static constexpr int kTargetAddressAddressModeMask = ModeMask(CODE_TARGET) | ModeMask(RELATIVE_CODE_TARGET) | ModeMask(FULL_EMBEDDED_OBJECT) | ModeMask(EXTERNAL_REFERENCE) | - ModeMask(OFF_HEAP_TARGET) | ModeMask(RUNTIME_ENTRY) | ModeMask(WASM_CALL); + ModeMask(OFF_HEAP_TARGET) | ModeMask(WASM_CALL); #endif return (ModeMask(rmode_) & kTargetAddressAddressModeMask) != 0; } @@ -399,14 +399,10 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) { return "compressed embedded object"; case FULL_EMBEDDED_OBJECT: return "full embedded object"; - case DATA_EMBEDDED_OBJECT: - return "data embedded object"; case CODE_TARGET: return "code target"; case RELATIVE_CODE_TARGET: return "relative code target"; - case RUNTIME_ENTRY: - return "runtime entry"; case EXTERNAL_REFERENCE: return "external reference"; case INTERNAL_REFERENCE: @@ -473,13 +469,6 @@ void RelocInfo::Print(Isolate* isolate, std::ostream& os) { os << " " << Builtins::name(code.builtin_id()); } os << ") (" << reinterpret_cast<const void*>(target_address()) << ")"; - } else if (IsRuntimeEntry(rmode_)) { - // Deoptimization bailouts are stored as runtime entries. 
- DeoptimizeKind type; - if (Deoptimizer::IsDeoptimizationEntry(isolate, target_address(), &type)) { - os << " (" << Deoptimizer::MessageFor(type) - << " deoptimization bailout)"; - } } else if (IsConstPool(rmode_)) { os << " (size " << static_cast<int>(data_) << ")"; } @@ -495,7 +484,6 @@ void RelocInfo::Verify(Isolate* isolate) { Object::VerifyPointer(isolate, target_object(isolate)); break; case FULL_EMBEDDED_OBJECT: - case DATA_EMBEDDED_OBJECT: Object::VerifyAnyTagged(isolate, target_object(isolate)); break; case CODE_TARGET: @@ -535,7 +523,6 @@ void RelocInfo::Verify(Isolate* isolate) { OffHeapInstructionStream::TryLookupCode(isolate, addr))); break; } - case RUNTIME_ENTRY: case EXTERNAL_REFERENCE: case DEOPT_SCRIPT_OFFSET: case DEOPT_INLINING_ID: diff --git a/deps/v8/src/codegen/reloc-info.h b/deps/v8/src/codegen/reloc-info.h index 12358d7b28d7a9..fd74413fc97113 100644 --- a/deps/v8/src/codegen/reloc-info.h +++ b/deps/v8/src/codegen/reloc-info.h @@ -59,15 +59,11 @@ class RelocInfo { // TODO(ishell): rename to NEAR_CODE_TARGET. RELATIVE_CODE_TARGET, // LAST_CODE_TARGET_MODE COMPRESSED_EMBEDDED_OBJECT, - FULL_EMBEDDED_OBJECT, - DATA_EMBEDDED_OBJECT, // LAST_GCED_ENUM + FULL_EMBEDDED_OBJECT, // LAST_GCED_ENUM WASM_CALL, // FIRST_SHAREABLE_RELOC_MODE WASM_STUB_CALL, - // TODO(ishell): This reloc info shouldn't be used anymore. Remove it. - RUNTIME_ENTRY, - EXTERNAL_REFERENCE, // The address of an external C++ function. INTERNAL_REFERENCE, // An address inside the same function. @@ -107,7 +103,7 @@ class RelocInfo { FIRST_REAL_RELOC_MODE = CODE_TARGET, LAST_REAL_RELOC_MODE = VENEER_POOL, FIRST_EMBEDDED_OBJECT_RELOC_MODE = COMPRESSED_EMBEDDED_OBJECT, - LAST_EMBEDDED_OBJECT_RELOC_MODE = DATA_EMBEDDED_OBJECT, + LAST_EMBEDDED_OBJECT_RELOC_MODE = FULL_EMBEDDED_OBJECT, LAST_GCED_ENUM = LAST_EMBEDDED_OBJECT_RELOC_MODE, FIRST_BUILTIN_ENTRY_MODE = OFF_HEAP_TARGET, LAST_BUILTIN_ENTRY_MODE = NEAR_BUILTIN_ENTRY, @@ -153,16 +149,10 @@ class RelocInfo { static constexpr bool IsCompressedEmbeddedObject(Mode mode) { return COMPRESS_POINTERS_BOOL && mode == COMPRESSED_EMBEDDED_OBJECT; } - static constexpr bool IsDataEmbeddedObject(Mode mode) { - return mode == DATA_EMBEDDED_OBJECT; - } static constexpr bool IsEmbeddedObjectMode(Mode mode) { return base::IsInRange(mode, FIRST_EMBEDDED_OBJECT_RELOC_MODE, LAST_EMBEDDED_OBJECT_RELOC_MODE); } - static constexpr bool IsRuntimeEntry(Mode mode) { - return mode == RUNTIME_ENTRY; - } static constexpr bool IsWasmCall(Mode mode) { return mode == WASM_CALL; } static constexpr bool IsWasmReference(Mode mode) { return mode == WASM_CALL; } static constexpr bool IsWasmStubCall(Mode mode) { @@ -265,7 +255,7 @@ class RelocInfo { ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); // this relocation applies to; - // can only be called if IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) + // can only be called if IsCodeTarget(rmode_) V8_INLINE Address target_address(); // Cage base value is used for decompressing compressed embedded references. V8_INLINE HeapObject target_object(PtrComprCageBase cage_base); @@ -279,11 +269,6 @@ class RelocInfo { // Decodes builtin ID encoded as a PC-relative offset. This encoding is used // during code generation of call/jump with NEAR_BUILTIN_ENTRY. 
V8_INLINE Builtin target_builtin_at(Assembler* origin); - V8_INLINE Address target_runtime_entry(Assembler* origin); - V8_INLINE void set_target_runtime_entry( - Address target, - WriteBarrierMode write_barrier_mode = UPDATE_WRITE_BARRIER, - ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); V8_INLINE Address target_off_heap_target(); V8_INLINE void set_target_external_reference( Address, ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED); @@ -339,8 +324,6 @@ class RelocInfo { visitor->VisitExternalReference(host(), this); } else if (IsInternalReference(mode) || IsInternalReferenceEncoded(mode)) { visitor->VisitInternalReference(host(), this); - } else if (IsRuntimeEntry(mode)) { - visitor->VisitRuntimeEntry(host(), this); } else if (IsBuiltinEntryMode(mode)) { visitor->VisitOffHeapTarget(host(), this); } @@ -371,8 +354,7 @@ class RelocInfo { static int EmbeddedObjectModeMask() { return ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) | - ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) | - ModeMask(RelocInfo::DATA_EMBEDDED_OBJECT); + ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT); } // In addition to modes covered by the apply mask (which is applied at GC @@ -382,9 +364,7 @@ class RelocInfo { return ModeMask(RelocInfo::CODE_TARGET) | ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) | ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) | - ModeMask(RelocInfo::DATA_EMBEDDED_OBJECT) | ModeMask(RelocInfo::NEAR_BUILTIN_ENTRY) | - ModeMask(RelocInfo::RUNTIME_ENTRY) | ModeMask(RelocInfo::RELATIVE_CODE_TARGET) | kApplyMask; } diff --git a/deps/v8/src/codegen/riscv/assembler-riscv-inl.h b/deps/v8/src/codegen/riscv/assembler-riscv-inl.h index ba5e345906968a..b8d3ededcd1d86 100644 --- a/deps/v8/src/codegen/riscv/assembler-riscv-inl.h +++ b/deps/v8/src/codegen/riscv/assembler-riscv-inl.h @@ -65,8 +65,7 @@ void RelocInfo::apply(intptr_t delta) { } Address RelocInfo::target_address() { - DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) || - IsWasmCall(rmode_)); + DCHECK(IsCodeTargetMode(rmode_) || IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } @@ -161,10 +160,8 @@ void Assembler::deserialization_set_target_internal_reference_at( HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); - } else if (IsCompressedEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(DecompressTaggedAny( + if (IsCompressedEmbeddedObject(rmode_)) { + return HeapObject::cast(Object(V8HeapCompressionScheme::DecompressTaggedAny( cage_base, Assembler::target_compressed_address_at(pc_, constant_pool_)))); } else { @@ -174,9 +171,7 @@ HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { } Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { - if (IsDataEmbeddedObject(rmode_)) { - return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); - } else if (IsCodeTarget(rmode_)) { + if (IsCodeTarget(rmode_)) { return Handle<HeapObject>::cast( origin->code_target_object_handle_at(pc_, constant_pool_)); } else if (IsCompressedEmbeddedObject(rmode_)) { @@ -194,12 +189,11 @@ void RelocInfo::set_target_object(Heap* heap, HeapObject target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - WriteUnalignedValue(pc_, target.ptr()); - // No need to flush 
icache since no instructions were changed. - } else if (IsCompressedEmbeddedObject(rmode_)) { + if (IsCompressedEmbeddedObject(rmode_)) { Assembler::set_target_compressed_address_at( - pc_, constant_pool_, CompressTagged(target.ptr()), icache_flush_mode); + pc_, constant_pool_, + V8HeapCompressionScheme::CompressTagged(target.ptr()), + icache_flush_mode); } else { DCHECK(IsFullEmbeddedObject(rmode_)); Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), @@ -252,19 +246,6 @@ Handle<Code> Assembler::relative_code_target_object_handle_at( Builtin RelocInfo::target_builtin_at(Assembler* origin) { UNREACHABLE(); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return target_address(); -} - -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) - set_target_address(target, write_barrier_mode, icache_flush_mode); -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); @@ -272,9 +253,8 @@ Address RelocInfo::target_off_heap_target() { void RelocInfo::WipeOut() { DCHECK(IsFullEmbeddedObject(rmode_) || IsCodeTarget(rmode_) || - IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) || - IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_) || - IsOffHeapTarget(rmode_)); + IsExternalReference(rmode_) || IsInternalReference(rmode_) || + IsInternalReferenceEncoded(rmode_) || IsOffHeapTarget(rmode_)); if (IsInternalReference(rmode_)) { Memory<Address>(pc_) = kNullAddress; } else if (IsInternalReferenceEncoded(rmode_)) { diff --git a/deps/v8/src/codegen/riscv/assembler-riscv.cc b/deps/v8/src/codegen/riscv/assembler-riscv.cc index 1b56afe7b97fe7..4e2ffdaa6cfa15 100644 --- a/deps/v8/src/codegen/riscv/assembler-riscv.cc +++ b/deps/v8/src/codegen/riscv/assembler-riscv.cc @@ -132,8 +132,10 @@ Register ToRegister(int num) { const int RelocInfo::kApplyMask = RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | + RelocInfo::ModeMask(RelocInfo::NEAR_BUILTIN_ENTRY) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) | - RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET); + RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET) | + RelocInfo::ModeMask(RelocInfo::CODE_TARGET); bool RelocInfo::IsCodedSpecially() { // The deserializer needs to know whether a pointer is specially coded. 
Being @@ -794,6 +796,7 @@ int32_t Assembler::branch_long_offset(Label* L) { else DCHECK_EQ(offset & 3, 0); DCHECK(is_int32(offset)); + VU.clear(); return static_cast<int32_t>(offset); } @@ -826,6 +829,7 @@ int32_t Assembler::branch_offset_helper(Label* L, OffsetSize bits) { DCHECK(is_intn(offset, bits)); DCHECK_EQ(offset & 1, 0); DEBUG_PRINTF("\toffset = %d\n", offset); + VU.clear(); return offset; } @@ -1414,8 +1418,7 @@ void Assembler::db(uint8_t data) { void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } if (!is_buffer_growth_blocked()) CheckBuffer(); @@ -1425,8 +1428,7 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { void Assembler::dq(uint64_t data, RelocInfo::Mode rmode) { if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } if (!is_buffer_growth_blocked()) CheckBuffer(); diff --git a/deps/v8/src/codegen/riscv/assembler-riscv.h b/deps/v8/src/codegen/riscv/assembler-riscv.h index f383cbf92128bd..c08f82bc7797a6 100644 --- a/deps/v8/src/codegen/riscv/assembler-riscv.h +++ b/deps/v8/src/codegen/riscv/assembler-riscv.h @@ -580,9 +580,14 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase, } } + void clear() { + sew_ = kVsInvalid; + lmul_ = kVlInvalid; + } + private: - VSew sew_ = E8; - Vlmul lmul_ = m1; + VSew sew_ = kVsInvalid; + Vlmul lmul_ = kVlInvalid; int32_t vl = 0; Assembler* assm_; FPURoundingMode mode_ = RNE; diff --git a/deps/v8/src/codegen/riscv/base-constants-riscv.h b/deps/v8/src/codegen/riscv/base-constants-riscv.h index 077ae9110c3a5e..bc38bfabc9f323 100644 --- a/deps/v8/src/codegen/riscv/base-constants-riscv.h +++ b/deps/v8/src/codegen/riscv/base-constants-riscv.h @@ -73,6 +73,7 @@ enum Vlmul { #define DEFINE_FLAG(name) name, RVV_LMUL(DEFINE_FLAG) #undef DEFINE_FLAG + kVlInvalid }; #define RVV_SEW(V) \ @@ -85,6 +86,7 @@ enum Vlmul { enum VSew { RVV_SEW(DEFINE_FLAG) #undef DEFINE_FLAG + kVsInvalid }; constexpr size_t kMaxPCRelativeCodeRangeInMB = 4094; diff --git a/deps/v8/src/codegen/riscv/macro-assembler-riscv.cc b/deps/v8/src/codegen/riscv/macro-assembler-riscv.cc index 4f4b443c5149ce..11de91c06a7815 100644 --- a/deps/v8/src/codegen/riscv/macro-assembler-riscv.cc +++ b/deps/v8/src/codegen/riscv/macro-assembler-riscv.cc @@ -207,7 +207,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Branch(flags_need_processing, ne, scratch, Operand(zero_reg)); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, Register feedback_vector) { ASM_CODE_COMMENT(this); DCHECK(!AreAliased(flags, feedback_vector)); @@ -699,6 +699,18 @@ void TurboAssembler::Mulh64(Register rd, Register rs, const Operand& rt) { } } +void TurboAssembler::Mulhu64(Register rd, Register rs, const Operand& rt) { + if (rt.is_reg()) { + mulhu(rd, rs, rt.rm()); + } else { + // li handles the relocation. 
+ UseScratchRegisterScope temps(this); + Register scratch = temps.Acquire(); + Li(scratch, rt.immediate()); + mulhu(rd, rs, scratch); + } +} + void TurboAssembler::Div32(Register res, Register rs, const Operand& rt) { if (rt.is_reg()) { divw(res, rs, rt.rm()); } else { @@ -2420,6 +2432,7 @@ void TurboAssembler::ShlPair(Register dst_low, Register dst_high, DCHECK_GE(63, shift); DCHECK_NE(dst_low, src_low); DCHECK_NE(dst_high, src_low); + shift &= 0x3F; if (shift == 0) { Move(dst_high, src_high); Move(dst_low, src_low); @@ -2489,7 +2502,7 @@ void TurboAssembler::ShrPair(Register dst_low, Register dst_high, DCHECK_GE(63, shift); DCHECK_NE(dst_low, src_high); DCHECK_NE(dst_high, src_high); - + shift &= 0x3F; if (shift == 32) { mv(dst_low, src_high); li(dst_high, Operand(0)); @@ -4392,7 +4405,7 @@ void TurboAssembler::CallBuiltin(Builtin builtin) { break; } case BuiltinCallJumpMode::kPCRelative: - Call(BuiltinEntry(builtin), RelocInfo::RUNTIME_ENTRY); + Call(BuiltinEntry(builtin), RelocInfo::NEAR_BUILTIN_ENTRY); break; case BuiltinCallJumpMode::kIndirect: { LoadEntryFromBuiltin(builtin, t6); @@ -4426,7 +4439,7 @@ void TurboAssembler::TailCallBuiltin(Builtin builtin) { break; } case BuiltinCallJumpMode::kPCRelative: - Jump(BuiltinEntry(builtin), RelocInfo::RUNTIME_ENTRY); + Jump(BuiltinEntry(builtin), RelocInfo::NEAR_BUILTIN_ENTRY); break; case BuiltinCallJumpMode::kIndirect: { LoadEntryFromBuiltin(builtin, t6); @@ -5199,6 +5212,37 @@ void TurboAssembler::MulOverflow32(Register dst, Register left, sext_w(dst, overflow); xor_(overflow, overflow, dst); } + +void TurboAssembler::MulOverflow64(Register dst, Register left, + const Operand& right, Register overflow) { + ASM_CODE_COMMENT(this); + UseScratchRegisterScope temps(this); + BlockTrampolinePoolScope block_trampoline_pool(this); + Register right_reg = no_reg; + Register scratch = temps.Acquire(); + Register scratch2 = temps.Acquire(); + if (!right.is_reg()) { + li(scratch, Operand(right)); + right_reg = scratch; + } else { + right_reg = right.rm(); + } + + DCHECK(left != scratch2 && right_reg != scratch2 && dst != scratch2 && + overflow != scratch2); + DCHECK(overflow != left && overflow != right_reg); + // use this sequence of "mulh/mul" according to the recommendation of ISA Spec 7.1 + // upper part + mulh(scratch2, left, right_reg); + // lower part + mul(dst, left, right_reg); + // expand the sign of the lower part to 64 bits + srai(overflow, dst, 63); + // if the upper part is not equal to the expanded sign bit of the lower part, + // overflow happens + xor_(overflow, overflow, scratch2); +} + #elif V8_TARGET_ARCH_RISCV32 void TurboAssembler::AddOverflow(Register dst, Register left, const Operand& right, Register overflow) { diff --git a/deps/v8/src/codegen/riscv/macro-assembler-riscv.h b/deps/v8/src/codegen/riscv/macro-assembler-riscv.h index c245e67606a9f4..eb4b71cb435291 100644 --- a/deps/v8/src/codegen/riscv/macro-assembler-riscv.h +++ b/deps/v8/src/codegen/riscv/macro-assembler-riscv.h @@ -447,6 +447,7 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { DEFINE_INSTRUCTION(Mulh32) DEFINE_INSTRUCTION(Mul64) DEFINE_INSTRUCTION(Mulh64) + DEFINE_INSTRUCTION(Mulhu64) DEFINE_INSTRUCTION2(Div32) DEFINE_INSTRUCTION2(Div64) DEFINE_INSTRUCTION2(Divu32) @@ -871,7 +872,9 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { // MulOverflow32 sets overflow register to zero if no overflow occurred void MulOverflow32(Register dst, Register left, const Operand& right, Register overflow); - + // MulOverflow64 sets overflow register
to zero if no overflow occurred + void MulOverflow64(Register dst, Register left, const Operand& right, + Register overflow); // Number of instructions needed for calculation of switch table entry address static const int kSwitchTablePrologueSize = 6; @@ -1344,8 +1347,8 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, Register feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, - Register feedback_vector); + void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, + Register feedback_vector); // ------------------------------------------------------------------------- // Support functions. diff --git a/deps/v8/src/codegen/s390/assembler-s390-inl.h b/deps/v8/src/codegen/s390/assembler-s390-inl.h index 3a63c5abbc7e95..91f18727cd0edb 100644 --- a/deps/v8/src/codegen/s390/assembler-s390-inl.h +++ b/deps/v8/src/codegen/s390/assembler-s390-inl.h @@ -91,7 +91,7 @@ Address RelocInfo::target_internal_reference_address() { Address RelocInfo::target_address() { DCHECK(IsRelativeCodeTarget(rmode_) || IsCodeTarget(rmode_) || - IsRuntimeEntry(rmode_) || IsWasmCall(rmode_)); + IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } @@ -141,10 +141,8 @@ Handle<Object> Assembler::code_target_object_handle_at(Address pc) { HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); - } else if (IsCompressedEmbeddedObject(rmode_)) { - return HeapObject::cast(Object(DecompressTaggedAny( + if (IsCompressedEmbeddedObject(rmode_)) { + return HeapObject::cast(Object(V8HeapCompressionScheme::DecompressTaggedAny( cage_base, Assembler::target_compressed_address_at(pc_, constant_pool_)))); } else { @@ -161,9 +159,7 @@ Handle<HeapObject> Assembler::compressed_embedded_object_handle_at( Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { DCHECK(IsRelativeCodeTarget(rmode_) || IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); - } else if (IsCodeTarget(rmode_) || IsRelativeCodeTarget(rmode_)) { + if (IsCodeTarget(rmode_) || IsRelativeCodeTarget(rmode_)) { return Handle<HeapObject>::cast(origin->code_target_object_handle_at(pc_)); } else { if (IsCompressedEmbeddedObject(rmode_)) { @@ -178,12 +174,11 @@ void RelocInfo::set_target_object(Heap* heap, HeapObject target, WriteBarrierMode write_barrier_mode, ICacheFlushMode icache_flush_mode) { DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); - if (IsDataEmbeddedObject(rmode_)) { - WriteUnalignedValue(pc_, target.ptr()); - // No need to flush icache since no instructions were changed.
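Several of the helpers added in this patch (mips64 DMulOverflow, riscv MulOverflow64, and the s390 MulHighS64/MulHighU64 further down) hinge on the high doubleword of a 64x64->128-bit product. A portable sketch of that arithmetic, assuming a compiler with `__int128` (GCC/Clang); this mirrors the logic, not V8's actual code:

```cpp
#include <cstdint>

// High doubleword of an unsigned 64x64->128 multiply -- what s390's
// mlgr leaves in the even register of the destination pair.
uint64_t MulHighU64(uint64_t a, uint64_t b) {
  return static_cast<uint64_t>((static_cast<unsigned __int128>(a) * b) >> 64);
}

// Signed-overflow test used by DMulOverflow and MulOverflow64: the
// product overflows 64 bits iff the high doubleword differs from the
// sign extension of the low doubleword (the dsra32/srai + xor sequence).
bool SignedMulOverflows64(int64_t a, int64_t b) {
  __int128 product = static_cast<__int128>(a) * b;
  int64_t lo = static_cast<int64_t>(product);
  int64_t hi = static_cast<int64_t>(product >> 64);
  return hi != (lo >> 63);
}
```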
- } else if (IsCompressedEmbeddedObject(rmode_)) { + if (IsCompressedEmbeddedObject(rmode_)) { Assembler::set_target_compressed_address_at( - pc_, constant_pool_, CompressTagged(target.ptr()), icache_flush_mode); + pc_, constant_pool_, + V8HeapCompressionScheme::CompressTagged(target.ptr()), + icache_flush_mode); } else { DCHECK(IsFullEmbeddedObject(rmode_)); Assembler::set_target_address_at(pc_, constant_pool_, target.ptr(), @@ -208,29 +203,15 @@ void RelocInfo::set_target_external_reference( Builtin RelocInfo::target_builtin_at(Assembler* origin) { UNREACHABLE(); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return target_address(); -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) - set_target_address(target, write_barrier_mode, icache_flush_mode); -} - void RelocInfo::WipeOut() { DCHECK(IsEmbeddedObjectMode(rmode_) || IsCodeTarget(rmode_) || - IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) || - IsInternalReference(rmode_) || IsInternalReferenceEncoded(rmode_) || - IsOffHeapTarget(rmode_)); + IsExternalReference(rmode_) || IsInternalReference(rmode_) || + IsInternalReferenceEncoded(rmode_) || IsOffHeapTarget(rmode_)); if (IsInternalReference(rmode_)) { // Jump table entry Memory<Address>(pc_) = kNullAddress; diff --git a/deps/v8/src/codegen/s390/assembler-s390.cc b/deps/v8/src/codegen/s390/assembler-s390.cc index 3f5811c0d46454..0b0bf2bc4363bf 100644 --- a/deps/v8/src/codegen/s390/assembler-s390.cc +++ b/deps/v8/src/codegen/s390/assembler-s390.cc @@ -782,8 +782,7 @@ void Assembler::db(uint8_t data) { void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { CheckBuffer(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uint32_t*>(pc_) = data; @@ -793,8 +792,7 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { void Assembler::dq(uint64_t value, RelocInfo::Mode rmode) { CheckBuffer(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uint64_t*>(pc_) = value; @@ -804,8 +802,7 @@ void Assembler::dq(uint64_t value, RelocInfo::Mode rmode) { void Assembler::dp(uintptr_t data, RelocInfo::Mode rmode) { CheckBuffer(); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } *reinterpret_cast<uintptr_t*>(pc_) = data; diff --git a/deps/v8/src/codegen/s390/constants-s390.h b/deps/v8/src/codegen/s390/constants-s390.h index 0e7310d2a724d1..edf4eba214f813 100644 --- a/deps/v8/src/codegen/s390/constants-s390.h +++ b/deps/v8/src/codegen/s390/constants-s390.h @@ -270,6 +270,7 @@ using SixByteInstr = uint64_t; V(xgrk, XGRK, 0xB9E7) /* type = RRF_A EXCLUSIVE OR (64) */ \ V(agrk, AGRK, 0xB9E8) /* type = RRF_A ADD (64) */ \ V(sgrk, SGRK, 0xB9E9) /* type = RRF_A SUBTRACT (64) */ \ + V(mgrk, MGRK, 0xB9EC) /* type = RRF_A MULTIPLY (128<-64) */ \ V(algrk, ALGRK, 0xB9EA) /* type =
RRF_A ADD LOGICAL (64) */ \ V(slgrk, SLGRK, 0xB9EB) /* type = RRF_A SUBTRACT LOGICAL (64) */ \ V(nrk, NRK, 0xB9F4) /* type = RRF_A AND (32) */ \ @@ -874,6 +875,7 @@ using SixByteInstr = uint64_t; V(ay, AY, 0xE35A) /* type = RXY_A ADD (32) */ \ V(sy, SY, 0xE35B) /* type = RXY_A SUBTRACT (32) */ \ V(mfy, MFY, 0xE35C) /* type = RXY_A MULTIPLY (64<-32) */ \ + V(mg, MG, 0xE384) /* type = RXY_A MULTIPLY (128<-64) */ \ V(aly, ALY, 0xE35E) /* type = RXY_A ADD LOGICAL (32) */ \ V(sly, SLY, 0xE35F) /* type = RXY_A SUBTRACT LOGICAL (32) */ \ V(sthy, STHY, 0xE370) /* type = RXY_A STORE HALFWORD (16) */ \ diff --git a/deps/v8/src/codegen/s390/macro-assembler-s390.cc b/deps/v8/src/codegen/s390/macro-assembler-s390.cc index 95d124645554d7..48758093e2d4ec 100644 --- a/deps/v8/src/codegen/s390/macro-assembler-s390.cc +++ b/deps/v8/src/codegen/s390/macro-assembler-s390.cc @@ -1165,12 +1165,12 @@ void TurboAssembler::PushStandardFrame(Register function_reg) { } void TurboAssembler::RestoreFrameStateForTailCall() { - // if (v8_flags.enable_embedded_constant_pool) { + // if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { // LoadU64(kConstantPoolRegister, // MemOperand(fp, StandardFrameConstants::kConstantPoolOffset)); // set_constant_pool_available(false); // } - DCHECK(!v8_flags.enable_embedded_constant_pool); + DCHECK(!V8_EMBEDDED_CONSTANT_POOL_BOOL); LoadU64(r14, MemOperand(fp, StandardFrameConstants::kCallerPCOffset)); LoadU64(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); } @@ -2146,7 +2146,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( b(Condition(7), flags_need_processing); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, Register feedback_vector) { DCHECK(!AreAliased(flags, feedback_vector)); Label maybe_has_optimized_code, maybe_needs_logging; @@ -2939,6 +2939,29 @@ void TurboAssembler::MulS64(Register dst, const MemOperand& opnd) { msg(dst, opnd); } +void TurboAssembler::MulHighS64(Register dst, Register src1, Register src2) { + mgrk(r0, src1, src2); + lgr(dst, r0); +} + +void TurboAssembler::MulHighS64(Register dst, Register src1, + const MemOperand& src2) { + // TODO(v8): implement this. + UNIMPLEMENTED(); +} + +void TurboAssembler::MulHighU64(Register dst, Register src1, Register src2) { + lgr(r1, src1); + mlgr(r0, src2); + lgr(dst, r0); +} + +void TurboAssembler::MulHighU64(Register dst, Register src1, + const MemOperand& src2) { + // TODO(v8): implement this. 
+ UNIMPLEMENTED(); +} + void TurboAssembler::Sqrt(DoubleRegister result, DoubleRegister input) { sqdbr(result, input); } diff --git a/deps/v8/src/codegen/s390/macro-assembler-s390.h b/deps/v8/src/codegen/s390/macro-assembler-s390.h index 7d9a6ca09028a4..6a91f1c096b1d4 100644 --- a/deps/v8/src/codegen/s390/macro-assembler-s390.h +++ b/deps/v8/src/codegen/s390/macro-assembler-s390.h @@ -322,6 +322,10 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { } } } + void MulHighS64(Register dst, Register src1, Register src2); + void MulHighS64(Register dst, Register src1, const MemOperand& src2); + void MulHighU64(Register dst, Register src1, Register src2); + void MulHighU64(Register dst, Register src1, const MemOperand& src2); void MulHighS32(Register dst, Register src1, const MemOperand& src2); void MulHighS32(Register dst, Register src1, Register src2); @@ -1764,8 +1768,8 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, Register feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, - Register feedback_vector); + void OptimizeCodeOrTailCallOptimizedCodeSlot(Register flags, + Register feedback_vector); // --------------------------------------------------------------------------- // GC Support diff --git a/deps/v8/src/codegen/x64/assembler-x64-inl.h b/deps/v8/src/codegen/x64/assembler-x64-inl.h index a00c63ee00c6b0..8bddf1df9aa260 100644 --- a/deps/v8/src/codegen/x64/assembler-x64-inl.h +++ b/deps/v8/src/codegen/x64/assembler-x64-inl.h @@ -35,14 +35,6 @@ void Assembler::emitw(uint16_t x) { pc_ += sizeof(uint16_t); } -void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) { - DCHECK(RelocInfo::IsRuntimeEntry(rmode)); - DCHECK_NE(options().code_range_base, 0); - RecordRelocInfo(rmode); - uint32_t offset = static_cast<uint32_t>(entry - options().code_range_base); - emitl(offset); -} - void Assembler::emit(Immediate x) { if (!RelocInfo::IsNoInfo(x.rmode_)) { RecordRelocInfo(x.rmode_); @@ -277,17 +269,12 @@ Builtin Assembler::target_builtin_at(Address pc) { return static_cast<Builtin>(builtin_id); } -Address Assembler::runtime_entry_at(Address pc) { - return ReadUnalignedValue<int32_t>(pc) + options().code_range_base; -} - // ----------------------------------------------------------------------------- // Implementation of RelocInfo // The modes possibly affected by apply must be in kApplyMask. 
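
A minimal reference sketch of what the new s390 MulHighS64/MulHighU64 helpers above compute: mgrk/mlgr leave the full 128-bit product in the even/odd register pair r0:r1, and the helpers copy the high half (r0) into dst. Illustration only, not V8 code; assumes a compiler with __int128 support:

#include <cstdint>

// High 64 bits of the signed 128-bit product (what mgrk + lgr produce).
int64_t MulHighS64(int64_t a, int64_t b) {
  return static_cast<int64_t>((static_cast<__int128>(a) * b) >> 64);
}

// High 64 bits of the unsigned 128-bit product (what mlgr produces).
uint64_t MulHighU64(uint64_t a, uint64_t b) {
  return static_cast<uint64_t>((static_cast<unsigned __int128>(a) * b) >> 64);
}
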
void RelocInfo::apply(intptr_t delta) { - if (IsCodeTarget(rmode_) || IsNearBuiltinEntry(rmode_) || - IsRuntimeEntry(rmode_)) { + if (IsCodeTarget(rmode_) || IsNearBuiltinEntry(rmode_)) { WriteUnalignedValue( pc_, ReadUnalignedValue<int32_t>(pc_) - static_cast<int32_t>(delta)); } else if (IsInternalReference(rmode_)) { @@ -298,15 +285,14 @@ void RelocInfo::apply(intptr_t delta) { Address RelocInfo::target_address() { DCHECK(IsCodeTarget(rmode_) || IsNearBuiltinEntry(rmode_) || - IsRuntimeEntry(rmode_) || IsWasmCall(rmode_)); + IsWasmCall(rmode_)); return Assembler::target_address_at(pc_, constant_pool_); } Address RelocInfo::target_address_address() { - DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) || - IsWasmStubCall(rmode_) || IsFullEmbeddedObject(rmode_) || - IsCompressedEmbeddedObject(rmode_) || IsExternalReference(rmode_) || - IsOffHeapTarget(rmode_)); + DCHECK(IsCodeTarget(rmode_) || IsWasmCall(rmode_) || IsWasmStubCall(rmode_) || + IsFullEmbeddedObject(rmode_) || IsCompressedEmbeddedObject(rmode_) || + IsExternalReference(rmode_) || IsOffHeapTarget(rmode_)); return pc_; } @@ -326,14 +312,15 @@ HeapObject RelocInfo::target_object(PtrComprCageBase cage_base) { if (IsCompressedEmbeddedObject(rmode_)) { Tagged_t compressed = ReadUnalignedValue<Tagged_t>(pc_); DCHECK(!HAS_SMI_TAG(compressed)); - Object obj(DecompressTaggedPointer(cage_base, compressed)); + Object obj(V8HeapCompressionScheme::DecompressTaggedPointer(cage_base, + compressed)); // Embedding of compressed Code objects must not happen when external code // space is enabled, because CodeDataContainers must be used instead. DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, !IsCodeSpaceObject(HeapObject::cast(obj))); return HeapObject::cast(obj); } - DCHECK(IsFullEmbeddedObject(rmode_) || IsDataEmbeddedObject(rmode_)); + DCHECK(IsFullEmbeddedObject(rmode_)); return HeapObject::cast(Object(ReadUnalignedValue<Address>(pc_))); } @@ -345,7 +332,7 @@ Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) { if (IsCompressedEmbeddedObject(rmode_)) { return origin->compressed_embedded_object_handle_at(pc_); } - DCHECK(IsFullEmbeddedObject(rmode_) || IsDataEmbeddedObject(rmode_)); + DCHECK(IsFullEmbeddedObject(rmode_)); return Handle<HeapObject>::cast(ReadUnalignedValue<Handle<Object>>(pc_)); } } @@ -380,10 +367,10 @@ void RelocInfo::set_target_object(Heap* heap, HeapObject target, DCHECK(IsCodeTarget(rmode_) || IsEmbeddedObjectMode(rmode_)); if (IsCompressedEmbeddedObject(rmode_)) { DCHECK(COMPRESS_POINTERS_BOOL); - Tagged_t tagged = CompressTagged(target.ptr()); + Tagged_t tagged = V8HeapCompressionScheme::CompressTagged(target.ptr()); WriteUnalignedValue(pc_, tagged); } else { - DCHECK(IsFullEmbeddedObject(rmode_) || IsDataEmbeddedObject(rmode_)); + DCHECK(IsFullEmbeddedObject(rmode_)); WriteUnalignedValue(pc_, target.ptr()); } if (icache_flush_mode != SKIP_ICACHE_FLUSH) { @@ -399,20 +386,6 @@ Builtin RelocInfo::target_builtin_at(Assembler* origin) { return Assembler::target_builtin_at(pc_); } -Address RelocInfo::target_runtime_entry(Assembler* origin) { - DCHECK(IsRuntimeEntry(rmode_)); - return target_address(); -} - -void RelocInfo::set_target_runtime_entry(Address target, - WriteBarrierMode write_barrier_mode, - ICacheFlushMode icache_flush_mode) { - DCHECK(IsRuntimeEntry(rmode_)); - if (target_address() != target) { - set_target_address(target, write_barrier_mode, icache_flush_mode); - } -} - Address RelocInfo::target_off_heap_target() { DCHECK(IsOffHeapTarget(rmode_)); return 
ReadUnalignedValue<Address>(pc_); @@ -424,9 +397,9 @@ void RelocInfo::WipeOut() { WriteUnalignedValue(pc_, kNullAddress); } else if (IsCompressedEmbeddedObject(rmode_)) { Address smi_address = Smi::FromInt(0).ptr(); - WriteUnalignedValue(pc_, CompressTagged(smi_address)); - } else if (IsCodeTarget(rmode_) || IsNearBuiltinEntry(rmode_) || - IsRuntimeEntry(rmode_)) { + WriteUnalignedValue(pc_, + V8HeapCompressionScheme::CompressTagged(smi_address)); + } else if (IsCodeTarget(rmode_) || IsNearBuiltinEntry(rmode_)) { // Effectively write zero into the relocation. Assembler::set_target_address_at(pc_, constant_pool_, pc_ + sizeof(int32_t)); diff --git a/deps/v8/src/codegen/x64/assembler-x64.cc b/deps/v8/src/codegen/x64/assembler-x64.cc index 2bbf6fda821187..c9ece3338fca5d 100644 --- a/deps/v8/src/codegen/x64/assembler-x64.cc +++ b/deps/v8/src/codegen/x64/assembler-x64.cc @@ -989,14 +989,6 @@ void Assembler::call(Label* L) { } } -void Assembler::call(Address entry, RelocInfo::Mode rmode) { - DCHECK(RelocInfo::IsRuntimeEntry(rmode)); - EnsureSpace ensure_space(this); - // 1110 1000 #32-bit disp. - emit(0xE8); - emit_runtime_entry(entry, rmode); -} - void Assembler::call(Handle<CodeT> target, RelocInfo::Mode rmode) { DCHECK(RelocInfo::IsCodeTarget(rmode)); DCHECK(FromCodeT(*target).IsExecutable()); @@ -1412,14 +1404,6 @@ void Assembler::j(Condition cc, Handle<CodeT> target, RelocInfo::Mode rmode) { emitl(code_target_index); } -void Assembler::jmp(Address entry, RelocInfo::Mode rmode) { - DCHECK(RelocInfo::IsRuntimeEntry(rmode)); - EnsureSpace ensure_space(this); - // 1110 1001 #32-bit disp. - emit(0xE9); - emit_runtime_entry(entry, rmode); -} - void Assembler::jmp_rel(int32_t offset) { EnsureSpace ensure_space(this); // The offset is encoded relative to the next instruction. 
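
The x64 RelocInfo::apply above keeps pc-relative 32-bit displacements valid when a code buffer is moved: the target stays fixed while the pc moves by delta, so the stored displacement must shrink by delta. A toy model of that adjustment (illustrative names, not V8 code):

#include <cstdint>
#include <cstring>

// A 4-byte pc-relative displacement is stored at `pc`:
//   target == address_of_next_instruction + disp.
// When the buffer moves by `delta`, the pc grows by delta, so disp must
// shrink by delta for the target to stay fixed.
void ApplyRelocation(uint8_t* pc, intptr_t delta) {
  int32_t disp;
  std::memcpy(&disp, pc, sizeof(disp));  // like ReadUnalignedValue<int32_t>
  disp -= static_cast<int32_t>(delta);
  std::memcpy(pc, &disp, sizeof(disp));  // like WriteUnalignedValue
}
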
@@ -1892,6 +1876,13 @@ void Assembler::mulq(Register src) { emit_modrm(0x4, src); } +void Assembler::mulq(Operand src) { + EnsureSpace ensure_space(this); + emit_rex_64(src); + emit(0xF7); + emit_operand(0x4, src); +} + void Assembler::negb(Register reg) { EnsureSpace ensure_space(this); emit_optional_rex_8(reg); @@ -4415,8 +4406,7 @@ void Assembler::db(uint8_t data) { void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { EnsureSpace ensure_space(this); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } emitl(data); @@ -4425,8 +4415,7 @@ void Assembler::dd(uint32_t data, RelocInfo::Mode rmode) { void Assembler::dq(uint64_t data, RelocInfo::Mode rmode) { EnsureSpace ensure_space(this); if (!RelocInfo::IsNoInfo(rmode)) { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode) || - RelocInfo::IsLiteralConstant(rmode)); + DCHECK(RelocInfo::IsLiteralConstant(rmode)); RecordRelocInfo(rmode); } emitq(data); @@ -4464,7 +4453,6 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { const int RelocInfo::kApplyMask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | RelocInfo::ModeMask(RelocInfo::NEAR_BUILTIN_ENTRY) | - RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::WASM_CALL); diff --git a/deps/v8/src/codegen/x64/assembler-x64.h b/deps/v8/src/codegen/x64/assembler-x64.h index 708012bfce8d6d..ab79cf8ee6a613 100644 --- a/deps/v8/src/codegen/x64/assembler-x64.h +++ b/deps/v8/src/codegen/x64/assembler-x64.h @@ -476,7 +476,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { inline Handle<CodeT> code_target_object_handle_at(Address pc); inline Handle<HeapObject> compressed_embedded_object_handle_at(Address pc); - inline Address runtime_entry_at(Address pc); // Number of bytes taken up by the branch target in the code. static constexpr int kSpecialTargetSize = 4; // 32-bit displacement. @@ -710,6 +709,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { void mull(Operand src); // Multiply rax by src, put the result in rdx:rax. void mulq(Register src); + void mulq(Operand src); #define DECLARE_SHIFT_INSTRUCTION(instruction, subcode) \ void instruction##l(Register dst, Immediate imm8) { \ @@ -815,7 +815,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { // Calls // Call near relative 32-bit displacement, relative to next instruction. void call(Label* L); - void call(Address entry, RelocInfo::Mode rmode); // Explicitly emit a near call / near jump. The displacement is relative to // the next instructions (which starts at {pc_offset() + kNearJmpInstrSize}). 
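
The new mulq(Operand) overload above emits REX.W + 0xF7 with ModRM /4, i.e. the one-operand unsigned multiply RDX:RAX = RAX * src. As a worked encoding example (my arithmetic, not from the patch), `mulq 8(%rcx)` would encode as 48 F7 61 08. A sketch of the semantics, with __int128 standing in for the RDX:RAX register pair:

#include <cstdint>

// One-operand 64-bit MUL: RDX:RAX = RAX * src (unsigned).
void MulqSemantics(uint64_t& rax, uint64_t& rdx, uint64_t src) {
  unsigned __int128 product = static_cast<unsigned __int128>(rax) * src;
  rax = static_cast<uint64_t>(product);        // low half -> RAX
  rdx = static_cast<uint64_t>(product >> 64);  // high half -> RDX
}
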
@@ -835,7 +834,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { // Unconditional jump to L void jmp(Label* L, Label::Distance distance = Label::kFar); void jmp(Handle<CodeT> target, RelocInfo::Mode rmode); - void jmp(Address entry, RelocInfo::Mode rmode); // Jump near absolute indirect (r64) void jmp(Register adr); @@ -2116,6 +2114,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { private: Address addr_at(int pos) { + DCHECK_GE(pos, 0); + DCHECK_LT(pos, pc_offset()); return reinterpret_cast<Address>(buffer_start_ + pos); } uint32_t long_at(int pos) { @@ -2132,7 +2132,6 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase { inline void emitl(uint32_t x); inline void emitq(uint64_t x); inline void emitw(uint16_t x); - inline void emit_runtime_entry(Address entry, RelocInfo::Mode rmode); inline void emit(Immediate x); inline void emit(Immediate64 x); diff --git a/deps/v8/src/codegen/x64/macro-assembler-x64.cc b/deps/v8/src/codegen/x64/macro-assembler-x64.cc index 8ef7f7be06b062..43c4961b40b717 100644 --- a/deps/v8/src/codegen/x64/macro-assembler-x64.cc +++ b/deps/v8/src/codegen/x64/macro-assembler-x64.cc @@ -908,7 +908,7 @@ void MacroAssembler::LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( j(not_zero, flags_need_processing); } -void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( +void MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, Register feedback_vector, Register closure, JumpMode jump_mode) { ASM_CODE_COMMENT(this); @@ -918,12 +918,12 @@ void MacroAssembler::MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( testl(flags, Immediate(FeedbackVector::kFlagsTieringStateIsAnyRequested)); j(zero, &maybe_needs_logging); - GenerateTailCallToReturnedCode(Runtime::kCompileOptimized); + GenerateTailCallToReturnedCode(Runtime::kCompileOptimized, jump_mode); bind(&maybe_needs_logging); testl(flags, Immediate(FeedbackVector::LogNextExecutionBit::kMask)); j(zero, &maybe_has_optimized_code); - GenerateTailCallToReturnedCode(Runtime::kFunctionLogNextExecution); + GenerateTailCallToReturnedCode(Runtime::kFunctionLogNextExecution, jump_mode); bind(&maybe_has_optimized_code); Register optimized_code_entry = flags; diff --git a/deps/v8/src/codegen/x64/macro-assembler-x64.h b/deps/v8/src/codegen/x64/macro-assembler-x64.h index a213f9e4a8e5d3..7f138fd9d9bc58 100644 --- a/deps/v8/src/codegen/x64/macro-assembler-x64.h +++ b/deps/v8/src/codegen/x64/macro-assembler-x64.h @@ -840,7 +840,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler { void LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( Register flags, Register feedback_vector, CodeKind current_code_kind, Label* flags_need_processing); - void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( + void OptimizeCodeOrTailCallOptimizedCodeSlot( Register flags, Register feedback_vector, Register closure, JumpMode jump_mode = JumpMode::kJump); diff --git a/deps/v8/src/common/globals.h b/deps/v8/src/common/globals.h index 68634d7d1c9e9c..4e6994294ac31a 100644 --- a/deps/v8/src/common/globals.h +++ b/deps/v8/src/common/globals.h @@ -72,9 +72,9 @@ namespace internal { // Determine whether the architecture uses an embedded constant pool // (contiguous constant pool embedded in code object). 
#if V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_PPC64 -#define V8_EMBEDDED_CONSTANT_POOL true +#define V8_EMBEDDED_CONSTANT_POOL_BOOL true #else -#define V8_EMBEDDED_CONSTANT_POOL false +#define V8_EMBEDDED_CONSTANT_POOL_BOOL false #endif #ifdef DEBUG @@ -113,9 +113,10 @@ namespace internal { #define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL false #endif -#if defined(V8_SHARED_RO_HEAP) && \ - (!defined(V8_COMPRESS_POINTERS) || \ - defined(V8_COMPRESS_POINTERS_IN_SHARED_CAGE)) +#if defined(V8_SHARED_RO_HEAP) && \ + (!defined(V8_COMPRESS_POINTERS) || \ + defined(V8_COMPRESS_POINTERS_IN_SHARED_CAGE)) && \ + !defined(V8_DISABLE_WRITE_BARRIERS) #define V8_CAN_CREATE_SHARED_HEAP_BOOL true #else #define V8_CAN_CREATE_SHARED_HEAP_BOOL false @@ -141,12 +142,7 @@ namespace internal { #define ENABLE_CONTROL_FLOW_INTEGRITY_BOOL false #endif -#if (V8_TARGET_ARCH_S390X && COMPRESS_POINTERS_BOOL) -// TODO(v8:11421): Enable Sparkplug for these architectures. -#define ENABLE_SPARKPLUG false -#else #define ENABLE_SPARKPLUG true -#endif #if V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 // Set stack limit lower for ARM and ARM64 than for other architectures because: @@ -156,9 +152,16 @@ namespace internal { // initializing V8 we already have a large stack and so have to set the // limit lower. See issue crbug.com/v8/10575 #define V8_DEFAULT_STACK_SIZE_KB 864 +#elif V8_TARGET_ARCH_IA32 +// In mid-2022, we're observing an increase in stack overflow crashes on +// 32-bit Windows; the suspicion is that some third-party software suddenly +// started to consume a lot more stack memory (before V8 is even initialized). +// So we speculatively lower the ia32 limit to the ARM limit for the time +// being. See crbug.com/1346791. +#define V8_DEFAULT_STACK_SIZE_KB 864 #else // Slightly less than 1MB, since Windows' default stack size for -// the main execution thread is 1MB for both 32 and 64-bit. +// the main execution thread is 1MB. #define V8_DEFAULT_STACK_SIZE_KB 984 #endif @@ -744,6 +747,13 @@ constexpr intptr_t kObjectAlignmentMask = kObjectAlignment - 1; constexpr intptr_t kObjectAlignment8GbHeap = 8; constexpr intptr_t kObjectAlignment8GbHeapMask = kObjectAlignment8GbHeap - 1; +#ifdef V8_COMPRESS_POINTERS_8GB +static_assert( + kObjectAlignment8GbHeap == 2 * kTaggedSize, + "When the 8GB heap is enabled, all allocations should be aligned to twice " + "the size of a tagged value."); +#endif + // Desired alignment for system pointers. 
constexpr intptr_t kPointerAlignment = (1 << kSystemPointerSizeLog2); constexpr intptr_t kPointerAlignmentMask = kPointerAlignment - 1; @@ -896,6 +906,8 @@ class CompressedObjectSlot; class CompressedMaybeObjectSlot; class CompressedMapWordSlot; class CompressedHeapObjectSlot; +class V8HeapCompressionScheme; +template <typename CompressionScheme> class OffHeapCompressedObjectSlot; class FullObjectSlot; class FullMaybeObjectSlot; @@ -924,15 +936,16 @@ struct SlotTraits { using TObjectSlot = CompressedObjectSlot; using TMaybeObjectSlot = CompressedMaybeObjectSlot; using THeapObjectSlot = CompressedHeapObjectSlot; - using TOffHeapObjectSlot = OffHeapCompressedObjectSlot; - using TCodeObjectSlot = OffHeapCompressedObjectSlot; + using TOffHeapObjectSlot = + OffHeapCompressedObjectSlot<V8HeapCompressionScheme>; + using TCodeObjectSlot = OffHeapCompressedObjectSlot<V8HeapCompressionScheme>; #else using TObjectSlot = FullObjectSlot; using TMaybeObjectSlot = FullMaybeObjectSlot; using THeapObjectSlot = FullHeapObjectSlot; using TOffHeapObjectSlot = OffHeapFullObjectSlot; using TCodeObjectSlot = OffHeapFullObjectSlot; -#endif +#endif // V8_COMPRESS_POINTERS }; // An ObjectSlot instance describes a kTaggedSize-sized on-heap field ("slot") @@ -969,22 +982,24 @@ using WeakSlotCallbackWithHeap = bool (*)(Heap* heap, FullObjectSlot pointer); // NOTE: SpaceIterator depends on AllocationSpace enumeration values being // consecutive. enum AllocationSpace { - RO_SPACE, // Immortal, immovable and immutable objects, - OLD_SPACE, // Old generation regular object space. - CODE_SPACE, // Old generation code object space, marked executable. - MAP_SPACE, // Old generation map object space, non-movable. - NEW_SPACE, // Young generation space for regular objects collected - // with Scavenger/MinorMC. - LO_SPACE, // Old generation large object space. - CODE_LO_SPACE, // Old generation large code object space. - NEW_LO_SPACE, // Young generation large object space. + RO_SPACE, // Immortal, immovable and immutable objects, + OLD_SPACE, // Old generation regular object space. + CODE_SPACE, // Old generation code object space, marked executable. + MAP_SPACE, // Old generation map object space, non-movable. + SHARED_SPACE, // Space shared between multiple isolates. Optional. + NEW_SPACE, // Young generation space for regular objects collected + // with Scavenger/MinorMC. + LO_SPACE, // Old generation large object space. + CODE_LO_SPACE, // Old generation large code object space. + NEW_LO_SPACE, // Young generation large object space. + SHARED_LO_SPACE, // Space shared between multiple isolates. Optional. FIRST_SPACE = RO_SPACE, - LAST_SPACE = NEW_LO_SPACE, + LAST_SPACE = SHARED_LO_SPACE, FIRST_MUTABLE_SPACE = OLD_SPACE, - LAST_MUTABLE_SPACE = NEW_LO_SPACE, + LAST_MUTABLE_SPACE = SHARED_LO_SPACE, FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE, - LAST_GROWABLE_PAGED_SPACE = MAP_SPACE, + LAST_GROWABLE_PAGED_SPACE = SHARED_SPACE, FIRST_SWEEPABLE_SPACE = OLD_SPACE, LAST_SWEEPABLE_SPACE = NEW_SPACE }; @@ -1001,6 +1016,46 @@ enum class AllocationType : uint8_t { kSharedMap, // Map object in MAP_SPACE in the shared heap }; +// These values are persisted to logs. Entries should not be renumbered and +// numeric values should never be reused. If you add new items here, update +// src/tools/metrics/histograms/enums.xml in chromium. 
+enum class GarbageCollectionReason : int { + kUnknown = 0, + kAllocationFailure = 1, + kAllocationLimit = 2, + kContextDisposal = 3, + kCountersExtension = 4, + kDebugger = 5, + kDeserializer = 6, + kExternalMemoryPressure = 7, + kFinalizeMarkingViaStackGuard = 8, + kFinalizeMarkingViaTask = 9, + kFullHashtable = 10, + kHeapProfiler = 11, + kTask = 12, + kLastResort = 13, + kLowMemoryNotification = 14, + kMakeHeapIterable = 15, + kMemoryPressure = 16, + kMemoryReducer = 17, + kRuntime = 18, + kSamplingProfiler = 19, + kSnapshotCreator = 20, + kTesting = 21, + kExternalFinalize = 22, + kGlobalAllocationLimit = 23, + kMeasureMemory = 24, + kBackgroundAllocationFailure = 25, + kFinalizeMinorMC = 26, + kCppHeapAllocationFailure = 27, + + kLastReason = kCppHeapAllocationFailure, +}; + +static_assert(kGarbageCollectionReasonMaxValue == + static_cast<int>(GarbageCollectionReason::kLastReason), + "The value of kGarbageCollectionReasonMaxValue is inconsistent."); + inline size_t hash_value(AllocationType kind) { return static_cast<uint8_t>(kind); } @@ -1227,6 +1282,19 @@ constexpr int kIeeeDoubleExponentWordOffset = 0; #define OBJECT_POINTER_ALIGN(value) \ (((value) + ::i::kObjectAlignmentMask) & ~::i::kObjectAlignmentMask) +// OBJECT_POINTER_ALIGN is used to statically align object sizes to +// kObjectAlignment (which is kTaggedSize). ALIGN_TO_ALLOCATION_ALIGNMENT is +// used for dynamic allocations to align sizes and addresses to at least 8 bytes +// when an 8GB+ compressed heap is enabled. +// TODO(v8:13070): Consider merging this with OBJECT_POINTER_ALIGN. +#ifdef V8_COMPRESS_POINTERS_8GB +#define ALIGN_TO_ALLOCATION_ALIGNMENT(value) \ + (((value) + ::i::kObjectAlignment8GbHeapMask) & \ + ~::i::kObjectAlignment8GbHeapMask) +#else +#define ALIGN_TO_ALLOCATION_ALIGNMENT(value) (value) +#endif + // OBJECT_POINTER_PADDING returns the padding size required to align value // as a HeapObject pointer #define OBJECT_POINTER_PADDING(value) (OBJECT_POINTER_ALIGN(value) - (value)) @@ -1600,7 +1668,7 @@ inline uint32_t ObjectHash(Address address) { // // kSignedSmall -> kSignedSmallInputs -> kNumber -> kNumberOrOddball -> kAny // kString -> kAny -// kBigInt -> kAny +// kBigInt64 -> kBigInt -> kAny // // Technically we wouldn't need the separation between the kNumber and the // kNumberOrOddball values here, since for binary operations, we always @@ -1617,7 +1685,8 @@ class BinaryOperationFeedback { kNumber = 0x7, kNumberOrOddball = 0xF, kString = 0x10, - kBigInt = 0x20, + kBigInt64 = 0x20, + kBigInt = 0x60, kAny = 0x7F }; }; @@ -1804,6 +1873,15 @@ static constexpr uint32_t kNoneOrInProgressMask = 0b110; TIERING_STATE_LIST(V) #undef V +constexpr bool IsRequestMaglev(TieringState state) { + return IsRequestMaglev_Concurrent(state) || + IsRequestMaglev_Synchronous(state); +} +constexpr bool IsRequestTurbofan(TieringState state) { + return IsRequestTurbofan_Concurrent(state) || + IsRequestTurbofan_Synchronous(state); +} + constexpr const char* ToString(TieringState marker) { switch (marker) { #define V(Name, Value) \ diff --git a/deps/v8/src/common/message-template.h b/deps/v8/src/common/message-template.h index bb1b284d91b008..4412c83ea3209b 100644 --- a/deps/v8/src/common/message-template.h +++ b/deps/v8/src/common/message-template.h @@ -115,6 +115,7 @@ namespace internal { T(InvalidArgument, "invalid_argument") \ T(InvalidArgumentForTemporal, "Invalid argument for Temporal %") \ T(InvalidInOperatorUse, "Cannot use 'in' operator to search for '%' in %") \ + T(InvalidRawJsonValue, "Invalid value for 
JSON.rawJSON") \ T(InvalidRegExpExecResult, \ "RegExp exec method returned something other than an Object or null") \ T(InvalidUnit, "Invalid unit argument for %() '%'") \ @@ -658,6 +659,7 @@ namespace internal { T(WasmTrapStringIsolatedSurrogate, \ "Failed to encode string as UTF-8: contains unpaired surrogate") \ T(WasmExceptionError, "wasm exception") \ + T(WasmObjectsAreOpaque, "WebAssembly objects are opaque") \ /* Asm.js validation related */ \ T(AsmJsInvalid, "Invalid asm.js: %") \ T(AsmJsCompiled, "Converted asm.js to WebAssembly: %") \ @@ -685,17 +687,15 @@ namespace internal { T(TraceEventPhaseError, "Trace event phase must be a number.") \ T(TraceEventIDError, "Trace event id must be a number.") \ /* Weak refs */ \ - T(WeakRefsUnregisterTokenMustBeObject, \ - "unregisterToken ('%') must be an object") \ + T(InvalidWeakRefsUnregisterToken, "Invalid unregisterToken ('%')") \ T(WeakRefsCleanupMustBeCallable, \ "FinalizationRegistry: cleanup must be callable") \ - T(WeakRefsRegisterTargetMustBeObject, \ - "FinalizationRegistry.prototype.register: target must be an object") \ + T(InvalidWeakRefsRegisterTarget, \ + "FinalizationRegistry.prototype.register: invalid target") \ T(WeakRefsRegisterTargetAndHoldingsMustNotBeSame, \ "FinalizationRegistry.prototype.register: target and holdings must not " \ "be same") \ - T(WeakRefsWeakRefConstructorTargetMustBeObject, \ - "WeakRef: target must be an object") \ + T(InvalidWeakRefsWeakRefConstructorTarget, "WeakRef: invalid target") \ T(OptionalChainingNoNew, "Invalid optional chain from new expression") \ T(OptionalChainingNoSuper, "Invalid optional chain from super property") \ T(OptionalChainingNoTemplate, "Invalid tagged template on optional chain") \ diff --git a/deps/v8/src/common/ptr-compr-inl.h b/deps/v8/src/common/ptr-compr-inl.h index f5991ddcda2eb4..1322270e8f0439 100644 --- a/deps/v8/src/common/ptr-compr-inl.h +++ b/deps/v8/src/common/ptr-compr-inl.h @@ -26,74 +26,127 @@ Address PtrComprCageBase::address() const { return ret; } -// Compresses full-pointer representation of a tagged value to on-heap -// representation. -V8_INLINE Tagged_t CompressTagged(Address tagged) { - return static_cast<Tagged_t>(static_cast<uint32_t>(tagged)); -} +// +// V8HeapCompressionScheme +// -V8_INLINE constexpr Address GetPtrComprCageBaseAddress(Address on_heap_addr) { +// static +Address V8HeapCompressionScheme::GetPtrComprCageBaseAddress( + Address on_heap_addr) { return RoundDown<kPtrComprCageBaseAlignment>(on_heap_addr); } -V8_INLINE Address GetPtrComprCageBaseAddress(PtrComprCageBase cage_base) { +// static +Address V8HeapCompressionScheme::GetPtrComprCageBaseAddress( + PtrComprCageBase cage_base) { return cage_base.address(); } -V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress( - Address address) { - return PtrComprCageBase(GetPtrComprCageBaseAddress(address)); +// static +Tagged_t V8HeapCompressionScheme::CompressTagged(Address tagged) { + return static_cast<Tagged_t>(static_cast<uint32_t>(tagged)); } -// Decompresses smi value. -V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) { +// static +Address V8HeapCompressionScheme::DecompressTaggedSigned(Tagged_t raw_value) { // For runtime code the upper 32-bits of the Smi value do not matter. return static_cast<Address>(raw_value); } -// Decompresses weak or strong heap object pointer or forwarding pointer, -// preserving both weak- and smi- tags. 
+// static template <typename TOnHeapAddress> -V8_INLINE Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr, - Tagged_t raw_value) { +Address V8HeapCompressionScheme::DecompressTaggedPointer( + TOnHeapAddress on_heap_addr, Tagged_t raw_value) { return GetPtrComprCageBaseAddress(on_heap_addr) + static_cast<Address>(raw_value); } -// Decompresses any tagged value, preserving both weak- and smi- tags. +// static template <typename TOnHeapAddress> -V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr, - Tagged_t raw_value) { +Address V8HeapCompressionScheme::DecompressTaggedAny( + TOnHeapAddress on_heap_addr, Tagged_t raw_value) { return DecompressTaggedPointer(on_heap_addr, raw_value); } +// static +template <typename ProcessPointerCallback> +void V8HeapCompressionScheme::ProcessIntermediatePointers( + PtrComprCageBase cage_base, Address raw_value, + ProcessPointerCallback callback) { + // If pointer compression is enabled, we may have random compressed pointers + // on the stack that may be used for subsequent operations. + // Extract, decompress and trace both halfwords. + Address decompressed_low = V8HeapCompressionScheme::DecompressTaggedPointer( + cage_base, static_cast<Tagged_t>(raw_value)); + callback(decompressed_low); + Address decompressed_high = V8HeapCompressionScheme::DecompressTaggedPointer( + cage_base, + static_cast<Tagged_t>(raw_value >> (sizeof(Tagged_t) * CHAR_BIT))); + callback(decompressed_high); +} + +// +// Misc functions. +// + +V8_INLINE PtrComprCageBase +GetPtrComprCageBaseFromOnHeapAddress(Address address) { + return PtrComprCageBase( + V8HeapCompressionScheme::GetPtrComprCageBaseAddress(address)); +} + #else -V8_INLINE Tagged_t CompressTagged(Address tagged) { UNREACHABLE(); } +// +// V8HeapCompressionScheme +// -V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress( - Address address) { - return PtrComprCageBase(); +// static +Address V8HeapCompressionScheme::GetPtrComprCageBaseAddress( + Address on_heap_addr) { + UNREACHABLE(); +} + +// static +Tagged_t V8HeapCompressionScheme::CompressTagged(Address tagged) { + UNREACHABLE(); } -V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) { UNREACHABLE(); } +// static +Address V8HeapCompressionScheme::DecompressTaggedSigned(Tagged_t raw_value) { + UNREACHABLE(); +} template <typename TOnHeapAddress> -V8_INLINE Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr, - Tagged_t raw_value) { +Address V8HeapCompressionScheme::DecompressTaggedPointer( + TOnHeapAddress on_heap_addr, Tagged_t raw_value) { UNREACHABLE(); } +// static template <typename TOnHeapAddress> -V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr, - Tagged_t raw_value) { +Address V8HeapCompressionScheme::DecompressTaggedAny( + TOnHeapAddress on_heap_addr, Tagged_t raw_value) { UNREACHABLE(); } -V8_INLINE Address GetPtrComprCageBaseAddress(Address on_heap_addr) { +// static +template <typename ProcessPointerCallback> +void V8HeapCompressionScheme::ProcessIntermediatePointers( + PtrComprCageBase cage_base, Address raw_value, + ProcessPointerCallback callback) { UNREACHABLE(); } +// +// Misc functions. 
+// + +V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress( + Address address) { + return PtrComprCageBase(); +} + #endif // V8_COMPRESS_POINTERS V8_INLINE PtrComprCageBase GetPtrComprCageBase(HeapObject object) { diff --git a/deps/v8/src/common/ptr-compr.h b/deps/v8/src/common/ptr-compr.h index 58d14602908e93..5a47331a5414a4 100644 --- a/deps/v8/src/common/ptr-compr.h +++ b/deps/v8/src/common/ptr-compr.h @@ -8,8 +8,52 @@ #include "src/base/memory.h" #include "src/common/globals.h" -namespace v8 { -namespace internal { +namespace v8::internal { + +// This is just a collection of compression scheme related functions. Having +// such a class allows plugging different decompression scheme in certain +// places by introducing another CompressionScheme class with a customized +// implementation. This is useful, for example, for CodeDataContainer::code +// field (see CodeObjectSlot). +class V8HeapCompressionScheme { + public: + V8_INLINE static Address GetPtrComprCageBaseAddress(Address on_heap_addr); + + V8_INLINE static Address GetPtrComprCageBaseAddress( + PtrComprCageBase cage_base); + + // Compresses full-pointer representation of a tagged value to on-heap + // representation. + V8_INLINE static Tagged_t CompressTagged(Address tagged); + + // Decompresses smi value. + V8_INLINE static Address DecompressTaggedSigned(Tagged_t raw_value); + + // Decompresses weak or strong heap object pointer or forwarding pointer, + // preserving both weak- and smi- tags. + template <typename TOnHeapAddress> + V8_INLINE static Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr, + Tagged_t raw_value); + // Decompresses any tagged value, preserving both weak- and smi- tags. + template <typename TOnHeapAddress> + V8_INLINE static Address DecompressTaggedAny(TOnHeapAddress on_heap_addr, + Tagged_t raw_value); + + // Given a 64bit raw value, found on the stack, calls the callback function + // with all possible pointers that may be "contained" in compressed form in + // this value, either as complete compressed pointers or as intermediate + // (half-computed) results. + template <typename ProcessPointerCallback> + V8_INLINE static void ProcessIntermediatePointers( + PtrComprCageBase cage_base, Address raw_value, + ProcessPointerCallback callback); +}; + +#ifdef V8_EXTERNAL_CODE_SPACE +// Compression scheme used for fields containing Code objects (namely for the +// CodeDataContainer::code field). +using ExternalCodeCompressionScheme = V8HeapCompressionScheme; +#endif // V8_EXTERNAL_CODE_SPACE // Accessors for fields that may be unaligned due to pointer compression. 
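
A compact model of the V8HeapCompressionScheme contract declared above, under the assumption of a 4GB-aligned pointer-compression cage (illustration only; plain integers stand in for Address/Tagged_t):

#include <cassert>
#include <cstdint>

constexpr uint64_t kCage = uint64_t{1} << 32;  // assumed 4GB cage alignment

// CompressTagged: keep the low 32 bits.
uint32_t Compress(uint64_t tagged) { return static_cast<uint32_t>(tagged); }

// DecompressTaggedPointer: add the offset back onto the cage base.
uint64_t Decompress(uint64_t base, uint32_t raw) { return base + raw; }

int main() {
  uint64_t base = 3 * kCage;        // some 4GB-aligned cage base
  uint64_t tagged = base + 0x4321;  // a tagged value inside the cage
  assert(Decompress(base, Compress(tagged)) == tagged);

  // ProcessIntermediatePointers: both halves of a raw stack word are
  // treated as possible compressed pointers and decompressed.
  uint64_t raw_word = (uint64_t{0xBEEF} << 32) | 0xCAFE;
  uint64_t low = Decompress(base, static_cast<uint32_t>(raw_word));
  uint64_t high = Decompress(base, static_cast<uint32_t>(raw_word >> 32));
  (void)low;
  (void)high;
}
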
@@ -49,7 +93,6 @@ static inline void WriteMaybeUnalignedValue(Address p, V value) { } } -} // namespace internal -} // namespace v8 +} // namespace v8::internal #endif // V8_COMMON_PTR_COMPR_H_ diff --git a/deps/v8/src/compiler-dispatcher/lazy-compile-dispatcher.cc b/deps/v8/src/compiler-dispatcher/lazy-compile-dispatcher.cc index c598180067a141..a8148b7700ddf7 100644 --- a/deps/v8/src/compiler-dispatcher/lazy-compile-dispatcher.cc +++ b/deps/v8/src/compiler-dispatcher/lazy-compile-dispatcher.cc @@ -43,9 +43,9 @@ class LazyCompileDispatcher::JobTask : public v8::JobTask { size_t GetMaxConcurrency(size_t worker_count) const final { size_t n = lazy_compile_dispatcher_->num_jobs_for_background_.load( std::memory_order_relaxed); - if (FLAG_lazy_compile_dispatcher_max_threads == 0) return n; + if (v8_flags.lazy_compile_dispatcher_max_threads == 0) return n; return std::min( - n, static_cast<size_t>(FLAG_lazy_compile_dispatcher_max_threads)); + n, static_cast<size_t>(v8_flags.lazy_compile_dispatcher_max_threads)); } private: @@ -69,7 +69,7 @@ LazyCompileDispatcher::LazyCompileDispatcher(Isolate* isolate, reinterpret_cast<v8::Isolate*>(isolate))), platform_(platform), max_stack_size_(max_stack_size), - trace_compiler_dispatcher_(FLAG_trace_compiler_dispatcher), + trace_compiler_dispatcher_(v8_flags.trace_compiler_dispatcher), idle_task_manager_(new CancelableTaskManager()), idle_task_scheduled_(false), num_jobs_for_background_(0), diff --git a/deps/v8/src/compiler-dispatcher/lazy-compile-dispatcher.h b/deps/v8/src/compiler-dispatcher/lazy-compile-dispatcher.h index 5eebde9e5b2510..7e6702851a10aa 100644 --- a/deps/v8/src/compiler-dispatcher/lazy-compile-dispatcher.h +++ b/deps/v8/src/compiler-dispatcher/lazy-compile-dispatcher.h @@ -189,7 +189,8 @@ class V8_EXPORT_PRIVATE LazyCompileDispatcher { std::unique_ptr<JobHandle> job_handle_; - // Copy of FLAG_trace_compiler_dispatcher to allow for access from any thread. + // Copy of v8_flags.trace_compiler_dispatcher to allow for access from any + // thread. bool trace_compiler_dispatcher_; std::unique_ptr<CancelableTaskManager> idle_task_manager_; diff --git a/deps/v8/src/compiler-dispatcher/optimizing-compile-dispatcher.cc b/deps/v8/src/compiler-dispatcher/optimizing-compile-dispatcher.cc index 892815a890a3aa..d85832bdcda650 100644 --- a/deps/v8/src/compiler-dispatcher/optimizing-compile-dispatcher.cc +++ b/deps/v8/src/compiler-dispatcher/optimizing-compile-dispatcher.cc @@ -50,8 +50,10 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask { RuntimeCallCounterId::kOptimizeBackgroundDispatcherJob); TimerEventScope<TimerEventRecompileConcurrent> timer(isolate_); - TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), - "V8.OptimizeBackground"); + TurbofanCompilationJob* job = dispatcher_->NextInput(&local_isolate); + TRACE_EVENT_WITH_FLOW0( + TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.OptimizeBackground", job, + TRACE_EVENT_FLAG_FLOW_IN | TRACE_EVENT_FLAG_FLOW_OUT); if (dispatcher_->recompilation_delay_ != 0) { base::OS::Sleep(base::TimeDelta::FromMilliseconds( @@ -62,8 +64,7 @@ class OptimizingCompileDispatcher::CompileTask : public CancelableTask { // code space in order to be able to get a bytecode array from a baseline // code. See SharedFunctionInfo::GetActiveBytecodeArray() for details. 
RwxMemoryWriteScope::SetDefaultPermissionsForNewThread(); - dispatcher_->CompileNext(dispatcher_->NextInput(&local_isolate), - &local_isolate); + dispatcher_->CompileNext(job, &local_isolate); } { base::MutexGuard lock_guard(&dispatcher_->ref_count_mutex_); @@ -167,7 +168,7 @@ void OptimizingCompileDispatcher::FlushQueues( void OptimizingCompileDispatcher::Flush(BlockingBehavior blocking_behavior) { HandleScope handle_scope(isolate_); FlushQueues(blocking_behavior, true); - if (FLAG_trace_concurrent_recompilation) { + if (v8_flags.trace_concurrent_recompilation) { PrintF(" ** Flushed concurrent recompilation queues. (mode: %s)\n", (blocking_behavior == BlockingBehavior::kBlock) ? "blocking" : "non blocking"); @@ -199,7 +200,7 @@ void OptimizingCompileDispatcher::InstallOptimizedFunctions() { // If another racing task has already finished compiling and installing the // requested code kind on the function, throw out the current job. if (!info->is_osr() && function->HasAvailableCodeKind(info->code_kind())) { - if (FLAG_trace_concurrent_recompilation) { + if (v8_flags.trace_concurrent_recompilation) { PrintF(" ** Aborting compilation for "); function->ShortPrint(); PrintF(" as it has already been optimized.\n"); diff --git a/deps/v8/src/compiler-dispatcher/optimizing-compile-dispatcher.h b/deps/v8/src/compiler-dispatcher/optimizing-compile-dispatcher.h index 44c878d50ff472..9794ed1935e9f3 100644 --- a/deps/v8/src/compiler-dispatcher/optimizing-compile-dispatcher.h +++ b/deps/v8/src/compiler-dispatcher/optimizing-compile-dispatcher.h @@ -27,11 +27,11 @@ class V8_EXPORT_PRIVATE OptimizingCompileDispatcher { public: explicit OptimizingCompileDispatcher(Isolate* isolate) : isolate_(isolate), - input_queue_capacity_(FLAG_concurrent_recompilation_queue_length), + input_queue_capacity_(v8_flags.concurrent_recompilation_queue_length), input_queue_length_(0), input_queue_shift_(0), ref_count_(0), - recompilation_delay_(FLAG_concurrent_recompilation_delay) { + recompilation_delay_(v8_flags.concurrent_recompilation_delay) { input_queue_ = NewArray<TurbofanCompilationJob*>(input_queue_capacity_); } @@ -49,7 +49,7 @@ class V8_EXPORT_PRIVATE OptimizingCompileDispatcher { return input_queue_length_ < input_queue_capacity_; } - static bool Enabled() { return FLAG_concurrent_recompilation; } + static bool Enabled() { return v8_flags.concurrent_recompilation; } // This method must be called on the main thread. bool HasJobs(); @@ -101,7 +101,7 @@ class V8_EXPORT_PRIVATE OptimizingCompileDispatcher { base::Mutex ref_count_mutex_; ParkingConditionVariable ref_count_zero_; - // Copy of FLAG_concurrent_recompilation_delay that will be used from the + // Copy of v8_flags.concurrent_recompilation_delay that will be used from the // background thread. 
// // Since flags might get modified while the background thread is running, it diff --git a/deps/v8/src/compiler/access-builder.cc b/deps/v8/src/compiler/access-builder.cc index 7a8d7426be23dd..4b1f609025d919 100644 --- a/deps/v8/src/compiler/access-builder.cc +++ b/deps/v8/src/compiler/access-builder.cc @@ -376,39 +376,48 @@ FieldAccess AccessBuilder::ForJSArrayBufferViewBuffer() { // static FieldAccess AccessBuilder::ForJSArrayBufferViewByteLength() { FieldAccess access = {kTaggedBase, - JSArrayBufferView::kByteLengthOffset, + JSArrayBufferView::kRawByteLengthOffset, MaybeHandle<Name>(), MaybeHandle<Map>(), TypeCache::Get()->kJSArrayBufferViewByteLengthType, MachineType::UintPtr(), kNoWriteBarrier, "JSArrayBufferViewByteLength"}; +#ifdef V8_ENABLE_SANDBOX + access.is_bounded_size_access = true; +#endif return access; } // static FieldAccess AccessBuilder::ForJSArrayBufferViewByteOffset() { FieldAccess access = {kTaggedBase, - JSArrayBufferView::kByteOffsetOffset, + JSArrayBufferView::kRawByteOffsetOffset, MaybeHandle<Name>(), MaybeHandle<Map>(), TypeCache::Get()->kJSArrayBufferViewByteOffsetType, MachineType::UintPtr(), kNoWriteBarrier, "JSArrayBufferViewByteOffset"}; +#ifdef V8_ENABLE_SANDBOX + access.is_bounded_size_access = true; +#endif return access; } // static FieldAccess AccessBuilder::ForJSTypedArrayLength() { FieldAccess access = {kTaggedBase, - JSTypedArray::kLengthOffset, + JSTypedArray::kRawLengthOffset, MaybeHandle<Name>(), MaybeHandle<Map>(), TypeCache::Get()->kJSTypedArrayLengthType, MachineType::UintPtr(), kNoWriteBarrier, "JSTypedArrayLength"}; +#ifdef V8_ENABLE_SANDBOX + access.is_bounded_size_access = true; +#endif return access; } diff --git a/deps/v8/src/compiler/access-info.cc b/deps/v8/src/compiler/access-info.cc index 4c8923325b4d06..7c35df243af97d 100644 --- a/deps/v8/src/compiler/access-info.cc +++ b/deps/v8/src/compiler/access-info.cc @@ -139,18 +139,20 @@ PropertyAccessInfo PropertyAccessInfo::FastDataConstant( // static PropertyAccessInfo PropertyAccessInfo::FastAccessorConstant( - Zone* zone, MapRef receiver_map, base::Optional<ObjectRef> constant, - base::Optional<JSObjectRef> holder) { - return PropertyAccessInfo(zone, kFastAccessorConstant, holder, constant, {}, - {{receiver_map}, zone}); + Zone* zone, MapRef receiver_map, base::Optional<JSObjectRef> holder, + base::Optional<ObjectRef> constant, + base::Optional<JSObjectRef> api_holder) { + return PropertyAccessInfo(zone, kFastAccessorConstant, holder, constant, + api_holder, {} /* name */, {{receiver_map}, zone}); } // static PropertyAccessInfo PropertyAccessInfo::ModuleExport(Zone* zone, MapRef receiver_map, CellRef cell) { - return PropertyAccessInfo(zone, kModuleExport, {}, cell, {}, - {{receiver_map}, zone}); + return PropertyAccessInfo(zone, kModuleExport, {} /* holder */, + cell /* constant */, {} /* api_holder */, + {} /* name */, {{receiver_map}, zone}); } // static @@ -170,9 +172,11 @@ PropertyAccessInfo PropertyAccessInfo::DictionaryProtoDataConstant( // static PropertyAccessInfo PropertyAccessInfo::DictionaryProtoAccessorConstant( Zone* zone, MapRef receiver_map, base::Optional<JSObjectRef> holder, - ObjectRef constant, NameRef property_name) { + ObjectRef constant, base::Optional<JSObjectRef> api_holder, + NameRef property_name) { return PropertyAccessInfo(zone, kDictionaryProtoAccessorConstant, holder, - constant, property_name, {{receiver_map}, zone}); + constant, api_holder, property_name, + {{receiver_map}, zone}); } PropertyAccessInfo::PropertyAccessInfo(Zone* zone) @@ -196,12 +200,13 
@@ PropertyAccessInfo::PropertyAccessInfo( PropertyAccessInfo::PropertyAccessInfo( Zone* zone, Kind kind, base::Optional<JSObjectRef> holder, - base::Optional<ObjectRef> constant, base::Optional<NameRef> name, - ZoneVector<MapRef>&& lookup_start_object_maps) + base::Optional<ObjectRef> constant, base::Optional<JSObjectRef> api_holder, + base::Optional<NameRef> name, ZoneVector<MapRef>&& lookup_start_object_maps) : kind_(kind), lookup_start_object_maps_(lookup_start_object_maps), constant_(constant), holder_(holder), + api_holder_(api_holder), unrecorded_dependencies_(zone), field_representation_(Representation::None()), field_type_(Type::Any()), @@ -544,8 +549,8 @@ PropertyAccessInfo AccessorAccessInfoHelper( DCHECK(!map.is_dictionary_map()); // HasProperty checks don't call getter/setters, existence is sufficient. - return PropertyAccessInfo::FastAccessorConstant(zone, receiver_map, {}, - holder); + return PropertyAccessInfo::FastAccessorConstant(zone, receiver_map, holder, + {}, {}); } Handle<Object> maybe_accessors = get_accessors(); if (!maybe_accessors->IsAccessorPair()) { @@ -559,6 +564,7 @@ PropertyAccessInfo AccessorAccessInfoHelper( base::Optional<ObjectRef> accessor_ref = TryMakeRef(broker, accessor); if (!accessor_ref.has_value()) return PropertyAccessInfo::Invalid(zone); + base::Optional<JSObjectRef> api_holder_ref; if (!accessor->IsJSFunction()) { CallOptimization optimization(broker->local_isolate_or_isolate(), accessor); if (!optimization.is_simple_api_call() || @@ -567,24 +573,22 @@ PropertyAccessInfo AccessorAccessInfoHelper( return PropertyAccessInfo::Invalid(zone); } - CallOptimization::HolderLookup lookup; - Handle<JSObject> holder_handle = broker->CanonicalPersistentHandle( + CallOptimization::HolderLookup holder_lookup; + Handle<JSObject> api_holder = broker->CanonicalPersistentHandle( optimization.LookupHolderOfExpectedType( broker->local_isolate_or_isolate(), receiver_map.object(), - &lookup)); - if (lookup == CallOptimization::kHolderNotFound) { + &holder_lookup)); + if (holder_lookup == CallOptimization::kHolderNotFound) { return PropertyAccessInfo::Invalid(zone); } - DCHECK_IMPLIES(lookup == CallOptimization::kHolderIsReceiver, - holder_handle.is_null()); - DCHECK_IMPLIES(lookup == CallOptimization::kHolderFound, - !holder_handle.is_null()); - - if (holder_handle.is_null()) { - holder = {}; - } else { - holder = TryMakeRef(broker, holder_handle); - if (!holder.has_value()) return PropertyAccessInfo::Invalid(zone); + DCHECK_IMPLIES(holder_lookup == CallOptimization::kHolderIsReceiver, + api_holder.is_null()); + DCHECK_IMPLIES(holder_lookup == CallOptimization::kHolderFound, + !api_holder.is_null()); + + if (!api_holder.is_null()) { + api_holder_ref = TryMakeRef(broker, api_holder); + if (!api_holder_ref.has_value()) return PropertyAccessInfo::Invalid(zone); } } if (access_mode == AccessMode::kLoad) { @@ -602,11 +606,12 @@ PropertyAccessInfo AccessorAccessInfoHelper( } if (map.is_dictionary_map()) { + CHECK(!api_holder_ref.has_value()); return PropertyAccessInfo::DictionaryProtoAccessorConstant( - zone, receiver_map, holder, accessor_ref.value(), name); + zone, receiver_map, holder, accessor_ref.value(), api_holder_ref, name); } else { return PropertyAccessInfo::FastAccessorConstant( - zone, receiver_map, accessor_ref.value(), holder); + zone, receiver_map, holder, accessor_ref.value(), api_holder_ref); } } @@ -876,7 +881,8 @@ PropertyAccessInfo AccessInfoFactory::ComputePropertyAccessInfo( if (!map_prototype_map.object()->IsJSObjectMap()) { // Don't allow proxies 
on the prototype chain. if (!prototype.IsNull()) { - DCHECK(prototype.object()->IsJSProxy()); + DCHECK(prototype.object()->IsJSProxy() || + prototype.object()->IsWasmObject()); return Invalid(); } diff --git a/deps/v8/src/compiler/access-info.h b/deps/v8/src/compiler/access-info.h index 5b0b9bee2ca8ba..d75e8d7b2b0d20 100644 --- a/deps/v8/src/compiler/access-info.h +++ b/deps/v8/src/compiler/access-info.h @@ -85,8 +85,9 @@ class PropertyAccessInfo final { base::Optional<JSObjectRef> holder, base::Optional<MapRef> transition_map); static PropertyAccessInfo FastAccessorConstant( - Zone* zone, MapRef receiver_map, base::Optional<ObjectRef> constant, - base::Optional<JSObjectRef> holder); + Zone* zone, MapRef receiver_map, base::Optional<JSObjectRef> holder, + base::Optional<ObjectRef> constant, + base::Optional<JSObjectRef> api_holder); static PropertyAccessInfo ModuleExport(Zone* zone, MapRef receiver_map, CellRef cell); static PropertyAccessInfo StringLength(Zone* zone, MapRef receiver_map); @@ -96,7 +97,7 @@ class PropertyAccessInfo final { InternalIndex dict_index, NameRef name); static PropertyAccessInfo DictionaryProtoAccessorConstant( Zone* zone, MapRef receiver_map, base::Optional<JSObjectRef> holder, - ObjectRef constant, NameRef name); + ObjectRef constant, base::Optional<JSObjectRef> api_holder, NameRef name); bool Merge(PropertyAccessInfo const* that, AccessMode access_mode, Zone* zone) V8_WARN_UNUSED_RESULT; @@ -127,12 +128,20 @@ class PropertyAccessInfo final { ConstFieldInfo GetConstFieldInfo() const; Kind kind() const { return kind_; } + + // The object where the property definition was found. base::Optional<JSObjectRef> holder() const { // TODO(neis): There was a CHECK here that tries to protect against // using the access info without recording its dependencies first. // Find a more suitable place for it. return holder_; } + // For accessor properties when the callback is an API function with a + // signature, this is the value that will be passed to the callback as + // FunctionCallbackInfo::Holder(). + // Don't mix it up with holder in a "object where the property was found" + // sense. 
+ base::Optional<JSObjectRef> api_holder() const { return api_holder_; } base::Optional<MapRef> transition_map() const { DCHECK(!HasDictionaryHolder()); return transition_map_; @@ -180,6 +189,7 @@ class PropertyAccessInfo final { ZoneVector<MapRef>&& lookup_start_object_maps); PropertyAccessInfo(Zone* zone, Kind kind, base::Optional<JSObjectRef> holder, base::Optional<ObjectRef> constant, + base::Optional<JSObjectRef> api_holder, base::Optional<NameRef> name, ZoneVector<MapRef>&& lookup_start_object_maps); PropertyAccessInfo(Kind kind, base::Optional<JSObjectRef> holder, @@ -198,6 +208,7 @@ class PropertyAccessInfo final { ZoneVector<MapRef> lookup_start_object_maps_; base::Optional<ObjectRef> constant_; base::Optional<JSObjectRef> holder_; + base::Optional<JSObjectRef> api_holder_; // Members only used for fast mode holders: ZoneVector<CompilationDependency const*> unrecorded_dependencies_; diff --git a/deps/v8/src/compiler/backend/arm/code-generator-arm.cc b/deps/v8/src/compiler/backend/arm/code-generator-arm.cc index 8d8e5204b2969e..4c5accd7a8f352 100644 --- a/deps/v8/src/compiler/backend/arm/code-generator-arm.cc +++ b/deps/v8/src/compiler/backend/arm/code-generator-arm.cc @@ -747,7 +747,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } case kArchCallJSFunction: { Register func = i.InputRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { UseScratchRegisterScope temps(tasm()); Register scratch = temps.Acquire(); // Check the function's context matches the context argument. @@ -938,7 +938,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( Register object = i.InputRegister(0); Register value = i.InputRegister(2); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Checking that |value| is not a cleared weakref: our write barrier // does not support that for now. __ cmp(value, Operand(kClearedWeakHeapObjectLower32)); @@ -3588,7 +3588,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr, ReferenceMap* reference_map = gen_->zone()->New<ReferenceMap>(gen_->zone()); gen_->RecordSafepoint(reference_map); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ stop(); } } @@ -3743,7 +3743,7 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally. Thereby we can avoid the integer overflow // check in the condition code. - if (required_slots * kSystemPointerSize < FLAG_stack_size * KB) { + if (required_slots * kSystemPointerSize < v8_flags.stack_size * KB) { UseScratchRegisterScope temps(tasm()); Register scratch = temps.Acquire(); __ ldr(scratch, FieldMemOperand( @@ -3760,7 +3760,7 @@ void CodeGenerator::AssembleConstructFrame() { // define an empty safepoint. 
ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone()); RecordSafepoint(reference_map); - if (FLAG_debug_code) __ stop(); + if (v8_flags.debug_code) __ stop(); __ bind(&done); } @@ -3824,7 +3824,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ cmp(g.ToRegister(additional_pop_count), Operand(0)); __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue); } diff --git a/deps/v8/src/compiler/backend/arm/instruction-selector-arm.cc b/deps/v8/src/compiler/backend/arm/instruction-selector-arm.cc index 578581877e3ffc..8733aff7879641 100644 --- a/deps/v8/src/compiler/backend/arm/instruction-selector-arm.cc +++ b/deps/v8/src/compiler/backend/arm/instruction-selector-arm.cc @@ -700,11 +700,12 @@ void VisitStoreCommon(InstructionSelector* selector, Node* node, WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind(); MachineRepresentation rep = store_rep.representation(); - if (FLAG_enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) { + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedPointer(rep)); AddressingMode addressing_mode; InstructionOperand inputs[3]; diff --git a/deps/v8/src/compiler/backend/arm/unwinding-info-writer-arm.h b/deps/v8/src/compiler/backend/arm/unwinding-info-writer-arm.h index 6b9ade0c4834cc..de81134bd14363 100644 --- a/deps/v8/src/compiler/backend/arm/unwinding-info-writer-arm.h +++ b/deps/v8/src/compiler/backend/arm/unwinding-info-writer-arm.h @@ -49,7 +49,7 @@ class UnwindingInfoWriter { } private: - bool enabled() const { return FLAG_perf_prof_unwinding_info; } + bool enabled() const { return v8_flags.perf_prof_unwinding_info; } class BlockInitialState : public ZoneObject { public: diff --git a/deps/v8/src/compiler/backend/arm64/code-generator-arm64.cc b/deps/v8/src/compiler/backend/arm64/code-generator-arm64.cc index 4a9654e8b9d96e..60d19c79307529 100644 --- a/deps/v8/src/compiler/backend/arm64/code-generator-arm64.cc +++ b/deps/v8/src/compiler/backend/arm64/code-generator-arm64.cc @@ -433,7 +433,7 @@ class WasmProtectedInstructionTrap final : public WasmOutOfLineTrap { : WasmOutOfLineTrap(gen, instr), pc_(pc) {} void Generate() override { - DCHECK(FLAG_wasm_bounds_checks && !FLAG_wasm_enforce_bounds_checks); + DCHECK(v8_flags.wasm_bounds_checks && !v8_flags.wasm_enforce_bounds_checks); gen_->AddProtectedInstructionLanding(pc_, __ pc_offset()); GenerateWithTrapId(TrapId::kTrapMemOutOfBounds); } @@ -767,7 +767,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } case kArchCallJSFunction: { Register func = i.InputRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check the function's context matches the context argument. UseScratchRegisterScope scope(tasm()); Register temp = scope.AcquireX(); @@ -961,7 +961,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } Register value = i.InputRegister(2); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Checking that |value| is not a cleared weakref: our write barrier // does not support that for now. 
__ cmp(value, Operand(kClearedWeakHeapObjectLower32)); @@ -1164,6 +1164,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( case kArm64Mul: __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); break; + case kArm64Smulh: + __ Smulh(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); + break; + case kArm64Umulh: + __ Umulh(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); + break; case kArm64Mul32: __ Mul(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1)); break; @@ -3144,7 +3150,7 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally. Thereby we can avoid the integer overflow // check in the condition code. - if (required_slots * kSystemPointerSize < FLAG_stack_size * KB) { + if (required_slots * kSystemPointerSize < v8_flags.stack_size * KB) { UseScratchRegisterScope scope(tasm()); Register scratch = scope.AcquireX(); __ Ldr(scratch, FieldMemOperand( @@ -3170,7 +3176,7 @@ void CodeGenerator::AssembleConstructFrame() { // define an empty safepoint. ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone()); RecordSafepoint(reference_map); - if (FLAG_debug_code) __ Brk(0); + if (v8_flags.debug_code) __ Brk(0); __ Bind(&done); } #endif // V8_ENABLE_WEBASSEMBLY @@ -3287,7 +3293,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ cmp(g.ToRegister(additional_pop_count), Operand(0)); __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue); } diff --git a/deps/v8/src/compiler/backend/arm64/instruction-codes-arm64.h b/deps/v8/src/compiler/backend/arm64/instruction-codes-arm64.h index 2d5671927f0b66..6c4eafa1d9eff3 100644 --- a/deps/v8/src/compiler/backend/arm64/instruction-codes-arm64.h +++ b/deps/v8/src/compiler/backend/arm64/instruction-codes-arm64.h @@ -80,6 +80,7 @@ namespace compiler { V(Arm64Mul32) \ V(Arm64Smlal) \ V(Arm64Smlal2) \ + V(Arm64Smulh) \ V(Arm64Smull) \ V(Arm64Smull2) \ V(Arm64Uadalp) \ @@ -87,6 +88,7 @@ namespace compiler { V(Arm64Umlal) \ V(Arm64Umlal2) \ V(Arm64Umull) \ + V(Arm64Umulh) \ V(Arm64Umull2) \ V(Arm64Madd) \ V(Arm64Madd32) \ diff --git a/deps/v8/src/compiler/backend/arm64/instruction-scheduler-arm64.cc b/deps/v8/src/compiler/backend/arm64/instruction-scheduler-arm64.cc index 909bc24c902deb..eba6cdf75eecd0 100644 --- a/deps/v8/src/compiler/backend/arm64/instruction-scheduler-arm64.cc +++ b/deps/v8/src/compiler/backend/arm64/instruction-scheduler-arm64.cc @@ -46,12 +46,14 @@ int InstructionScheduler::GetTargetInstructionFlags( case kArm64Mul32: case kArm64Smlal: case kArm64Smlal2: + case kArm64Smulh: case kArm64Smull: case kArm64Smull2: case kArm64Uadalp: case kArm64Uaddlp: case kArm64Umlal: case kArm64Umlal2: + case kArm64Umulh: case kArm64Umull: case kArm64Umull2: case kArm64Madd: diff --git a/deps/v8/src/compiler/backend/arm64/instruction-selector-arm64.cc b/deps/v8/src/compiler/backend/arm64/instruction-selector-arm64.cc index 66c39e6c9dede0..455cb180f845cd 100644 --- a/deps/v8/src/compiler/backend/arm64/instruction-selector-arm64.cc +++ b/deps/v8/src/compiler/backend/arm64/instruction-selector-arm64.cc @@ -872,13 +872,14 @@ void InstructionSelector::VisitStore(Node* node) { WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind(); 
MachineRepresentation rep = store_rep.representation(); - if (FLAG_enable_unconditional_write_barriers && + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedOrCompressedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } // TODO(arm64): I guess this could be done in a better way. - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedOrCompressedPointer(rep)); AddressingMode addressing_mode; InstructionOperand inputs[3]; @@ -1685,6 +1686,25 @@ void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node, selector->EmitWithContinuation(opcode, result, result, cont); } +void EmitInt64MulWithOverflow(InstructionSelector* selector, Node* node, + FlagsContinuation* cont) { + Arm64OperandGenerator g(selector); + Int64BinopMatcher m(node); + InstructionOperand result = g.DefineAsRegister(node); + InstructionOperand left = g.UseRegister(m.left().node()); + InstructionOperand high = g.TempRegister(); + + InstructionOperand right = g.UseRegister(m.right().node()); + selector->Emit(kArm64Mul, result, left, right); + selector->Emit(kArm64Smulh, high, left, right); + + // Test whether {high} is a sign-extension of {result}. + InstructionCode opcode = + kArm64Cmp | AddressingModeField::encode(kMode_Operand2_R_ASR_I); + selector->EmitWithContinuation(opcode, high, result, g.TempImmediate(63), + cont); +} + } // namespace void InstructionSelector::VisitInt32Mul(Node* node) { @@ -1858,6 +1878,10 @@ void InstructionSelector::VisitInt32MulHigh(Node* node) { Emit(kArm64Asr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32)); } +void InstructionSelector::VisitInt64MulHigh(Node* node) { + return VisitRRR(this, kArm64Smulh, node); +} + void InstructionSelector::VisitUint32MulHigh(Node* node) { Arm64OperandGenerator g(this); InstructionOperand const smull_operand = g.TempRegister(); @@ -1866,6 +1890,10 @@ void InstructionSelector::VisitUint32MulHigh(Node* node) { Emit(kArm64Lsr, g.DefineAsRegister(node), smull_operand, g.TempImmediate(32)); } +void InstructionSelector::VisitUint64MulHigh(Node* node) { + return VisitRRR(this, kArm64Umulh, node); +} + void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) { Arm64OperandGenerator g(this); @@ -2775,7 +2803,7 @@ void VisitAtomicStore(InstructionSelector* selector, Node* node, WriteBarrierKind write_barrier_kind = store_params.write_barrier_kind(); MachineRepresentation rep = store_params.representation(); - if (FLAG_enable_unconditional_write_barriers && + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedOrCompressedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } @@ -2785,7 +2813,8 @@ void VisitAtomicStore(InstructionSelector* selector, Node* node, InstructionOperand temps[] = {g.TempRegister()}; InstructionCode code; - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedOrCompressedPointer(rep)); DCHECK_EQ(AtomicWidthSize(width), kTaggedSize); @@ -3018,6 +3047,14 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value, cont->OverwriteAndNegateIfEqual(kOverflow); return VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm, cont); + case IrOpcode::kInt64MulWithOverflow: + // ARM64 doesn't set the overflow flag for multiplication, so we + // need to test on kNotEqual. 
Here is the code sequence used: + // mul result, left, right + // smulh high, left, right + // cmp high, result, asr 63 + cont->OverwriteAndNegateIfEqual(kNotEqual); + return EmitInt64MulWithOverflow(this, node, cont); default: break; } @@ -3214,6 +3251,20 @@ void InstructionSelector::VisitInt64SubWithOverflow(Node* node) { VisitBinop<Int64BinopMatcher>(this, node, kArm64Sub, kArithmeticImm, &cont); } +void InstructionSelector::VisitInt64MulWithOverflow(Node* node) { + if (Node* ovf = NodeProperties::FindProjection(node, 1)) { + // ARM64 doesn't set the overflow flag for multiplication, so we need to + // test on kNotEqual. Here is the code sequence used: + // mul result, left, right + // smulh high, left, right + // cmp high, result, asr 63 + FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf); + return EmitInt64MulWithOverflow(this, node, &cont); + } + FlagsContinuation cont; + EmitInt64MulWithOverflow(this, node, &cont); +} + void InstructionSelector::VisitInt64LessThan(Node* node) { FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node); VisitWordCompare(this, node, kArm64Cmp, &cont, kArithmeticImm); diff --git a/deps/v8/src/compiler/backend/arm64/unwinding-info-writer-arm64.h b/deps/v8/src/compiler/backend/arm64/unwinding-info-writer-arm64.h index 36788735de1463..e142e13d69d920 100644 --- a/deps/v8/src/compiler/backend/arm64/unwinding-info-writer-arm64.h +++ b/deps/v8/src/compiler/backend/arm64/unwinding-info-writer-arm64.h @@ -49,7 +49,7 @@ class UnwindingInfoWriter { } private: - bool enabled() const { return FLAG_perf_prof_unwinding_info; } + bool enabled() const { return v8_flags.perf_prof_unwinding_info; } class BlockInitialState : public ZoneObject { public: diff --git a/deps/v8/src/compiler/backend/code-generator.cc b/deps/v8/src/compiler/backend/code-generator.cc index 6747d710c1474d..929fc7eb77886e 100644 --- a/deps/v8/src/compiler/backend/code-generator.cc +++ b/deps/v8/src/compiler/backend/code-generator.cc @@ -214,7 +214,7 @@ void CodeGenerator::AssembleCode() { tasm()->CodeEntry(); // Check that {kJavaScriptCallCodeStartRegister} has been set correctly. - if (FLAG_debug_code && info->called_with_code_start_register()) { + if (v8_flags.debug_code && info->called_with_code_start_register()) { tasm()->RecordComment("-- Prologue: check code start register --"); AssembleCodeStartRegisterCheck(); } @@ -274,7 +274,7 @@ void CodeGenerator::AssembleCode() { // Bind a label for a block. current_block_ = block->rpo_number(); unwinding_info_writer_.BeginInstructionBlock(tasm()->pc_offset(), block); - if (FLAG_code_comments) { + if (v8_flags.code_comments) { std::ostringstream buffer; buffer << "-- B" << block->rpo_number().ToInt() << " start"; if (block->IsDeferred()) buffer << " (deferred)"; @@ -306,8 +306,15 @@ void CodeGenerator::AssembleCode() { tasm()->InitializeRootRegister(); } } - - if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) { +#ifdef V8_TARGET_ARCH_RISCV64 + // RVV uses VectorUnit to emit vset{i}vl{i}, reducing the static and dynamic + // overhead of the vset{i}vl{i} instruction. However, there are some jumps + // back between blocks, so the RVV instructions may get an incorrect vtype. + // VectorUnit therefore needs to be cleared here to ensure that the vtype + // is correct within the block.
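A note on the arm64 Int64MulWithOverflow lowering above: AArch64 multiplies set no overflow flag, so the backend detects signed 64-bit overflow by checking whether the high 64 bits of the full 128-bit product (smulh) are the sign-extension of the low 64 bits (mul). A minimal standalone sketch of the same test, assuming compiler support for __int128 and two's-complement shift and conversion behavior (an illustration, not part of this patch):

#include <cstdint>

// Mirrors the emitted sequence: mul (low word), smulh (high word), then
// cmp high, result, asr 63. Overflow occurred iff the high word is not the
// sign-extension of the low word, i.e. the kNotEqual condition above.
bool Int64MulOverflows(int64_t left, int64_t right) {
  __int128 product = static_cast<__int128>(left) * right;
  int64_t low = static_cast<int64_t>(product);         // mul result, left, right
  int64_t high = static_cast<int64_t>(product >> 64);  // smulh high, left, right
  return high != (low >> 63);                          // cmp high, result, asr 63
}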
+ tasm()->VU.clear(); +#endif + if (V8_EMBEDDED_CONSTANT_POOL_BOOL && !block->needs_frame()) { ConstantPoolUnavailableScope constant_pool_unavailable(tasm()); result_ = AssembleBlock(block); } else { @@ -813,7 +820,7 @@ void CodeGenerator::AssembleSourcePosition(SourcePosition source_position) { if (!source_position.IsKnown()) return; source_position_table_builder_.AddPosition(tasm()->pc_offset(), source_position, false); - if (FLAG_code_comments) { + if (v8_flags.code_comments) { OptimizedCompilationInfo* info = this->info(); if (!info->IsOptimizing()) { #if V8_ENABLE_WEBASSEMBLY @@ -1171,6 +1178,12 @@ void CodeGenerator::AddTranslationForOperand(Instruction* instr, translations_.StoreUint32StackSlot(LocationOperand::cast(op)->index()); } else if (type == MachineType::Int64()) { translations_.StoreInt64StackSlot(LocationOperand::cast(op)->index()); + } else if (type == MachineType::SignedBigInt64()) { + translations_.StoreSignedBigInt64StackSlot( + LocationOperand::cast(op)->index()); + } else if (type == MachineType::UnsignedBigInt64()) { + translations_.StoreUnsignedBigInt64StackSlot( + LocationOperand::cast(op)->index()); } else { #if defined(V8_COMPRESS_POINTERS) CHECK(MachineRepresentation::kTagged == type.representation() || @@ -1199,6 +1212,10 @@ void CodeGenerator::AddTranslationForOperand(Instruction* instr, translations_.StoreUint32Register(converter.ToRegister(op)); } else if (type == MachineType::Int64()) { translations_.StoreInt64Register(converter.ToRegister(op)); + } else if (type == MachineType::SignedBigInt64()) { + translations_.StoreSignedBigInt64Register(converter.ToRegister(op)); + } else if (type == MachineType::UnsignedBigInt64()) { + translations_.StoreUnsignedBigInt64Register(converter.ToRegister(op)); } else { #if defined(V8_COMPRESS_POINTERS) CHECK(MachineRepresentation::kTagged == type.representation() || @@ -1229,7 +1246,7 @@ void CodeGenerator::AddTranslationForOperand(Instruction* instr, DCHECK_EQ(4, kSystemPointerSize); Smi smi(static_cast<Address>(constant.ToInt32())); DCHECK(smi.IsSmi()); - literal = DeoptimizationLiteral(smi.value()); + literal = DeoptimizationLiteral(static_cast<double>(smi.value())); } else if (type.representation() == MachineRepresentation::kBit) { if (constant.ToInt32() == 0) { literal = @@ -1247,15 +1264,24 @@ void CodeGenerator::AddTranslationForOperand(Instruction* instr, constant.ToInt32() == FrameStateDescriptor::kImpossibleValue); if (type == MachineType::Uint32()) { literal = DeoptimizationLiteral( - static_cast<uint32_t>(constant.ToInt32())); + static_cast<double>(static_cast<uint32_t>(constant.ToInt32()))); } else { - literal = DeoptimizationLiteral(constant.ToInt32()); + literal = + DeoptimizationLiteral(static_cast<double>(constant.ToInt32())); } } break; case Constant::kInt64: DCHECK_EQ(8, kSystemPointerSize); - if (type.representation() == MachineRepresentation::kWord64) { + if (type == MachineType::SignedBigInt64()) { + literal = DeoptimizationLiteral(constant.ToInt64()); + } else if (type == MachineType::UnsignedBigInt64()) { + literal = + DeoptimizationLiteral(static_cast<uint64_t>(constant.ToInt64())); + } else if (type.representation() == MachineRepresentation::kWord64) { + CHECK_EQ( + constant.ToInt64(), + static_cast<int64_t>(static_cast<double>(constant.ToInt64()))); literal = DeoptimizationLiteral(static_cast<double>(constant.ToInt64())); } else { @@ -1264,7 +1290,7 @@ void CodeGenerator::AddTranslationForOperand(Instruction* instr, DCHECK_EQ(MachineRepresentation::kTagged, type.representation()); Smi 
smi(static_cast<Address>(constant.ToInt64())); DCHECK(smi.IsSmi()); - literal = DeoptimizationLiteral(smi.value()); + literal = DeoptimizationLiteral(static_cast<double>(smi.value())); } break; case Constant::kFloat32: @@ -1325,6 +1351,12 @@ Handle<Object> DeoptimizationLiteral::Reify(Isolate* isolate) const { case DeoptimizationLiteralKind::kNumber: { return isolate->factory()->NewNumber(number_); } + case DeoptimizationLiteralKind::kSignedBigInt64: { + return BigInt::FromInt64(isolate, signed_bigint64_); + } + case DeoptimizationLiteralKind::kUnsignedBigInt64: { + return BigInt::FromUint64(isolate, unsigned_bigint64_); + } case DeoptimizationLiteralKind::kInvalid: { UNREACHABLE(); } diff --git a/deps/v8/src/compiler/backend/code-generator.h b/deps/v8/src/compiler/backend/code-generator.h index 4c7ae40fe2b301..ee36b75dae5622 100644 --- a/deps/v8/src/compiler/backend/code-generator.h +++ b/deps/v8/src/compiler/backend/code-generator.h @@ -55,26 +55,52 @@ class InstructionOperandIterator { size_t pos_; }; -enum class DeoptimizationLiteralKind { kObject, kNumber, kInvalid }; +enum class DeoptimizationLiteralKind { + kObject, + kNumber, + kSignedBigInt64, + kUnsignedBigInt64, + kInvalid +}; -// Either a non-null Handle<Object> or a double. +// A non-null Handle<Object>, a double, an int64_t, or a uint64_t. class DeoptimizationLiteral { public: DeoptimizationLiteral() - : kind_(DeoptimizationLiteralKind::kInvalid), object_(), number_(0) {} + : kind_(DeoptimizationLiteralKind::kInvalid), object_() {} explicit DeoptimizationLiteral(Handle<Object> object) : kind_(DeoptimizationLiteralKind::kObject), object_(object) { CHECK(!object_.is_null()); } explicit DeoptimizationLiteral(double number) : kind_(DeoptimizationLiteralKind::kNumber), number_(number) {} + explicit DeoptimizationLiteral(int64_t signed_bigint64) + : kind_(DeoptimizationLiteralKind::kSignedBigInt64), + signed_bigint64_(signed_bigint64) {} + explicit DeoptimizationLiteral(uint64_t unsigned_bigint64) + : kind_(DeoptimizationLiteralKind::kUnsignedBigInt64), + unsigned_bigint64_(unsigned_bigint64) {} Handle<Object> object() const { return object_; } bool operator==(const DeoptimizationLiteral& other) const { - return kind_ == other.kind_ && object_.equals(other.object_) && - base::bit_cast<uint64_t>(number_) == + if (kind_ != other.kind_) { + return false; + } + switch (kind_) { + case DeoptimizationLiteralKind::kObject: + return object_.equals(other.object_); + case DeoptimizationLiteralKind::kNumber: + return base::bit_cast<uint64_t>(number_) == base::bit_cast<uint64_t>(other.number_); + case DeoptimizationLiteralKind::kSignedBigInt64: + return signed_bigint64_ == other.signed_bigint64_; + case DeoptimizationLiteralKind::kUnsignedBigInt64: + return unsigned_bigint64_ == other.unsigned_bigint64_; + case DeoptimizationLiteralKind::kInvalid: + return true; + } + UNREACHABLE(); } Handle<Object> Reify(Isolate* isolate) const; @@ -91,8 +117,12 @@ class DeoptimizationLiteral { private: DeoptimizationLiteralKind kind_; - Handle<Object> object_; - double number_ = 0; + union { + Handle<Object> object_; + double number_; + int64_t signed_bigint64_; + uint64_t unsigned_bigint64_; + }; }; // These structs hold pc offsets for generated instructions and is only used diff --git a/deps/v8/src/compiler/backend/ia32/code-generator-ia32.cc b/deps/v8/src/compiler/backend/ia32/code-generator-ia32.cc index 8a53c5cd21a8b1..5afd119ff506dd 100644 --- a/deps/v8/src/compiler/backend/ia32/code-generator-ia32.cc +++ 
b/deps/v8/src/compiler/backend/ia32/code-generator-ia32.cc @@ -767,7 +767,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } case kArchCallJSFunction: { Register func = i.InputRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check the function's context matches the context argument. __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); __ Assert(equal, AbortReason::kWrongFunctionContext); @@ -962,7 +962,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( Register scratch0 = i.TempRegister(0); Register scratch1 = i.TempRegister(1); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Checking that |value| is not a cleared weakref: our write barrier // does not support that for now. __ cmp(value, Immediate(kClearedWeakHeapObjectLower32)); @@ -4066,7 +4066,7 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally. Thereby we can avoid the integer overflow // check in the condition code. - if (required_slots * kSystemPointerSize < FLAG_stack_size * KB) { + if (required_slots * kSystemPointerSize < v8_flags.stack_size * KB) { Register scratch = esi; __ push(scratch); __ mov(scratch, @@ -4134,7 +4134,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ cmp(g.ToRegister(additional_pop_count), Immediate(0)); __ Assert(equal, AbortReason::kUnexpectedAdditionalPopValue); } diff --git a/deps/v8/src/compiler/backend/ia32/instruction-selector-ia32.cc b/deps/v8/src/compiler/backend/ia32/instruction-selector-ia32.cc index 84904062cfb5e5..bea1475584d226 100644 --- a/deps/v8/src/compiler/backend/ia32/instruction-selector-ia32.cc +++ b/deps/v8/src/compiler/backend/ia32/instruction-selector-ia32.cc @@ -695,11 +695,12 @@ void VisitStoreCommon(InstructionSelector* selector, Node* node, const bool is_seqcst = atomic_order && *atomic_order == AtomicMemoryOrder::kSeqCst; - if (FLAG_enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) { + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedPointer(rep)); AddressingMode addressing_mode; InstructionOperand inputs[] = { diff --git a/deps/v8/src/compiler/backend/instruction-scheduler.cc b/deps/v8/src/compiler/backend/instruction-scheduler.cc index 3d0be78262e68a..af339c15585796 100644 --- a/deps/v8/src/compiler/backend/instruction-scheduler.cc +++ b/deps/v8/src/compiler/backend/instruction-scheduler.cc @@ -82,9 +82,9 @@ InstructionScheduler::InstructionScheduler(Zone* zone, last_live_in_reg_marker_(nullptr), last_deopt_or_trap_(nullptr), operands_map_(zone) { - if (FLAG_turbo_stress_instruction_scheduling) { + if (v8_flags.turbo_stress_instruction_scheduling) { random_number_generator_ = - base::Optional<base::RandomNumberGenerator>(FLAG_random_seed); + base::Optional<base::RandomNumberGenerator>(v8_flags.random_seed); } } @@ -99,7 +99,7 @@ void InstructionScheduler::StartBlock(RpoNumber rpo) { } void InstructionScheduler::EndBlock(RpoNumber rpo) { - if (FLAG_turbo_stress_instruction_scheduling) { + if 
(v8_flags.turbo_stress_instruction_scheduling) { Schedule<StressSchedulerQueue>(); } else { Schedule<CriticalPathFirstQueue>(); @@ -119,7 +119,7 @@ void InstructionScheduler::AddTerminator(Instruction* instr) { void InstructionScheduler::AddInstruction(Instruction* instr) { if (IsBarrier(instr)) { - if (FLAG_turbo_stress_instruction_scheduling) { + if (v8_flags.turbo_stress_instruction_scheduling) { Schedule<StressSchedulerQueue>(); } else { Schedule<CriticalPathFirstQueue>(); diff --git a/deps/v8/src/compiler/backend/instruction-selector.cc b/deps/v8/src/compiler/backend/instruction-selector.cc index 2ef46c02ebb4b4..a098254c7e5a32 100644 --- a/deps/v8/src/compiler/backend/instruction-selector.cc +++ b/deps/v8/src/compiler/backend/instruction-selector.cc @@ -14,12 +14,9 @@ #include "src/compiler/backend/instruction-selector-impl.h" #include "src/compiler/compiler-source-position-table.h" #include "src/compiler/js-heap-broker.h" -#include "src/compiler/node-matchers.h" #include "src/compiler/node-properties.h" -#include "src/compiler/pipeline.h" #include "src/compiler/schedule.h" #include "src/compiler/state-values-utils.h" -#include "src/deoptimizer/deoptimizer.h" #if V8_ENABLE_WEBASSEMBLY #include "src/wasm/simd-shuffle.h" @@ -1566,6 +1563,8 @@ void InstructionSelector::VisitNode(Node* node) { return MarkAsWord32(node), VisitInt32MulWithOverflow(node); case IrOpcode::kInt32MulHigh: return VisitInt32MulHigh(node); + case IrOpcode::kInt64MulHigh: + return VisitInt64MulHigh(node); case IrOpcode::kInt32Div: return MarkAsWord32(node), VisitInt32Div(node); case IrOpcode::kInt32Mod: @@ -1584,6 +1583,8 @@ void InstructionSelector::VisitNode(Node* node) { return MarkAsWord32(node), VisitUint32Mod(node); case IrOpcode::kUint32MulHigh: return VisitUint32MulHigh(node); + case IrOpcode::kUint64MulHigh: + return VisitUint64MulHigh(node); case IrOpcode::kInt64Add: return MarkAsWord64(node), VisitInt64Add(node); case IrOpcode::kInt64AddWithOverflow: @@ -1594,6 +1595,8 @@ void InstructionSelector::VisitNode(Node* node) { return MarkAsWord64(node), VisitInt64SubWithOverflow(node); case IrOpcode::kInt64Mul: return MarkAsWord64(node), VisitInt64Mul(node); + case IrOpcode::kInt64MulWithOverflow: + return MarkAsWord64(node), VisitInt64MulWithOverflow(node); case IrOpcode::kInt64Div: return MarkAsWord64(node), VisitInt64Div(node); case IrOpcode::kInt64Mod: @@ -2570,6 +2573,14 @@ void InstructionSelector::VisitInt64SubWithOverflow(Node* node) { void InstructionSelector::VisitInt64Mul(Node* node) { UNIMPLEMENTED(); } +void InstructionSelector::VisitInt64MulHigh(Node* node) { UNIMPLEMENTED(); } + +void InstructionSelector::VisitUint64MulHigh(Node* node) { UNIMPLEMENTED(); } + +void InstructionSelector::VisitInt64MulWithOverflow(Node* node) { + UNIMPLEMENTED(); +} + void InstructionSelector::VisitInt64Div(Node* node) { UNIMPLEMENTED(); } void InstructionSelector::VisitInt64LessThan(Node* node) { UNIMPLEMENTED(); } @@ -2887,6 +2898,7 @@ void InstructionSelector::VisitProjection(Node* node) { case IrOpcode::kInt32MulWithOverflow: case IrOpcode::kInt64AddWithOverflow: case IrOpcode::kInt64SubWithOverflow: + case IrOpcode::kInt64MulWithOverflow: case IrOpcode::kTryTruncateFloat32ToInt64: case IrOpcode::kTryTruncateFloat64ToInt64: case IrOpcode::kTryTruncateFloat32ToUint64: @@ -3137,14 +3149,12 @@ void InstructionSelector::VisitSelect(Node* node) { } void InstructionSelector::VisitTrapIf(Node* node, TrapId trap_id) { - FlagsContinuation cont = - FlagsContinuation::ForTrap(kNotEqual, trap_id, node->InputAt(1)); + 
FlagsContinuation cont = FlagsContinuation::ForTrap(kNotEqual, trap_id); VisitWordCompareZero(node, node->InputAt(0), &cont); } void InstructionSelector::VisitTrapUnless(Node* node, TrapId trap_id) { - FlagsContinuation cont = - FlagsContinuation::ForTrap(kEqual, trap_id, node->InputAt(1)); + FlagsContinuation cont = FlagsContinuation::ForTrap(kEqual, trap_id); VisitWordCompareZero(node, node->InputAt(0), &cont); } diff --git a/deps/v8/src/compiler/backend/instruction-selector.h b/deps/v8/src/compiler/backend/instruction-selector.h index 686ba68519739e..7cd22ee492ed97 100644 --- a/deps/v8/src/compiler/backend/instruction-selector.h +++ b/deps/v8/src/compiler/backend/instruction-selector.h @@ -8,7 +8,6 @@ #include <map> #include "src/codegen/cpu-features.h" -#include "src/common/globals.h" #include "src/compiler/backend/instruction-scheduler.h" #include "src/compiler/backend/instruction.h" #include "src/compiler/common-operator.h" @@ -78,9 +77,8 @@ class FlagsContinuation final { } // Creates a new flags continuation for a wasm trap. - static FlagsContinuation ForTrap(FlagsCondition condition, TrapId trap_id, - Node* result) { - return FlagsContinuation(condition, trap_id, result); + static FlagsContinuation ForTrap(FlagsCondition condition, TrapId trap_id) { + return FlagsContinuation(condition, trap_id); } static FlagsContinuation ForSelect(FlagsCondition condition, Node* result, @@ -218,13 +216,8 @@ class FlagsContinuation final { DCHECK_NOT_NULL(result); } - FlagsContinuation(FlagsCondition condition, TrapId trap_id, Node* result) - : mode_(kFlags_trap), - condition_(condition), - frame_state_or_result_(result), - trap_id_(trap_id) { - DCHECK_NOT_NULL(result); - } + FlagsContinuation(FlagsCondition condition, TrapId trap_id) + : mode_(kFlags_trap), condition_(condition), trap_id_(trap_id) {} FlagsContinuation(FlagsCondition condition, Node* result, Node* true_value, Node* false_value) @@ -292,7 +285,7 @@ class V8_EXPORT_PRIVATE InstructionSelector final { size_t* max_pushed_argument_count, SourcePositionMode source_position_mode = kCallSourcePositions, Features features = SupportedFeatures(), - EnableScheduling enable_scheduling = FLAG_turbo_instruction_scheduling + EnableScheduling enable_scheduling = v8_flags.turbo_instruction_scheduling ? kEnableScheduling : kDisableScheduling, EnableRootsRelativeAddressing enable_roots_relative_addressing = diff --git a/deps/v8/src/compiler/backend/instruction.cc b/deps/v8/src/compiler/backend/instruction.cc index 5477c9fb868963..0ec677294747ce 100644 --- a/deps/v8/src/compiler/backend/instruction.cc +++ b/deps/v8/src/compiler/backend/instruction.cc @@ -325,6 +325,20 @@ void ParallelMove::PrepareInsertAfter( if (replacement != nullptr) move->set_source(replacement->source()); } +bool ParallelMove::Equals(const ParallelMove& that) const { + if (this->size() != that.size()) return false; + for (size_t i = 0; i < this->size(); ++i) { + if (!(*this)[i]->Equals(*that[i])) return false; + } + return true; +} + +void ParallelMove::Eliminate() { + for (MoveOperands* move : *this) { + move->Eliminate(); + } +} + Instruction::Instruction(InstructionCode opcode) : opcode_(opcode), bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) | @@ -803,7 +817,7 @@ void InstructionSequence::ComputeAssemblyOrder() { if (block->ao_number() != invalid) continue; // loop rotated. if (block->IsLoopHeader()) { bool header_align = true; - if (FLAG_turbo_loop_rotation) { + if (v8_flags.turbo_loop_rotation) { // Perform loop rotation for non-deferred loops. 
InstructionBlock* loop_end = instruction_blocks_->at(block->loop_end().ToSize() - 1); diff --git a/deps/v8/src/compiler/backend/instruction.h b/deps/v8/src/compiler/backend/instruction.h index aac7ac12c855d3..e9f0f9514bd1f5 100644 --- a/deps/v8/src/compiler/backend/instruction.h +++ b/deps/v8/src/compiler/backend/instruction.h @@ -775,6 +775,12 @@ class V8_EXPORT_PRIVATE MoveOperands final // APIs to aid debugging. For general-stream APIs, use operator<<. void Print() const; + bool Equals(const MoveOperands& that) const { + if (IsRedundant() && that.IsRedundant()) return true; + return source_.Equals(that.source_) && + destination_.Equals(that.destination_); + } + private: InstructionOperand source_; InstructionOperand destination_; @@ -813,6 +819,11 @@ class V8_EXPORT_PRIVATE ParallelMove final // to_eliminate must be Eliminated. void PrepareInsertAfter(MoveOperands* move, ZoneVector<MoveOperands*>* to_eliminate) const; + + bool Equals(const ParallelMove& that) const; + + // Eliminate all the MoveOperands in this ParallelMove. + void Eliminate(); }; std::ostream& operator<<(std::ostream&, const ParallelMove&); diff --git a/deps/v8/src/compiler/backend/jump-threading.cc b/deps/v8/src/compiler/backend/jump-threading.cc index f056cdc9451d66..9984c3e2b30740 100644 --- a/deps/v8/src/compiler/backend/jump-threading.cc +++ b/deps/v8/src/compiler/backend/jump-threading.cc @@ -9,9 +9,9 @@ namespace v8 { namespace internal { namespace compiler { -#define TRACE(...) \ - do { \ - if (FLAG_trace_turbo_jt) PrintF(__VA_ARGS__); \ +#define TRACE(...) \ + do { \ + if (v8_flags.trace_turbo_jt) PrintF(__VA_ARGS__); \ } while (false) namespace { @@ -55,6 +55,72 @@ struct JumpThreadingState { RpoNumber onstack() { return RpoNumber::FromInt(-2); } }; +struct GapJumpRecord { + GapJumpRecord(Zone* zone) : zone_(zone), gap_jump_records_(zone) {} + + struct Record { + RpoNumber block; + Instruction* instr; + }; + + struct RpoNumberHash { + std::size_t operator()(const RpoNumber& key) const { + return std::hash<int>()(key.ToInt()); + } + }; + + bool CanForwardGapJump(Instruction* instr, RpoNumber instr_block, + RpoNumber target_block, RpoNumber* forward_to) { + DCHECK_EQ(instr->arch_opcode(), kArchJmp); + bool can_forward = false; + auto search = gap_jump_records_.find(target_block); + if (search != gap_jump_records_.end()) { + for (Record& record : search->second) { + Instruction* record_instr = record.instr; + DCHECK_EQ(record_instr->arch_opcode(), kArchJmp); + bool is_same_instr = true; + for (int i = Instruction::FIRST_GAP_POSITION; + i <= Instruction::LAST_GAP_POSITION; i++) { + Instruction::GapPosition pos = + static_cast<Instruction::GapPosition>(i); + ParallelMove* record_move = record_instr->GetParallelMove(pos); + ParallelMove* instr_move = instr->GetParallelMove(pos); + if (record_move == nullptr && instr_move == nullptr) continue; + if (((record_move == nullptr) != (instr_move == nullptr)) || + !record_move->Equals(*instr_move)) { + is_same_instr = false; + break; + } + } + if (is_same_instr) { + // Found an instruction same as the recorded one. + *forward_to = record.block; + can_forward = true; + break; + } + } + if (!can_forward) { + // No recorded instruction has been found for this target block, + // so create a new record with the given instruction. + search->second.push_back({instr_block, instr}); + } + } else { + // This is the first explored gap jump to target block. 
+ auto ins = + gap_jump_records_.insert({target_block, ZoneVector<Record>(zone_)}); + if (ins.second) { + ins.first->second.reserve(4); + ins.first->second.push_back({instr_block, instr}); + } + } + return can_forward; + } + + Zone* zone_; + ZoneUnorderedMap<RpoNumber, ZoneVector<Record>, RpoNumberHash> + gap_jump_records_; +}; + } // namespace bool JumpThreading::ComputeForwarding(Zone* local_zone, @@ -68,6 +134,7 @@ bool JumpThreading::ComputeForwarding(Zone* local_zone, int32_t empty_deconstruct_frame_return_size; RpoNumber empty_no_deconstruct_frame_return_block = RpoNumber::Invalid(); int32_t empty_no_deconstruct_frame_return_size; + GapJumpRecord record(local_zone); // Iterate over the blocks forward, pushing the blocks onto the stack. for (auto const instruction_block : code->instruction_blocks()) { @@ -85,8 +152,24 @@ bool JumpThreading::ComputeForwarding(Zone* local_zone, for (int i = block->code_start(); i < block->code_end(); ++i) { Instruction* instr = code->InstructionAt(i); if (!instr->AreMovesRedundant()) { - // can't skip instructions with non redundant moves. - TRACE(" parallel move\n"); + TRACE(" parallel move"); + // can't skip instructions with non redundant moves, except when we + // can forward to a block with identical gap-moves. + if (instr->arch_opcode() == kArchJmp) { + TRACE(" jmp"); + RpoNumber forward_to; + if ((frame_at_start || !(block->must_deconstruct_frame() || + block->must_construct_frame())) && + record.CanForwardGapJump(instr, block->rpo_number(), + code->InputRpo(instr, 0), + &forward_to)) { + DCHECK(forward_to.IsValid()); + fw = forward_to; + TRACE("\n merge B%d into B%d", block->rpo_number().ToInt(), + forward_to.ToInt()); + } + } + TRACE("\n"); fallthru = false; } else if (FlagsModeField::decode(instr->opcode()) != kFlags_none) { // can't skip instructions with flags continuations. @@ -166,7 +249,7 @@ bool JumpThreading::ComputeForwarding(Zone* local_zone, } #endif - if (FLAG_trace_turbo_jt) { + if (v8_flags.trace_turbo_jt) { for (int i = 0; i < static_cast<int>(result->size()); i++) { TRACE("B%d ", i); int to = (*result)[i].ToInt(); @@ -184,7 +267,7 @@ bool JumpThreading::ComputeForwarding(Zone* local_zone, void JumpThreading::ApplyForwarding(Zone* local_zone, ZoneVector<RpoNumber> const& result, InstructionSequence* code) { - if (!FLAG_turbo_jt) return; + if (!v8_flags.turbo_jt) return; ZoneVector<bool> skip(static_cast<int>(result.size()), false, local_zone); @@ -217,6 +300,16 @@ void JumpThreading::ApplyForwarding(Zone* local_zone, // Overwrite a redundant jump with a nop. TRACE("jt-fw nop @%d\n", i); instr->OverwriteWithNop(); + // Eliminate all the ParallelMoves. + for (int i = Instruction::FIRST_GAP_POSITION; + i <= Instruction::LAST_GAP_POSITION; i++) { + Instruction::GapPosition pos = + static_cast<Instruction::GapPosition>(i); + ParallelMove* instr_move = instr->GetParallelMove(pos); + if (instr_move != nullptr) { + instr_move->Eliminate(); + } + } // If this block was marked as a handler, it can be unmarked now. 
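The forwarding added above only treats two jumps as identical when their gap moves compare equal, and MoveOperands::Equals deliberately treats any two redundant moves as interchangeable. A simplified standalone model of that comparison logic (hypothetical Operand and Move types, not the V8 classes):

#include <cstddef>
#include <vector>

// Hypothetical stand-in for an instruction operand, identified by an id.
struct Operand {
  int id;
  bool operator==(const Operand& other) const { return id == other.id; }
};

// Mirrors the shape of MoveOperands::Equals: redundant moves (in this model,
// moves whose source and destination coincide) compare equal to each other
// regardless of their operands.
struct Move {
  Operand source, destination;
  bool IsRedundant() const { return source == destination; }
  bool Equals(const Move& that) const {
    if (IsRedundant() && that.IsRedundant()) return true;
    return source == that.source && destination == that.destination;
  }
};

// Mirrors ParallelMove::Equals: a positional, element-wise comparison, so two
// gap-move lists only match when they hold the same moves in the same order,
// which keeps the check cheap for the jump-threading use above.
bool ParallelMovesEqual(const std::vector<Move>& a, const std::vector<Move>& b) {
  if (a.size() != b.size()) return false;
  for (size_t i = 0; i < a.size(); ++i) {
    if (!a[i].Equals(b[i])) return false;
  }
  return true;
}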
code->InstructionBlockAt(block_rpo)->UnmarkHandler(); } diff --git a/deps/v8/src/compiler/backend/loong64/code-generator-loong64.cc b/deps/v8/src/compiler/backend/loong64/code-generator-loong64.cc index 5e54abd3963f7f..74b25770d4850c 100644 --- a/deps/v8/src/compiler/backend/loong64/code-generator-loong64.cc +++ b/deps/v8/src/compiler/backend/loong64/code-generator-loong64.cc @@ -627,7 +627,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } case kArchCallJSFunction: { Register func = i.InputRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { UseScratchRegisterScope temps(tasm()); Register scratch = temps.Acquire(); // Check the function's context matches the context argument. @@ -692,7 +692,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( int offset = __ root_array_available() ? 36 : 80; // 9 or 20 instrs #endif // V8_ENABLE_WEBASSEMBLY #if V8_HOST_ARCH_LOONG64 - if (FLAG_debug_code) { + if (v8_flags.debug_code) { offset += 12; // see CallCFunction } #endif @@ -849,7 +849,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( frame_access_state()->GetFrameOffset(i.InputInt32(0)); Register base_reg = offset.from_stack_pointer() ? sp : fp; __ Add_d(i.OutputRegister(), base_reg, Operand(offset.offset())); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Verify that the output_register is properly aligned __ And(scratch, i.OutputRegister(), Operand(kSystemPointerSize - 1)); __ Assert(eq, AbortReason::kAllocationIsNotDoubleAligned, scratch, @@ -947,6 +947,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ MulOverflow_w(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1), t8); break; + case kLoong64MulOvf_d: + __ MulOverflow_d(i.OutputRegister(), i.InputRegister(0), + i.InputOperand(1), t8); + break; case kLoong64Mulh_w: __ Mulh_w(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1)); break; @@ -956,6 +960,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( case kLoong64Mulh_d: __ Mulh_d(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1)); break; + case kLoong64Mulh_du: + __ Mulh_du(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1)); + break; case kLoong64Div_w: __ Div_w(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1)); __ maskeqz(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); @@ -1895,7 +1902,8 @@ void AssembleBranchToLabels(CodeGenerator* gen, TurboAssembler* tasm, default: UNSUPPORTED_COND(instr->arch_opcode(), condition); } - } else if (instr->arch_opcode() == kLoong64MulOvf_w) { + } else if (instr->arch_opcode() == kLoong64MulOvf_w || + instr->arch_opcode() == kLoong64MulOvf_d) { // Overflow occurs if overflow register is not zero switch (condition) { case kOverflow: @@ -1905,7 +1913,7 @@ void AssembleBranchToLabels(CodeGenerator* gen, TurboAssembler* tasm, __ Branch(tlabel, eq, t8, Operand(zero_reg)); break; default: - UNSUPPORTED_COND(kLoong64MulOvf_w, condition); + UNSUPPORTED_COND(instr->arch_opcode(), condition); } } else if (instr->arch_opcode() == kLoong64Cmp) { Condition cc = FlagsConditionToConditionCmp(condition); @@ -1996,7 +2004,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr, ReferenceMap* reference_map = gen_->zone()->New<ReferenceMap>(gen_->zone()); gen_->RecordSafepoint(reference_map); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ stop(); } } @@ -2047,7 +2055,8 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr, instr->arch_opcode() == kLoong64SubOvf_d) 
{ // Overflow occurs if overflow register is negative __ slt(result, t8, zero_reg); - } else if (instr->arch_opcode() == kLoong64MulOvf_w) { + } else if (instr->arch_opcode() == kLoong64MulOvf_w || + instr->arch_opcode() == kLoong64MulOvf_d) { // Overflow occurs if overflow register is not zero __ Sgtu(result, t8, zero_reg); } else if (instr->arch_opcode() == kLoong64Cmp) { @@ -2275,7 +2284,7 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally. Thereby we can avoid the integer overflow // check in the condition code. - if (required_slots * kSystemPointerSize < FLAG_stack_size * KB) { + if (required_slots * kSystemPointerSize < v8_flags.stack_size * KB) { UseScratchRegisterScope temps(tasm()); Register scratch = temps.Acquire(); __ Ld_d(scratch, FieldMemOperand( @@ -2292,7 +2301,7 @@ void CodeGenerator::AssembleConstructFrame() { // define an empty safepoint. ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone()); RecordSafepoint(reference_map); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ stop(); } @@ -2358,7 +2367,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue, g.ToRegister(additional_pop_count), Operand(static_cast<int64_t>(0))); diff --git a/deps/v8/src/compiler/backend/loong64/instruction-codes-loong64.h b/deps/v8/src/compiler/backend/loong64/instruction-codes-loong64.h index dd7b9cebfeaeaf..5ad8096509a097 100644 --- a/deps/v8/src/compiler/backend/loong64/instruction-codes-loong64.h +++ b/deps/v8/src/compiler/backend/loong64/instruction-codes-loong64.h @@ -21,8 +21,10 @@ namespace compiler { V(Loong64SubOvf_d) \ V(Loong64Mul_d) \ V(Loong64MulOvf_w) \ + V(Loong64MulOvf_d) \ V(Loong64Mulh_d) \ V(Loong64Mulh_w) \ + V(Loong64Mulh_du) \ V(Loong64Mulh_wu) \ V(Loong64Mul_w) \ V(Loong64Div_d) \ diff --git a/deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc b/deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc index 2498cdcb072cd0..091c7ad9bc9b82 100644 --- a/deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc +++ b/deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc @@ -492,12 +492,13 @@ void InstructionSelector::VisitStore(Node* node) { WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind(); MachineRepresentation rep = store_rep.representation(); - if (FLAG_enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) { + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } // TODO(loong64): I guess this could be done in a better way. 
- if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedPointer(rep)); AddressingMode addressing_mode; InstructionOperand inputs[3]; @@ -1079,10 +1080,18 @@ void InstructionSelector::VisitInt32MulHigh(Node* node) { VisitRRR(this, kLoong64Mulh_w, node); } +void InstructionSelector::VisitInt64MulHigh(Node* node) { + VisitRRR(this, kLoong64Mulh_d, node); +} + void InstructionSelector::VisitUint32MulHigh(Node* node) { VisitRRR(this, kLoong64Mulh_wu, node); } +void InstructionSelector::VisitUint64MulHigh(Node* node) { + VisitRRR(this, kLoong64Mulh_du, node); +} + void InstructionSelector::VisitInt64Mul(Node* node) { Loong64OperandGenerator g(this); Int64BinopMatcher m(node); @@ -2021,14 +2030,15 @@ void VisitAtomicStore(InstructionSelector* selector, Node* node, WriteBarrierKind write_barrier_kind = store_params.write_barrier_kind(); MachineRepresentation rep = store_params.representation(); - if (FLAG_enable_unconditional_write_barriers && + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedOrCompressedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } InstructionCode code; - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedPointer(rep)); DCHECK_EQ(kTaggedSize, 8); @@ -2280,6 +2290,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value, case IrOpcode::kInt32MulWithOverflow: cont->OverwriteAndNegateIfEqual(kOverflow); return VisitBinop(this, node, kLoong64MulOvf_w, cont); + case IrOpcode::kInt64MulWithOverflow: + cont->OverwriteAndNegateIfEqual(kOverflow); + return VisitBinop(this, node, kLoong64MulOvf_d, cont); case IrOpcode::kInt64AddWithOverflow: cont->OverwriteAndNegateIfEqual(kOverflow); return VisitBinop(this, node, kLoong64AddOvf_d, cont); @@ -2397,6 +2410,15 @@ void InstructionSelector::VisitInt32MulWithOverflow(Node* node) { VisitBinop(this, node, kLoong64MulOvf_w, &cont); } +void InstructionSelector::VisitInt64MulWithOverflow(Node* node) { + if (Node* ovf = NodeProperties::FindProjection(node, 1)) { + FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); + return VisitBinop(this, node, kLoong64MulOvf_d, &cont); + } + FlagsContinuation cont; + VisitBinop(this, node, kLoong64MulOvf_d, &cont); +} + void InstructionSelector::VisitInt64AddWithOverflow(Node* node) { if (Node* ovf = NodeProperties::FindProjection(node, 1)) { FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); diff --git a/deps/v8/src/compiler/backend/mips64/code-generator-mips64.cc b/deps/v8/src/compiler/backend/mips64/code-generator-mips64.cc index 00f2cb709b6f4d..a31e8230cfdb92 100644 --- a/deps/v8/src/compiler/backend/mips64/code-generator-mips64.cc +++ b/deps/v8/src/compiler/backend/mips64/code-generator-mips64.cc @@ -646,7 +646,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } case kArchCallJSFunction: { Register func = i.InputRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check the function's context matches the context argument. __ Ld(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); __ Assert(eq, AbortReason::kWrongFunctionContext, cp, @@ -708,7 +708,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( int offset = __ root_array_available() ? 
64 : 112; #endif // V8_ENABLE_WEBASSEMBLY #if V8_HOST_ARCH_MIPS64 - if (FLAG_debug_code) { + if (v8_flags.debug_code) { offset += 16; } #endif @@ -857,7 +857,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( frame_access_state()->GetFrameOffset(i.InputInt32(0)); Register base_reg = offset.from_stack_pointer() ? sp : fp; __ Daddu(i.OutputRegister(), base_reg, Operand(offset.offset())); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Verify that the output_register is properly aligned __ And(kScratchReg, i.OutputRegister(), Operand(kSystemPointerSize - 1)); @@ -956,6 +956,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ MulOverflow(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1), kScratchReg); break; + case kMips64DMulOvf: + __ DMulOverflow(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1), + kScratchReg); + break; case kMips64MulHigh: __ Mulh(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1)); break; @@ -965,6 +969,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( case kMips64DMulHigh: __ Dmulh(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1)); break; + case kMips64DMulHighU: + __ Dmulhu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1)); + break; case kMips64Div: __ Div(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1)); if (kArchVariant == kMips64r6) { @@ -3426,7 +3433,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( if (src0 == src1) { // Unary S32x4 shuffles are handled with shf.w instruction unsigned lane = shuffle & 0xFF; - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // range of all four lanes, for unary instruction, // should belong to the same range, which can be one of these: // [0, 3] or [4, 7] @@ -3825,7 +3832,8 @@ void AssembleBranchToLabels(CodeGenerator* gen, TurboAssembler* tasm, default: UNSUPPORTED_COND(instr->arch_opcode(), condition); } - } else if (instr->arch_opcode() == kMips64MulOvf) { + } else if (instr->arch_opcode() == kMips64MulOvf || + instr->arch_opcode() == kMips64DMulOvf) { // Overflow occurs if overflow register is not zero switch (condition) { case kOverflow: @@ -3835,7 +3843,7 @@ void AssembleBranchToLabels(CodeGenerator* gen, TurboAssembler* tasm, __ Branch(tlabel, eq, kScratchReg, Operand(zero_reg)); break; default: - UNSUPPORTED_COND(kMipsMulOvf, condition); + UNSUPPORTED_COND(instr->arch_opcode(), condition); } } else if (instr->arch_opcode() == kMips64Cmp) { Condition cc = FlagsConditionToConditionCmp(condition); @@ -3926,7 +3934,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr, ReferenceMap* reference_map = gen_->zone()->New<ReferenceMap>(gen_->zone()); gen_->RecordSafepoint(reference_map); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ stop(); } } @@ -3975,7 +3983,8 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr, instr->arch_opcode() == kMips64DsubOvf) { // Overflow occurs if overflow register is negative __ slt(result, kScratchReg, zero_reg); - } else if (instr->arch_opcode() == kMips64MulOvf) { + } else if (instr->arch_opcode() == kMips64MulOvf || + instr->arch_opcode() == kMips64DMulOvf) { // Overflow occurs if overflow register is not zero __ Sgtu(result, kScratchReg, zero_reg); } else if (instr->arch_opcode() == kMips64Cmp) { @@ -4222,7 +4231,7 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally.
Thereby we can avoid the integer overflow // check in the condition code. - if (required_slots * kSystemPointerSize < FLAG_stack_size * KB) { + if (required_slots * kSystemPointerSize < v8_flags.stack_size * KB) { __ Ld( kScratchReg, FieldMemOperand(kWasmInstanceRegister, @@ -4238,7 +4247,7 @@ void CodeGenerator::AssembleConstructFrame() { // define an empty safepoint. ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone()); RecordSafepoint(reference_map); - if (FLAG_debug_code) __ stop(); + if (v8_flags.debug_code) __ stop(); __ bind(&done); } @@ -4302,7 +4311,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue, g.ToRegister(additional_pop_count), Operand(static_cast<int64_t>(0))); diff --git a/deps/v8/src/compiler/backend/mips64/instruction-codes-mips64.h b/deps/v8/src/compiler/backend/mips64/instruction-codes-mips64.h index 5e05046feba7b8..6b6181de595746 100644 --- a/deps/v8/src/compiler/backend/mips64/instruction-codes-mips64.h +++ b/deps/v8/src/compiler/backend/mips64/instruction-codes-mips64.h @@ -21,9 +21,11 @@ namespace compiler { V(Mips64DsubOvf) \ V(Mips64Mul) \ V(Mips64MulOvf) \ + V(Mips64DMulOvf) \ V(Mips64MulHigh) \ V(Mips64DMulHigh) \ V(Mips64MulHighU) \ + V(Mips64DMulHighU) \ V(Mips64Dmul) \ V(Mips64Div) \ V(Mips64Ddiv) \ diff --git a/deps/v8/src/compiler/backend/mips64/instruction-scheduler-mips64.cc b/deps/v8/src/compiler/backend/mips64/instruction-scheduler-mips64.cc index 597bb175701358..1d17d4bd58dd5d 100644 --- a/deps/v8/src/compiler/backend/mips64/instruction-scheduler-mips64.cc +++ b/deps/v8/src/compiler/backend/mips64/instruction-scheduler-mips64.cc @@ -44,6 +44,8 @@ int InstructionScheduler::GetTargetInstructionFlags( case kMips64CvtSUw: case kMips64CvtSW: case kMips64DMulHigh: + case kMips64DMulHighU: + case kMips64DMulOvf: case kMips64MulHighU: case kMips64Dadd: case kMips64DaddOvf: @@ -1274,7 +1276,7 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) { return JumpLatency(); case kArchCallJSFunction: { int latency = 0; - if (FLAG_debug_code) { + if (v8_flags.debug_code) { latency = 1 + AssertLatency(); } return latency + 1 + DadduLatency(false) + CallLatency(); @@ -1360,6 +1362,7 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) { case kMips64Mul: return MulLatency(); case kMips64MulOvf: + case kMips64DMulOvf: return MulOverflowLatency(); case kMips64MulHigh: return MulhLatency(); diff --git a/deps/v8/src/compiler/backend/mips64/instruction-selector-mips64.cc b/deps/v8/src/compiler/backend/mips64/instruction-selector-mips64.cc index fc6972fa99a4f3..8b4398eecbba0a 100644 --- a/deps/v8/src/compiler/backend/mips64/instruction-selector-mips64.cc +++ b/deps/v8/src/compiler/backend/mips64/instruction-selector-mips64.cc @@ -528,12 +528,13 @@ void InstructionSelector::VisitStore(Node* node) { WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind(); MachineRepresentation rep = store_rep.representation(); - if (FLAG_enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) { + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } // TODO(mips): I guess this could be done in a better way. 
- if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedPointer(rep)); InstructionOperand inputs[3]; size_t input_count = 0; @@ -1119,10 +1120,18 @@ void InstructionSelector::VisitInt32MulHigh(Node* node) { VisitRRR(this, kMips64MulHigh, node); } +void InstructionSelector::VisitInt64MulHigh(Node* node) { + VisitRRR(this, kMips64DMulHigh, node); +} + void InstructionSelector::VisitUint32MulHigh(Node* node) { VisitRRR(this, kMips64MulHighU, node); } +void InstructionSelector::VisitUint64MulHigh(Node* node) { + VisitRRR(this, kMips64DMulHighU, node); +} + void InstructionSelector::VisitInt64Mul(Node* node) { Mips64OperandGenerator g(this); Int64BinopMatcher m(node); @@ -2121,7 +2130,7 @@ void VisitFullWord32Compare(InstructionSelector* selector, Node* node, void VisitOptimizedWord32Compare(InstructionSelector* selector, Node* node, InstructionCode opcode, FlagsContinuation* cont) { - if (FLAG_debug_code) { + if (v8_flags.debug_code) { Mips64OperandGenerator g(selector); InstructionOperand leftOp = g.TempRegister(); InstructionOperand rightOp = g.TempRegister(); @@ -2256,14 +2265,15 @@ void VisitAtomicStore(InstructionSelector* selector, Node* node, WriteBarrierKind write_barrier_kind = store_params.write_barrier_kind(); MachineRepresentation rep = store_params.representation(); - if (FLAG_enable_unconditional_write_barriers && + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedOrCompressedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } InstructionCode code; - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedPointer(rep)); DCHECK_EQ(AtomicWidthSize(width), kTaggedSize); @@ -2524,6 +2534,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value, case IrOpcode::kInt32MulWithOverflow: cont->OverwriteAndNegateIfEqual(kOverflow); return VisitBinop(this, node, kMips64MulOvf, cont); + case IrOpcode::kInt64MulWithOverflow: + cont->OverwriteAndNegateIfEqual(kOverflow); + return VisitBinop(this, node, kMips64DMulOvf, cont); case IrOpcode::kInt64AddWithOverflow: cont->OverwriteAndNegateIfEqual(kOverflow); return VisitBinop(this, node, kMips64DaddOvf, cont); @@ -2641,6 +2654,15 @@ void InstructionSelector::VisitInt32MulWithOverflow(Node* node) { VisitBinop(this, node, kMips64MulOvf, &cont); } +void InstructionSelector::VisitInt64MulWithOverflow(Node* node) { + if (Node* ovf = NodeProperties::FindProjection(node, 1)) { + FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); + return VisitBinop(this, node, kMips64DMulOvf, &cont); + } + FlagsContinuation cont; + VisitBinop(this, node, kMips64DMulOvf, &cont); +} + void InstructionSelector::VisitInt64AddWithOverflow(Node* node) { if (Node* ovf = NodeProperties::FindProjection(node, 1)) { FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); diff --git a/deps/v8/src/compiler/backend/ppc/code-generator-ppc.cc b/deps/v8/src/compiler/backend/ppc/code-generator-ppc.cc index cd37671dd3c0f9..0ab9564168776e 100644 --- a/deps/v8/src/compiler/backend/ppc/code-generator-ppc.cc +++ b/deps/v8/src/compiler/backend/ppc/code-generator-ppc.cc @@ -785,7 +785,7 @@ void CodeGenerator::AssembleCodeStartRegisterCheck() { // 2. test kMarkedForDeoptimizationBit in those flags; and // 3. if it is not zero then it jumps to the builtin. 
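As background for the function below, steps 2 and 3 of the comment above reduce to a single-bit test on the loaded flags word followed by a conditional jump. A schematic model with illustrative names only (the real bit constant and the tail-called builtin are defined elsewhere in V8):

#include <cstdint>

// Hypothetical bit position standing in for kMarkedForDeoptimizationBit.
constexpr uint32_t kMarkedForDeoptimizationBitPos = 0;

// Returns true when the flags word says the code object was marked for
// deoptimization; in that case the generated prologue jumps to the bailout
// builtin instead of falling through into the optimized code.
bool MarkedForDeoptimization(uint32_t kind_specific_flags) {
  return ((kind_specific_flags >> kMarkedForDeoptimizationBitPos) & 1u) != 0;
}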
void CodeGenerator::BailoutIfDeoptimized() { - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check that {kJavaScriptCallCodeStartRegister} is correct. __ ComputeCodeStartAddress(ip); __ CmpS64(ip, kJavaScriptCallCodeStartRegister); @@ -908,7 +908,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool( tasm()); Register func = i.InputRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check the function's context matches the context argument. __ LoadTaggedPointerField( kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset), r0); @@ -1131,7 +1131,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( Register scratch1 = i.TempRegister(1); OutOfLineRecordWrite* ool; - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Checking that |value| is not a cleared weakref: our write barrier // does not support that for now. __ CmpS64(value, Operand(kClearedWeakHeapObjectLower32), kScratchReg); @@ -1450,6 +1450,14 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( i.InputRegister(1)); // high } break; + case kPPC_MulHighS64: + __ mulhd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), + i.OutputRCBit()); + break; + case kPPC_MulHighU64: + __ mulhdu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), + i.OutputRCBit()); + break; case kPPC_MulHigh32: __ mulhw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), i.OutputRCBit()); @@ -2235,6 +2243,10 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( V(I16x8Eq) \ V(I16x8GtS) \ V(I16x8GtU) \ + V(I16x8AddSatS) \ + V(I16x8SubSatS) \ + V(I16x8AddSatU) \ + V(I16x8SubSatU) \ V(I8x16Add) \ V(I8x16Sub) \ V(I8x16MinS) \ @@ -2243,7 +2255,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( V(I8x16MaxU) \ V(I8x16Eq) \ V(I8x16GtS) \ - V(I8x16GtU) + V(I8x16GtU) \ + V(I8x16AddSatS) \ + V(I8x16SubSatS) \ + V(I8x16AddSatU) \ + V(I8x16SubSatU) #define EMIT_SIMD_BINOP(name) \ case kPPC_##name: { \ @@ -2255,6 +2271,81 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( #undef EMIT_SIMD_BINOP #undef SIMD_BINOP_LIST +#define SIMD_BINOP_WITH_SCRATCH_LIST(V) \ + V(F64x2Ne) \ + V(F32x4Ne) \ + V(I64x2Ne) \ + V(I64x2GeS) \ + V(I32x4Ne) \ + V(I32x4GeS) \ + V(I32x4GeU) \ + V(I16x8Ne) \ + V(I16x8GeS) \ + V(I16x8GeU) \ + V(I8x16Ne) \ + V(I8x16GeS) \ + V(I8x16GeU) + +#define EMIT_SIMD_BINOP_WITH_SCRATCH(name) \ + case kPPC_##name: { \ + __ name(i.OutputSimd128Register(), i.InputSimd128Register(0), \ + i.InputSimd128Register(1), kScratchSimd128Reg); \ + break; \ + } + SIMD_BINOP_WITH_SCRATCH_LIST(EMIT_SIMD_BINOP_WITH_SCRATCH) +#undef EMIT_SIMD_BINOP_WITH_SCRATCH +#undef SIMD_BINOP_WITH_SCRATCH_LIST + +#define SIMD_SHIFT_LIST(V) \ + V(I64x2Shl) \ + V(I64x2ShrS) \ + V(I64x2ShrU) \ + V(I32x4Shl) \ + V(I32x4ShrS) \ + V(I32x4ShrU) \ + V(I16x8Shl) \ + V(I16x8ShrS) \ + V(I16x8ShrU) \ + V(I8x16Shl) \ + V(I8x16ShrS) \ + V(I8x16ShrU) + +#define EMIT_SIMD_SHIFT(name) \ + case kPPC_##name: { \ + __ name(i.OutputSimd128Register(), i.InputSimd128Register(0), \ + i.InputRegister(1), kScratchSimd128Reg); \ + break; \ + } + SIMD_SHIFT_LIST(EMIT_SIMD_SHIFT) +#undef EMIT_SIMD_SHIFT +#undef SIMD_SHIFT_LIST + +#define SIMD_UNOP_LIST(V) \ + V(F64x2Abs) \ + V(F64x2Neg) \ + V(F64x2Sqrt) \ + V(F64x2Ceil) \ + V(F64x2Floor) \ + V(F64x2Trunc) \ + V(F32x4Abs) \ + V(F32x4Neg) \ + V(I64x2Neg) \ + V(I32x4Neg) \ + V(F32x4Sqrt) \ + V(F32x4Ceil) \ + V(F32x4Floor) \ + 
V(F32x4Trunc) \ + V(I8x16Popcnt) + +#define EMIT_SIMD_UNOP(name) \ + case kPPC_##name: { \ + __ name(i.OutputSimd128Register(), i.InputSimd128Register(0)); \ + break; \ + } + SIMD_UNOP_LIST(EMIT_SIMD_UNOP) +#undef EMIT_SIMD_UNOP +#undef SIMD_UNOP_LIST + case kPPC_F64x2Splat: { __ F64x2Splat(i.OutputSimd128Register(), i.InputDoubleRegister(0), kScratchReg); @@ -2381,127 +2472,36 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( kScratchSimd128Reg2); break; } - case kPPC_F64x2Ne: { - __ F64x2Ne(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_F32x4Ne: { - __ F32x4Ne(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I64x2Ne: { - __ I64x2Ne(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I64x2GeS: { - __ I64x2GeS(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I32x4Ne: { - __ I32x4Ne(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I32x4GeS: { - __ I32x4GeS(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I32x4GeU: { - __ I32x4GeU(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I16x8Ne: { - __ I16x8Ne(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I16x8GeS: { - __ I16x8GeS(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I16x8GeU: { - __ I16x8GeU(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I8x16Ne: { - __ I8x16Ne(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I8x16GeS: { - __ I8x16GeS(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } - case kPPC_I8x16GeU: { - __ I8x16GeU(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1), kScratchSimd128Reg); - break; - } -#define VECTOR_SHIFT(op) \ - { \ - __ mtvsrd(kScratchSimd128Reg, i.InputRegister(1)); \ - __ vspltb(kScratchSimd128Reg, kScratchSimd128Reg, Operand(7)); \ - __ op(i.OutputSimd128Register(), i.InputSimd128Register(0), \ - kScratchSimd128Reg); \ - } - case kPPC_I64x2Shl: { - VECTOR_SHIFT(vsld) - break; - } - case kPPC_I64x2ShrS: { - VECTOR_SHIFT(vsrad) - break; - } - case kPPC_I64x2ShrU: { - VECTOR_SHIFT(vsrd) - break; - } - case kPPC_I32x4Shl: { - VECTOR_SHIFT(vslw) - break; - } - case kPPC_I32x4ShrS: { - VECTOR_SHIFT(vsraw) - break; - } - case kPPC_I32x4ShrU: { - VECTOR_SHIFT(vsrw) - break; - } - case kPPC_I16x8Shl: { - VECTOR_SHIFT(vslh) + case kPPC_I64x2Abs: { + __ I64x2Abs(i.OutputSimd128Register(), i.InputSimd128Register(0), + kScratchSimd128Reg); break; } - case kPPC_I16x8ShrS: { - VECTOR_SHIFT(vsrah) + case kPPC_I32x4Abs: { + __ I32x4Abs(i.OutputSimd128Register(), i.InputSimd128Register(0), + kScratchSimd128Reg); break; } - case kPPC_I16x8ShrU: { - VECTOR_SHIFT(vsrh) + case kPPC_I16x8Abs: { + __ I16x8Abs(i.OutputSimd128Register(), 
i.InputSimd128Register(0), + kScratchSimd128Reg); break; } - case kPPC_I8x16Shl: { - VECTOR_SHIFT(vslb) + case kPPC_I16x8Neg: { + __ I16x8Neg(i.OutputSimd128Register(), i.InputSimd128Register(0), + kScratchSimd128Reg); break; } - case kPPC_I8x16ShrS: { - VECTOR_SHIFT(vsrab) + case kPPC_I8x16Abs: { + __ I8x16Abs(i.OutputSimd128Register(), i.InputSimd128Register(0), + kScratchSimd128Reg); break; } - case kPPC_I8x16ShrU: { - VECTOR_SHIFT(vsrb) + case kPPC_I8x16Neg: { + __ I8x16Neg(i.OutputSimd128Register(), i.InputSimd128Register(0), + kScratchSimd128Reg); break; } -#undef VECTOR_SHIFT case kPPC_S128And: { Simd128Register dst = i.OutputSimd128Register(); Simd128Register src = i.InputSimd128Register(1); @@ -2552,92 +2552,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ vsel(dst, src2, src1, mask); break; } - case kPPC_F64x2Abs: { - __ xvabsdp(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F64x2Neg: { - __ xvnegdp(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F64x2Sqrt: { - __ xvsqrtdp(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F32x4Abs: { - __ xvabssp(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F32x4Neg: { - __ xvnegsp(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F32x4Sqrt: { - __ xvsqrtsp(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_I64x2Neg: { - __ vnegd(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_I32x4Neg: { - __ vnegw(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_I64x2Abs: { - Simd128Register src = i.InputSimd128Register(0); - Simd128Register dst = i.OutputSimd128Register(); - constexpr int shift_bits = 63; - __ xxspltib(kScratchSimd128Reg, Operand(shift_bits)); - __ vsrad(kScratchSimd128Reg, src, kScratchSimd128Reg); - __ vxor(dst, src, kScratchSimd128Reg); - __ vsubudm(dst, dst, kScratchSimd128Reg); - break; - } - case kPPC_I32x4Abs: { - Simd128Register src = i.InputSimd128Register(0); - Simd128Register dst = i.OutputSimd128Register(); - constexpr int shift_bits = 31; - __ xxspltib(kScratchSimd128Reg, Operand(shift_bits)); - __ vsraw(kScratchSimd128Reg, src, kScratchSimd128Reg); - __ vxor(dst, src, kScratchSimd128Reg); - __ vsubuwm(dst, dst, kScratchSimd128Reg); - break; - } - case kPPC_I16x8Neg: { - Simd128Register dst = i.OutputSimd128Register(); - __ vspltish(kScratchSimd128Reg, Operand(1)); - __ vnor(dst, i.InputSimd128Register(0), i.InputSimd128Register(0)); - __ vadduhm(dst, kScratchSimd128Reg, dst); - break; - } - case kPPC_I16x8Abs: { - Simd128Register src = i.InputSimd128Register(0); - Simd128Register dst = i.OutputSimd128Register(); - constexpr int shift_bits = 15; - __ xxspltib(kScratchSimd128Reg, Operand(shift_bits)); - __ vsrah(kScratchSimd128Reg, src, kScratchSimd128Reg); - __ vxor(dst, src, kScratchSimd128Reg); - __ vsubuhm(dst, dst, kScratchSimd128Reg); - break; - } - case kPPC_I8x16Neg: { - Simd128Register dst = i.OutputSimd128Register(); - __ xxspltib(kScratchSimd128Reg, Operand(1)); - __ vnor(dst, i.InputSimd128Register(0), i.InputSimd128Register(0)); - __ vaddubm(dst, kScratchSimd128Reg, dst); - break; - } - case kPPC_I8x16Abs: { - Simd128Register src = i.InputSimd128Register(0); - Simd128Register dst = i.OutputSimd128Register(); - constexpr int shift_bits = 7; - __ xxspltib(kScratchSimd128Reg, Operand(shift_bits)); - __ vsrab(kScratchSimd128Reg, src, 
kScratchSimd128Reg); - __ vxor(dst, src, kScratchSimd128Reg); - __ vsububm(dst, dst, kScratchSimd128Reg); - break; - } case kPPC_V128AnyTrue: { Simd128Register src = i.InputSimd128Register(0); Register dst = i.OutputRegister(); @@ -2825,46 +2739,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ vperm(dst, src0, src1, kScratchSimd128Reg); break; } - case kPPC_I16x8AddSatS: { - __ vaddshs(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1)); - break; - } - case kPPC_I16x8SubSatS: { - __ vsubshs(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1)); - break; - } - case kPPC_I16x8AddSatU: { - __ vadduhs(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1)); - break; - } - case kPPC_I16x8SubSatU: { - __ vsubuhs(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1)); - break; - } - case kPPC_I8x16AddSatS: { - __ vaddsbs(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1)); - break; - } - case kPPC_I8x16SubSatS: { - __ vsubsbs(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1)); - break; - } - case kPPC_I8x16AddSatU: { - __ vaddubs(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1)); - break; - } - case kPPC_I8x16SubSatU: { - __ vsububs(i.OutputSimd128Register(), i.InputSimd128Register(0), - i.InputSimd128Register(1)); - break; - } case kPPC_I8x16Swizzle: { Simd128Register dst = i.OutputSimd128Register(), src0 = i.InputSimd128Register(0), @@ -2935,30 +2809,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ vandc(dst, src, i.InputSimd128Register(1)); break; } - case kPPC_F64x2Ceil: { - __ xvrdpip(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F64x2Floor: { - __ xvrdpim(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F64x2Trunc: { - __ xvrdpiz(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F32x4Ceil: { - __ xvrspip(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F32x4Floor: { - __ xvrspim(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } - case kPPC_F32x4Trunc: { - __ xvrspiz(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } case kPPC_I64x2BitMask: { if (CpuFeatures::IsSupported(PPC_10_PLUS)) { __ vextractdm(i.OutputRegister(), i.InputSimd128Register(0)); @@ -3449,10 +3299,6 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ vinsertd(dst, kScratchSimd128Reg, Operand(lane_number)); break; } - case kPPC_I8x16Popcnt: { - __ vpopcntb(i.OutputSimd128Register(), i.InputSimd128Register(0)); - break; - } case kPPC_StoreCompressTagged: { size_t index = 0; AddressingMode mode = kMode_None; @@ -3561,7 +3407,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr, ReferenceMap* reference_map = gen_->zone()->New<ReferenceMap>(gen_->zone()); gen_->RecordSafepoint(reference_map); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ stop(); } } @@ -3694,13 +3540,13 @@ void CodeGenerator::FinishFrame(Frame* frame) { } // Save callee-saved registers. const RegList saves = - FLAG_enable_embedded_constant_pool + V8_EMBEDDED_CONSTANT_POOL_BOOL ? call_descriptor->CalleeSavedRegisters() - kConstantPoolRegister : call_descriptor->CalleeSavedRegisters(); if (!saves.is_empty()) { // register save area does not include the fp or constant pool pointer. 
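// num_saves below therefore drops one slot for fp and, when the constant
// pool pointer register is reserved, one more for kConstantPoolRegister.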
const int num_saves = - kNumCalleeSaved - 1 - (FLAG_enable_embedded_constant_pool ? 1 : 0); + kNumCalleeSaved - 1 - (V8_EMBEDDED_CONSTANT_POOL_BOOL ? 1 : 0); frame->AllocateSavedCalleeRegisterSlots(num_saves); } } @@ -3720,7 +3566,7 @@ void CodeGenerator::AssembleConstructFrame() { #endif // V8_ENABLE_WEBASSEMBLY } else { __ mflr(r0); - if (FLAG_enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { __ Push(r0, fp, kConstantPoolRegister); // Adjust FP to point to saved FP. __ SubS64(fp, sp, @@ -3769,7 +3615,7 @@ void CodeGenerator::AssembleConstructFrame() { const DoubleRegList saves_fp = call_descriptor->CalleeSavedFPRegisters(); const RegList saves = - FLAG_enable_embedded_constant_pool + V8_EMBEDDED_CONSTANT_POOL_BOOL ? call_descriptor->CalleeSavedRegisters() - kConstantPoolRegister : call_descriptor->CalleeSavedRegisters(); @@ -3785,7 +3631,7 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally. Thereby we can avoid the integer overflow // check in the condition code. - if (required_slots * kSystemPointerSize < FLAG_stack_size * KB) { + if (required_slots * kSystemPointerSize < v8_flags.stack_size * KB) { Register scratch = ip; __ LoadU64( scratch, @@ -3804,7 +3650,7 @@ void CodeGenerator::AssembleConstructFrame() { // define an empty safepoint. ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone()); RecordSafepoint(reference_map); - if (FLAG_debug_code) __ stop(); + if (v8_flags.debug_code) __ stop(); __ bind(&done); } @@ -3845,7 +3691,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { // Restore registers. const RegList saves = - FLAG_enable_embedded_constant_pool + V8_EMBEDDED_CONSTANT_POOL_BOOL ? 
call_descriptor->CalleeSavedRegisters() - kConstantPoolRegister : call_descriptor->CalleeSavedRegisters(); if (!saves.is_empty()) { @@ -3869,7 +3715,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ cmpi(g.ToRegister(additional_pop_count), Operand(0)); __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue); } diff --git a/deps/v8/src/compiler/backend/ppc/instruction-codes-ppc.h b/deps/v8/src/compiler/backend/ppc/instruction-codes-ppc.h index 8ca4b0c8520e5d..5710aa313dfd8b 100644 --- a/deps/v8/src/compiler/backend/ppc/instruction-codes-ppc.h +++ b/deps/v8/src/compiler/backend/ppc/instruction-codes-ppc.h @@ -48,6 +48,8 @@ namespace compiler { V(PPC_Mul32) \ V(PPC_Mul32WithHigh32) \ V(PPC_Mul64) \ + V(PPC_MulHighS64) \ + V(PPC_MulHighU64) \ V(PPC_MulHigh32) \ V(PPC_MulHighU32) \ V(PPC_MulPair) \ diff --git a/deps/v8/src/compiler/backend/ppc/instruction-scheduler-ppc.cc b/deps/v8/src/compiler/backend/ppc/instruction-scheduler-ppc.cc index 52d82b680294a5..e1d195f25352ca 100644 --- a/deps/v8/src/compiler/backend/ppc/instruction-scheduler-ppc.cc +++ b/deps/v8/src/compiler/backend/ppc/instruction-scheduler-ppc.cc @@ -46,6 +46,8 @@ int InstructionScheduler::GetTargetInstructionFlags( case kPPC_Mul32: case kPPC_Mul32WithHigh32: case kPPC_Mul64: + case kPPC_MulHighS64: + case kPPC_MulHighU64: case kPPC_MulHigh32: case kPPC_MulHighU32: case kPPC_MulPair: diff --git a/deps/v8/src/compiler/backend/ppc/instruction-selector-ppc.cc b/deps/v8/src/compiler/backend/ppc/instruction-selector-ppc.cc index d1e492c2288391..a5069d22fdcb33 100644 --- a/deps/v8/src/compiler/backend/ppc/instruction-selector-ppc.cc +++ b/deps/v8/src/compiler/backend/ppc/instruction-selector-ppc.cc @@ -286,12 +286,13 @@ void VisitStoreCommon(InstructionSelector* selector, Node* node, write_barrier_kind = store_rep.write_barrier_kind(); } - if (FLAG_enable_unconditional_write_barriers && + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedOrCompressedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedOrCompressedPointer(rep)); AddressingMode addressing_mode; InstructionOperand inputs[3]; @@ -1108,6 +1109,23 @@ void EmitInt32MulWithOverflow(InstructionSelector* selector, Node* node, VisitCompare(selector, kPPC_Cmp32, high32_operand, temp_operand, cont); } +void EmitInt64MulWithOverflow(InstructionSelector* selector, Node* node, + FlagsContinuation* cont) { + PPCOperandGenerator g(selector); + Int64BinopMatcher m(node); + InstructionOperand result = g.DefineAsRegister(node); + InstructionOperand left = g.UseRegister(m.left().node()); + InstructionOperand high = g.TempRegister(); + InstructionOperand result_sign = g.TempRegister(); + InstructionOperand right = g.UseRegister(m.right().node()); + selector->Emit(kPPC_Mul64, result, left, right); + selector->Emit(kPPC_MulHighS64, high, left, right); + selector->Emit(kPPC_ShiftRightAlg64, result_sign, result, + g.TempImmediate(63)); + // Test whether {high} is a sign-extension of {result}. 
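+  // In plain C++ terms the check is roughly the following (an illustrative
+  // sketch, with 128-bit arithmetic standing in for the mul/mulhd pair;
+  // lhs/rhs are placeholder names):
+  //   __int128 p = (__int128)lhs * (__int128)rhs;
+  //   bool overflow = (int64_t)(p >> 64) != ((int64_t)p >> 63);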
+ selector->EmitWithContinuation(kPPC_Cmp64, high, result_sign, cont); +} + } // namespace void InstructionSelector::VisitInt32Mul(Node* node) { @@ -1132,6 +1150,18 @@ void InstructionSelector::VisitUint32MulHigh(Node* node) { g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1))); } +void InstructionSelector::VisitInt64MulHigh(Node* node) { + PPCOperandGenerator g(this); + Emit(kPPC_MulHighS64, g.DefineAsRegister(node), + g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1))); +} + +void InstructionSelector::VisitUint64MulHigh(Node* node) { + PPCOperandGenerator g(this); + Emit(kPPC_MulHighU64, g.DefineAsRegister(node), + g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1))); +} + void InstructionSelector::VisitInt32Div(Node* node) { VisitRRR(this, kPPC_Div32, node); } @@ -1541,6 +1571,15 @@ void InstructionSelector::VisitInt64SubWithOverflow(Node* node) { FlagsContinuation cont; VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, &cont); } + +void InstructionSelector::VisitInt64MulWithOverflow(Node* node) { + if (Node* ovf = NodeProperties::FindProjection(node, 1)) { + FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf); + return EmitInt64MulWithOverflow(this, node, &cont); + } + FlagsContinuation cont; + EmitInt64MulWithOverflow(this, node, &cont); +} #endif static bool CompareLogical(FlagsContinuation* cont) { @@ -1721,6 +1760,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value, cont->OverwriteAndNegateIfEqual(kOverflow); return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, cont); + case IrOpcode::kInt64MulWithOverflow: + cont->OverwriteAndNegateIfEqual(kNotEqual); + return EmitInt64MulWithOverflow(this, node, cont); #endif default: break; diff --git a/deps/v8/src/compiler/backend/ppc/unwinding-info-writer-ppc.h b/deps/v8/src/compiler/backend/ppc/unwinding-info-writer-ppc.h index e96a48308fb609..b11e00be2d12d3 100644 --- a/deps/v8/src/compiler/backend/ppc/unwinding-info-writer-ppc.h +++ b/deps/v8/src/compiler/backend/ppc/unwinding-info-writer-ppc.h @@ -49,7 +49,7 @@ class UnwindingInfoWriter { } private: - bool enabled() const { return FLAG_perf_prof_unwinding_info; } + bool enabled() const { return v8_flags.perf_prof_unwinding_info; } class BlockInitialState : public ZoneObject { public: diff --git a/deps/v8/src/compiler/backend/register-allocator.h b/deps/v8/src/compiler/backend/register-allocator.h index 165a4db273739c..e7a228e6f91362 100644 --- a/deps/v8/src/compiler/backend/register-allocator.h +++ b/deps/v8/src/compiler/backend/register-allocator.h @@ -730,6 +730,9 @@ struct LiveRangeOrdering { return left->Start() < right->Start(); } }; +// Bundle live ranges that are connected by phis and do not overlap. This tries +// to restore some pre-SSA information and is used as a hint to allocate the +// same spill slot or reuse the same register for connected live ranges. class LiveRangeBundle : public ZoneObject { public: void MergeSpillRangesAndClear(); @@ -799,6 +802,10 @@ class LiveRangeBundle : public ZoneObject { int reg_ = kUnassignedRegister; }; +// Register allocation splits LiveRanges so it can make more fine-grained +// allocation and spilling decisions. The LiveRanges that belong to the same +// virtual register form a linked-list, and the head of this list is a +// TopLevelLiveRange. 
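+// The children of a TopLevelLiveRange are the pieces produced by splitting,
+// chained via next() in order of their start positions.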
class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange { public: explicit TopLevelLiveRange(int vreg, MachineRepresentation rep); @@ -1069,6 +1076,9 @@ struct PrintableLiveRange { std::ostream& operator<<(std::ostream& os, const PrintableLiveRange& printable_range); +// Represent the spill operand of a LiveRange and its use intervals. After +// register allocation, disjoint spill ranges are merged and they get assigned +// the same spill slot by OperandAssigner::AssignSpillSlots(). class SpillRange final : public ZoneObject { public: static const int kUnassignedSlot = -1; @@ -1111,6 +1121,8 @@ class SpillRange final : public ZoneObject { int byte_width_; }; +// A live range with the start and end position, and helper methods for the +// ResolveControlFlow phase. class LiveRangeBound { public: explicit LiveRangeBound(LiveRange* range, bool skip) @@ -1135,6 +1147,8 @@ struct FindResult { LiveRange* pred_cover_; }; +// An array of LiveRangeBounds belonging to the same TopLevelLiveRange. Sorted +// by their start position for quick binary search. class LiveRangeBoundArray { public: LiveRangeBoundArray() : length_(0), start_(nullptr) {} @@ -1478,6 +1492,11 @@ class LinearScanAllocator final : public RegisterAllocator { RangeWithRegisterSet* to_be_live); // Helper methods for allocating registers. + + // Spilling a phi at range start can be beneficial when the phi input is + // already spilled and shares the same spill slot. This function tries to + // guess if spilling the phi is beneficial based on live range bundles and + // spilled phi inputs. bool TryReuseSpillForPhi(TopLevelLiveRange* range); int PickRegisterThatIsAvailableLongest( LiveRange* current, int hint_reg, diff --git a/deps/v8/src/compiler/backend/riscv/code-generator-riscv.cc b/deps/v8/src/compiler/backend/riscv/code-generator-riscv.cc index 7ff828b0ec8cd5..0f227a3fc60940 100644 --- a/deps/v8/src/compiler/backend/riscv/code-generator-riscv.cc +++ b/deps/v8/src/compiler/backend/riscv/code-generator-riscv.cc @@ -721,7 +721,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } case kArchCallJSFunction: { Register func = i.InputOrZeroRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check the function's context matches the context argument. 
__ LoadTaggedPointerField( kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); @@ -905,7 +905,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( int alignment = i.InputInt32(1); DCHECK(alignment == 0 || alignment == 4 || alignment == 8 || alignment == 16); - if (FLAG_debug_code && alignment > 0) { + if (v8_flags.debug_code && alignment > 0) { // Verify that the output_register is properly aligned __ And(kScratchReg, i.OutputRegister(), Operand(kSystemPointerSize - 1)); @@ -1036,6 +1036,14 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ Mulh64(i.OutputRegister(), i.InputOrZeroRegister(0), i.InputOperand(1)); break; + case kRiscvMulHighU64: + __ Mulhu64(i.OutputRegister(), i.InputOrZeroRegister(0), + i.InputOperand(1)); + break; + case kRiscvMulOvf64: + __ MulOverflow64(i.OutputRegister(), i.InputOrZeroRegister(0), + i.InputOperand(1), kScratchReg); + break; case kRiscvDiv32: { __ Div32(i.OutputRegister(), i.InputOrZeroRegister(0), i.InputOperand(1)); // Set output to zero if divisor == 0 @@ -3745,7 +3753,13 @@ void AssembleBranchToLabels(CodeGenerator* gen, TurboAssembler* tasm, default: UNSUPPORTED_COND(instr->arch_opcode(), condition); } +#if V8_TARGET_ARCH_RISCV64 + // kRiscvMulOvf64 is only for RISCV64 + } else if (instr->arch_opcode() == kRiscvMulOvf32 || + instr->arch_opcode() == kRiscvMulOvf64) { +#elif V8_TARGET_ARCH_RISCV32 } else if (instr->arch_opcode() == kRiscvMulOvf32) { +#endif // Overflow occurs if overflow register is not zero switch (condition) { case kOverflow: @@ -3755,7 +3769,7 @@ void AssembleBranchToLabels(CodeGenerator* gen, TurboAssembler* tasm, __ Branch(tlabel, eq, kScratchReg, Operand(zero_reg)); break; default: - UNSUPPORTED_COND(kRiscvMulOvf32, condition); + UNSUPPORTED_COND(instr->arch_opcode(), condition); } } else if (instr->arch_opcode() == kRiscvCmp) { Condition cc = FlagsConditionToConditionCmp(condition); @@ -3855,7 +3869,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr, ReferenceMap* reference_map = gen_->zone()->New<ReferenceMap>(gen_->zone()); gen_->RecordSafepoint(reference_map); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ stop(); } } @@ -3908,7 +3922,13 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr, #endif // Overflow occurs if overflow register is negative __ Slt(result, kScratchReg, zero_reg); +#if V8_TARGET_ARCH_RISCV64 + // kRiscvMulOvf64 is only for RISCV64 + } else if (instr->arch_opcode() == kRiscvMulOvf32 || + instr->arch_opcode() == kRiscvMulOvf64) { +#elif V8_TARGET_ARCH_RISCV32 } else if (instr->arch_opcode() == kRiscvMulOvf32) { +#endif // Overflow occurs if overflow register is not zero __ Sgtu(result, kScratchReg, zero_reg); } else if (instr->arch_opcode() == kRiscvCmp) { @@ -4193,7 +4213,8 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally. Thereby we can avoid the integer overflow // check in the condition code. - if ((required_slots * kSystemPointerSize) < (FLAG_stack_size * 1024)) { + if ((required_slots * kSystemPointerSize) < + (v8_flags.stack_size * 1024)) { __ LoadWord( kScratchReg, FieldMemOperand(kWasmInstanceRegister, @@ -4208,7 +4229,7 @@ void CodeGenerator::AssembleConstructFrame() { // We come from WebAssembly, there are no references for the GC. 
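// An empty reference map is therefore enough for the safepoint recorded
// below.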
ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone()); RecordSafepoint(reference_map); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ stop(); } @@ -4273,7 +4294,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue, g.ToRegister(additional_pop_count), Operand(static_cast<intptr_t>(0))); diff --git a/deps/v8/src/compiler/backend/riscv/instruction-codes-riscv.h b/deps/v8/src/compiler/backend/riscv/instruction-codes-riscv.h index 6e2e69d8ce46f0..efe7a23267d0d4 100644 --- a/deps/v8/src/compiler/backend/riscv/instruction-codes-riscv.h +++ b/deps/v8/src/compiler/backend/riscv/instruction-codes-riscv.h @@ -18,7 +18,9 @@ namespace compiler { V(RiscvSub64) \ V(RiscvSubOvf64) \ V(RiscvMulHigh64) \ + V(RiscvMulHighU64) \ V(RiscvMul64) \ + V(RiscvMulOvf64) \ V(RiscvDiv64) \ V(RiscvDivU64) \ V(RiscvMod64) \ diff --git a/deps/v8/src/compiler/backend/riscv/instruction-scheduler-riscv.cc b/deps/v8/src/compiler/backend/riscv/instruction-scheduler-riscv.cc index 879ac4393fd56c..ea9e603920184f 100644 --- a/deps/v8/src/compiler/backend/riscv/instruction-scheduler-riscv.cc +++ b/deps/v8/src/compiler/backend/riscv/instruction-scheduler-riscv.cc @@ -24,6 +24,7 @@ int InstructionScheduler::GetTargetInstructionFlags( case kRiscvCvtSL: case kRiscvCvtSUl: case kRiscvMulHigh64: + case kRiscvMulHighU64: case kRiscvAdd64: case kRiscvAddOvf64: case kRiscvClz64: @@ -35,6 +36,7 @@ int InstructionScheduler::GetTargetInstructionFlags( case kRiscvMod64: case kRiscvModU64: case kRiscvMul64: + case kRiscvMulOvf64: case kRiscvPopcnt64: case kRiscvRor64: case kRiscvSar64: @@ -946,6 +948,11 @@ int MulOverflow32Latency() { return Mul32Latency() + Mulh32Latency() + 2; } +int MulOverflow64Latency() { + // Estimated max. + return Mul64Latency() + Mulh64Latency() + 2; +} + // TODO(RISCV): This is incorrect for RISC-V. int Clz64Latency() { return 1; } @@ -1128,7 +1135,7 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) { return JumpLatency(); case kArchCallJSFunction: { int latency = 0; - if (FLAG_debug_code) { + if (v8_flags.debug_code) { latency = 1 + AssertLatency(); } return latency + 1 + Add64Latency(false) + CallLatency(); @@ -1216,6 +1223,8 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) { return Mulh64Latency(); case kRiscvMul64: return Mul64Latency(); + case kRiscvMulOvf64: + return MulOverflow64Latency(); case kRiscvDiv64: { int latency = Div64Latency(); return latency + MovzLatency(); diff --git a/deps/v8/src/compiler/backend/riscv/instruction-selector-riscv32.cc b/deps/v8/src/compiler/backend/riscv/instruction-selector-riscv32.cc index 4d01a47b7042f1..a8db8248b3e3d6 100644 --- a/deps/v8/src/compiler/backend/riscv/instruction-selector-riscv32.cc +++ b/deps/v8/src/compiler/backend/riscv/instruction-selector-riscv32.cc @@ -204,7 +204,7 @@ void InstructionSelector::VisitStore(Node* node) { // TODO(riscv): I guess this could be done in a better way. 
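// Stores of tagged pointers take the record-write path below; all other
// stores fall through to a plain store.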
if (write_barrier_kind != kNoWriteBarrier && - V8_LIKELY(!FLAG_disable_write_barriers)) { + V8_LIKELY(!v8_flags.disable_write_barriers)) { DCHECK(CanBeTaggedPointer(rep)); InstructionOperand inputs[3]; size_t input_count = 0; diff --git a/deps/v8/src/compiler/backend/riscv/instruction-selector-riscv64.cc b/deps/v8/src/compiler/backend/riscv/instruction-selector-riscv64.cc index e1b6ff7eeef49d..83f5b5ecb456d1 100644 --- a/deps/v8/src/compiler/backend/riscv/instruction-selector-riscv64.cc +++ b/deps/v8/src/compiler/backend/riscv/instruction-selector-riscv64.cc @@ -326,7 +326,7 @@ void InstructionSelector::VisitStore(Node* node) { // TODO(riscv): I guess this could be done in a better way. if (write_barrier_kind != kNoWriteBarrier && - V8_LIKELY(!FLAG_disable_write_barriers)) { + V8_LIKELY(!v8_flags.disable_write_barriers)) { DCHECK(CanBeTaggedPointer(rep)); InstructionOperand inputs[3]; size_t input_count = 0; @@ -667,10 +667,18 @@ void InstructionSelector::VisitInt32MulHigh(Node* node) { VisitRRR(this, kRiscvMulHigh32, node); } +void InstructionSelector::VisitInt64MulHigh(Node* node) { + VisitRRR(this, kRiscvMulHigh64, node); +} + void InstructionSelector::VisitUint32MulHigh(Node* node) { VisitRRR(this, kRiscvMulHighU32, node); } +void InstructionSelector::VisitUint64MulHigh(Node* node) { + VisitRRR(this, kRiscvMulHighU64, node); +} + void InstructionSelector::VisitInt64Mul(Node* node) { RiscvOperandGenerator g(this); Int64BinopMatcher m(node); @@ -1441,7 +1449,7 @@ void VisitFullWord32Compare(InstructionSelector* selector, Node* node, void VisitOptimizedWord32Compare(InstructionSelector* selector, Node* node, InstructionCode opcode, FlagsContinuation* cont) { - if (FLAG_debug_code) { + if (v8_flags.debug_code) { RiscvOperandGenerator g(selector); InstructionOperand leftOp = g.TempRegister(); InstructionOperand rightOp = g.TempRegister(); @@ -1610,6 +1618,18 @@ void InstructionSelector::VisitStackPointerGreaterThan( temp_count, temps, cont); } +bool CanCoverTrap(Node* user, Node* value) { + if (user->opcode() != IrOpcode::kTrapUnless && + user->opcode() != IrOpcode::kTrapIf) + return true; + if (value->opcode() == IrOpcode::kWord32Equal || + value->opcode() == IrOpcode::kInt32LessThan || + value->opcode() == IrOpcode::kInt32LessThanOrEqual || + value->opcode() == IrOpcode::kUint32LessThan || + value->opcode() == IrOpcode::kUint32LessThanOrEqual) + return false; + return true; +} // Shared routine for word comparisons against zero. void InstructionSelector::VisitWordCompareZero(Node* user, Node* value, FlagsContinuation* cont) { @@ -1632,7 +1652,7 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value, cont->Negate(); } - if (CanCover(user, value)) { + if (CanCoverTrap(user, value) && CanCover(user, value)) { switch (value->opcode()) { case IrOpcode::kWord32Equal: cont->OverwriteAndNegateIfEqual(kEqual); @@ -1808,6 +1828,21 @@ void InstructionSelector::VisitInt64SubWithOverflow(Node* node) { VisitBinop(this, node, kRiscvSubOvf64, &cont); } +void InstructionSelector::VisitInt64MulWithOverflow(Node* node) { + if (Node* ovf = NodeProperties::FindProjection(node, 1)) { + // RISCV64 doesn't set the overflow flag for multiplication, so we need to + // test on kNotEqual. 
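+    // The kRiscvMulOvf64 sequence instead materializes the overflow
+    // indication in a scratch register, which the continuation then
+    // compares against zero.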
Here is the code sequence used: + // mulh rdh, left, right + // mul rdl, left, right + // srai temp, rdl, 63 + // xor overflow, rdh, temp + FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf); + return VisitBinop(this, node, kRiscvMulOvf64, &cont); + } + FlagsContinuation cont; + VisitBinop(this, node, kRiscvMulOvf64, &cont); +} + void InstructionSelector::VisitWord64Equal(Node* const node) { FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node); Int64BinopMatcher m(node); diff --git a/deps/v8/src/compiler/backend/s390/code-generator-s390.cc b/deps/v8/src/compiler/backend/s390/code-generator-s390.cc index 6f5ed9b9812aeb..27b2a5a853799e 100644 --- a/deps/v8/src/compiler/backend/s390/code-generator-s390.cc +++ b/deps/v8/src/compiler/backend/s390/code-generator-s390.cc @@ -306,6 +306,7 @@ Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) { case kS390_Abs64: case kS390_Abs32: case kS390_Mul32: + case kS390_Mul64WithOverflow: return overflow; default: break; @@ -1124,7 +1125,7 @@ void CodeGenerator::AssembleCodeStartRegisterCheck() { // 2. test kMarkedForDeoptimizationBit in those flags; and // 3. if it is not zero then it jumps to the builtin. void CodeGenerator::BailoutIfDeoptimized() { - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check that {kJavaScriptCallCodeStartRegister} is correct. __ ComputeCodeStartAddress(ip); __ CmpS64(ip, kJavaScriptCallCodeStartRegister); @@ -1237,7 +1238,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } case kArchCallJSFunction: { Register func = i.InputRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check the function's context matches the context argument. __ LoadTaggedPointerField( kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); @@ -1417,7 +1418,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( Register scratch1 = i.TempRegister(1); OutOfLineRecordWrite* ool; - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Checking that |value| is not a cleared weakref: our write barrier // does not support that for now. __ CmpS64(value, Operand(kClearedWeakHeapObjectLower32)); @@ -1695,6 +1696,21 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( case kS390_Mul64: ASSEMBLE_BIN_OP(RRInstr(MulS64), RM64Instr(MulS64), RIInstr(MulS64)); break; + case kS390_Mul64WithOverflow: { + Register dst = i.OutputRegister(), src1 = i.InputRegister(0), + src2 = i.InputRegister(1); + DCHECK(!AreAliased(dst, src1, src2)); + if (CpuFeatures::IsSupported(MISC_INSTR_EXT2)) { + __ msgrkc(dst, src1, src2); + } else { + __ mgrk(r0, src1, src2); // r0 = high 64 bits, r1 = low 64 bits. + __ lgr(dst, r1); + __ ShiftRightS64(r1, r1, Operand(63)); + // Test whether {high} is a sign-extension of {result}. 
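+        // At this point r1 holds the low half arithmetically shifted right
+        // by 63, i.e. all zeros or all ones; the product fits in 64 bits
+        // iff r0 equals it.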
+ __ CmpU64(r0, r1); + } + break; + } case kS390_MulHigh32: // zero-ext ASSEMBLE_BIN_OP(RRRInstr(MulHighS32), RRM32Instr(MulHighS32), @@ -1705,6 +1721,12 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ASSEMBLE_BIN_OP(RRRInstr(MulHighU32), RRM32Instr(MulHighU32), RRIInstr(MulHighU32)); break; + case kS390_MulHighU64: + ASSEMBLE_BIN_OP(RRRInstr(MulHighU64), nullInstr, nullInstr); + break; + case kS390_MulHighS64: + ASSEMBLE_BIN_OP(RRRInstr(MulHighS64), nullInstr, nullInstr); + break; case kS390_MulFloat: ASSEMBLE_BIN_OP(DDInstr(meebr), DMTInstr(MulFloat32), nullInstr); break; @@ -3240,7 +3262,7 @@ void CodeGenerator::AssembleArchTrap(Instruction* instr, ReferenceMap* reference_map = gen_->zone()->New<ReferenceMap>(gen_->zone()); gen_->RecordSafepoint(reference_map); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ stop(); } } @@ -3423,7 +3445,7 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally. Thereby we can avoid the integer overflow // check in the condition code. - if (required_slots * kSystemPointerSize < FLAG_stack_size * KB) { + if (required_slots * kSystemPointerSize < v8_flags.stack_size * KB) { Register scratch = r1; __ LoadU64( scratch, @@ -3441,7 +3463,7 @@ void CodeGenerator::AssembleConstructFrame() { // define an empty safepoint. ReferenceMap* reference_map = zone()->New<ReferenceMap>(zone()); RecordSafepoint(reference_map); - if (FLAG_debug_code) __ stop(); + if (v8_flags.debug_code) __ stop(); __ bind(&done); } @@ -3503,7 +3525,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ CmpS64(g.ToRegister(additional_pop_count), Operand(0)); __ Assert(eq, AbortReason::kUnexpectedAdditionalPopValue); } diff --git a/deps/v8/src/compiler/backend/s390/instruction-codes-s390.h b/deps/v8/src/compiler/backend/s390/instruction-codes-s390.h index 45502c89422359..f362cddcf7a224 100644 --- a/deps/v8/src/compiler/backend/s390/instruction-codes-s390.h +++ b/deps/v8/src/compiler/backend/s390/instruction-codes-s390.h @@ -47,6 +47,9 @@ namespace compiler { V(S390_Mul32) \ V(S390_Mul32WithOverflow) \ V(S390_Mul64) \ + V(S390_Mul64WithOverflow) \ + V(S390_MulHighS64) \ + V(S390_MulHighU64) \ V(S390_MulHigh32) \ V(S390_MulHighU32) \ V(S390_MulFloat) \ diff --git a/deps/v8/src/compiler/backend/s390/instruction-scheduler-s390.cc b/deps/v8/src/compiler/backend/s390/instruction-scheduler-s390.cc index 320c12574df19a..fa0a60a0193c45 100644 --- a/deps/v8/src/compiler/backend/s390/instruction-scheduler-s390.cc +++ b/deps/v8/src/compiler/backend/s390/instruction-scheduler-s390.cc @@ -46,6 +46,9 @@ int InstructionScheduler::GetTargetInstructionFlags( case kS390_Mul32: case kS390_Mul32WithOverflow: case kS390_Mul64: + case kS390_Mul64WithOverflow: + case kS390_MulHighS64: + case kS390_MulHighU64: case kS390_MulHigh32: case kS390_MulHighU32: case kS390_MulFloat: diff --git a/deps/v8/src/compiler/backend/s390/instruction-selector-s390.cc b/deps/v8/src/compiler/backend/s390/instruction-selector-s390.cc index fc49a1376e55a8..a0192b0022bace 100644 --- a/deps/v8/src/compiler/backend/s390/instruction-selector-s390.cc +++ b/deps/v8/src/compiler/backend/s390/instruction-selector-s390.cc @@ -725,7 +725,8 @@ static void VisitGeneralStore( Node* base = 
node->InputAt(0); Node* offset = node->InputAt(1); Node* value = node->InputAt(2); - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedOrCompressedPointer(rep)); AddressingMode addressing_mode; InstructionOperand inputs[3]; @@ -824,7 +825,7 @@ void InstructionSelector::VisitStore(Node* node) { WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind(); MachineRepresentation rep = store_rep.representation(); - if (FLAG_enable_unconditional_write_barriers && + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedOrCompressedPointer(rep)) { write_barrier_kind = kFullWriteBarrier; } @@ -1258,6 +1259,23 @@ static inline bool TryMatchInt64SubWithOverflow(InstructionSelector* selector, return TryMatchInt64OpWithOverflow<kS390_Sub64>(selector, node, SubOperandMode); } + +void EmitInt64MulWithOverflow(InstructionSelector* selector, Node* node, + FlagsContinuation* cont) { + S390OperandGenerator g(selector); + Int64BinopMatcher m(node); + InstructionOperand inputs[2]; + size_t input_count = 0; + InstructionOperand outputs[1]; + size_t output_count = 0; + + inputs[input_count++] = g.UseUniqueRegister(m.left().node()); + inputs[input_count++] = g.UseUniqueRegister(m.right().node()); + outputs[output_count++] = g.DefineAsRegister(node); + selector->EmitWithContinuation(kS390_Mul64WithOverflow, output_count, outputs, + input_count, inputs, cont); +} + #endif static inline bool TryMatchDoubleConstructFromInsert( @@ -1471,6 +1489,8 @@ static inline bool TryMatchDoubleConstructFromInsert( #define WORD64_BIN_OP_LIST(V) \ V(Word64, Int64Add, kS390_Add64, AddOperandMode, null) \ + V(Word64, Int64MulHigh, kS390_MulHighS64, OperandMode::kAllowRRR, null) \ + V(Word64, Uint64MulHigh, kS390_MulHighU64, OperandMode::kAllowRRR, null) \ V(Word64, Int64Sub, kS390_Sub64, SubOperandMode, ([&]() { \ return TryMatchNegFromSub<Int64BinopMatcher, kS390_Neg64>(this, node); \ })) \ @@ -1584,6 +1604,16 @@ void InstructionSelector::VisitFloat64Ieee754Binop(Node* node, ->MarkAsCall(); } +void InstructionSelector::VisitInt64MulWithOverflow(Node* node) { + if (Node* ovf = NodeProperties::FindProjection(node, 1)) { + FlagsContinuation cont = FlagsContinuation::ForSet( + CpuFeatures::IsSupported(MISC_INSTR_EXT2) ? kOverflow : kNotEqual, ovf); + return EmitInt64MulWithOverflow(this, node, &cont); + } + FlagsContinuation cont; + EmitInt64MulWithOverflow(this, node, &cont); +} + static bool CompareLogical(FlagsContinuation* cont) { switch (cont->condition()) { case kUnsignedLessThan: @@ -1901,6 +1931,11 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value, cont->OverwriteAndNegateIfEqual(kOverflow); return VisitWord64BinOp(this, node, kS390_Sub64, SubOperandMode, cont); + case IrOpcode::kInt64MulWithOverflow: + cont->OverwriteAndNegateIfEqual( + CpuFeatures::IsSupported(MISC_INSTR_EXT2) ? 
kOverflow + : kNotEqual); + return EmitInt64MulWithOverflow(this, node, cont); #endif default: break; diff --git a/deps/v8/src/compiler/backend/s390/unwinding-info-writer-s390.h b/deps/v8/src/compiler/backend/s390/unwinding-info-writer-s390.h index 2202c285957cf8..7c472723c488ea 100644 --- a/deps/v8/src/compiler/backend/s390/unwinding-info-writer-s390.h +++ b/deps/v8/src/compiler/backend/s390/unwinding-info-writer-s390.h @@ -49,7 +49,7 @@ class UnwindingInfoWriter { } private: - bool enabled() const { return FLAG_perf_prof_unwinding_info; } + bool enabled() const { return v8_flags.perf_prof_unwinding_info; } class BlockInitialState : public ZoneObject { public: diff --git a/deps/v8/src/compiler/backend/spill-placer.cc b/deps/v8/src/compiler/backend/spill-placer.cc index 01d130ff596c76..5cf4861577d0d7 100644 --- a/deps/v8/src/compiler/backend/spill-placer.cc +++ b/deps/v8/src/compiler/backend/spill-placer.cc @@ -45,7 +45,7 @@ void SpillPlacer::Add(TopLevelLiveRange* range) { // increasing the code size for no benefit. if (range->GetSpillMoveInsertionLocations(data()) == nullptr || range->spilled() || top_start_block->IsDeferred() || - (!FLAG_stress_turbo_late_spilling && !range->is_loop_phi())) { + (!v8_flags.stress_turbo_late_spilling && !range->is_loop_phi())) { range->CommitSpillMoves(data(), spill_operand); return; } diff --git a/deps/v8/src/compiler/backend/unwinding-info-writer.h b/deps/v8/src/compiler/backend/unwinding-info-writer.h index ecc9658d33ea2f..446bd82f57350c 100644 --- a/deps/v8/src/compiler/backend/unwinding-info-writer.h +++ b/deps/v8/src/compiler/backend/unwinding-info-writer.h @@ -33,7 +33,7 @@ namespace compiler { class InstructionBlock; -static_assert(!FLAG_perf_prof_unwinding_info.value(), +static_assert(!v8_flags.perf_prof_unwinding_info.value(), "--perf-prof-unwinding-info should be statically disabled if not " "supported"); diff --git a/deps/v8/src/compiler/backend/x64/code-generator-x64.cc b/deps/v8/src/compiler/backend/x64/code-generator-x64.cc index 7d66748d13b512..6a29cb308e593a 100644 --- a/deps/v8/src/compiler/backend/x64/code-generator-x64.cc +++ b/deps/v8/src/compiler/backend/x64/code-generator-x64.cc @@ -481,7 +481,7 @@ class WasmProtectedInstructionTrap final : public WasmOutOfLineTrap { : WasmOutOfLineTrap(gen, instr), pc_(pc) {} void Generate() final { - DCHECK(FLAG_wasm_bounds_checks && !FLAG_wasm_enforce_bounds_checks); + DCHECK(v8_flags.wasm_bounds_checks && !v8_flags.wasm_enforce_bounds_checks); gen_->AddProtectedInstructionLanding(pc_, __ pc_offset()); GenerateWithTrapId(TrapId::kTrapMemOutOfBounds); } @@ -789,7 +789,41 @@ void EmitTSANRelaxedLoadOOLIfNeeded(Zone* zone, CodeGenerator* codegen, } \ } while (false) -#define ASSEMBLE_COMPARE(asm_instr) \ +#define ASSEMBLE_COMPARE(cmp_instr, test_instr) \ + do { \ + if (HasAddressingMode(instr)) { \ + size_t index = 0; \ + Operand left = i.MemoryOperand(&index); \ + if (HasImmediateInput(instr, index)) { \ + __ cmp_instr(left, i.InputImmediate(index)); \ + } else { \ + __ cmp_instr(left, i.InputRegister(index)); \ + } \ + } else { \ + if (HasImmediateInput(instr, 1)) { \ + Immediate right = i.InputImmediate(1); \ + if (HasRegisterInput(instr, 0)) { \ + if (right.value() == 0 && \ + (FlagsConditionField::decode(opcode) == kEqual || \ + FlagsConditionField::decode(opcode) == kNotEqual)) { \ + __ test_instr(i.InputRegister(0), i.InputRegister(0)); \ + } else { \ + __ cmp_instr(i.InputRegister(0), right); \ + } \ + } else { \ + __ cmp_instr(i.InputOperand(0), right); \ + } \ + } else { \ + if 
(HasRegisterInput(instr, 1)) { \ + __ cmp_instr(i.InputRegister(0), i.InputRegister(1)); \ + } else { \ + __ cmp_instr(i.InputRegister(0), i.InputOperand(1)); \ + } \ + } \ + } \ + } while (false) + +#define ASSEMBLE_TEST(asm_instr) \ do { \ if (HasAddressingMode(instr)) { \ size_t index = 0; \ @@ -1318,7 +1352,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } case kArchCallJSFunction: { Register func = i.InputRegister(0); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Check the function's context matches the context argument. __ cmp_tagged(rsi, FieldOperand(func, JSFunction::kContextOffset)); __ Assert(equal, AbortReason::kWrongFunctionContext); @@ -1515,7 +1549,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( Register scratch0 = i.TempRegister(0); Register scratch1 = i.TempRegister(1); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { // Checking that |value| is not a cleared weakref: our write barrier // does not support that for now. __ Cmp(value, kClearedWeakHeapObjectLower32); @@ -1639,28 +1673,28 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ASSEMBLE_BINOP(andq); break; case kX64Cmp8: - ASSEMBLE_COMPARE(cmpb); + ASSEMBLE_COMPARE(cmpb, testb); break; case kX64Cmp16: - ASSEMBLE_COMPARE(cmpw); + ASSEMBLE_COMPARE(cmpw, testw); break; case kX64Cmp32: - ASSEMBLE_COMPARE(cmpl); + ASSEMBLE_COMPARE(cmpl, testl); break; case kX64Cmp: - ASSEMBLE_COMPARE(cmpq); + ASSEMBLE_COMPARE(cmpq, testq); break; case kX64Test8: - ASSEMBLE_COMPARE(testb); + ASSEMBLE_TEST(testb); break; case kX64Test16: - ASSEMBLE_COMPARE(testw); + ASSEMBLE_TEST(testw); break; case kX64Test32: - ASSEMBLE_COMPARE(testl); + ASSEMBLE_TEST(testl); break; case kX64Test: - ASSEMBLE_COMPARE(testq); + ASSEMBLE_TEST(testq); break; case kX64Imul32: ASSEMBLE_MULT(imull); @@ -1682,6 +1716,20 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ mull(i.InputOperand(1)); } break; + case kX64ImulHigh64: + if (HasRegisterInput(instr, 1)) { + __ imulq(i.InputRegister(1)); + } else { + __ imulq(i.InputOperand(1)); + } + break; + case kX64UmulHigh64: + if (HasRegisterInput(instr, 1)) { + __ mulq(i.InputRegister(1)); + } else { + __ mulq(i.InputOperand(1)); + } + break; case kX64Idiv32: __ cdq(); __ idivl(i.InputRegister(1)); @@ -4605,7 +4653,7 @@ void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr, } __ j(FlagsConditionToCondition(branch->condition), tlabel); - if (FLAG_deopt_every_n_times > 0) { + if (v8_flags.deopt_every_n_times > 0) { ExternalReference counter = ExternalReference::stress_deopt_count(isolate()); @@ -4615,7 +4663,7 @@ void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr, __ decl(rax); __ j(not_zero, &nodeopt, Label::kNear); - __ Move(rax, FLAG_deopt_every_n_times); + __ Move(rax, v8_flags.deopt_every_n_times); __ store_rax(counter); __ popq(rax); __ popfq(); @@ -4840,7 +4888,7 @@ void CodeGenerator::AssembleConstructFrame() { // If the frame is bigger than the stack, we throw the stack overflow // exception unconditionally. Thereby we can avoid the integer overflow // check in the condition code. 
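// For example, with a hypothetical --stack-size of 1024 (KB), any frame
// needing fewer than 131072 slots (1 MB / 8-byte slots) takes the precise
// limit check below.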
- if (required_slots * kSystemPointerSize < FLAG_stack_size * KB) { + if (required_slots * kSystemPointerSize < v8_flags.stack_size * KB) { __ movq(kScratchRegister, FieldOperand(kWasmInstanceRegister, WasmInstanceObject::kRealStackLimitAddressOffset)); @@ -4934,7 +4982,7 @@ void CodeGenerator::AssembleReturn(InstructionOperand* additional_pop_count) { if (parameter_slots != 0) { if (additional_pop_count->IsImmediate()) { DCHECK_EQ(g.ToConstant(additional_pop_count).ToInt32(), 0); - } else if (FLAG_debug_code) { + } else if (v8_flags.debug_code) { __ cmpq(g.ToRegister(additional_pop_count), Immediate(0)); __ Assert(equal, AbortReason::kUnexpectedAdditionalPopValue); } @@ -5015,7 +5063,7 @@ void CodeGenerator::PrepareForDeoptimizationExits( void CodeGenerator::IncrementStackAccessCounter( InstructionOperand* source, InstructionOperand* destination) { - DCHECK(FLAG_trace_turbo_stack_accesses); + DCHECK(v8_flags.trace_turbo_stack_accesses); if (!info()->IsOptimizing()) { #if V8_ENABLE_WEBASSEMBLY if (!info()->IsWasm()) return; @@ -5203,7 +5251,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source, __ movq(dst, kScratchRegister); }; - if (FLAG_trace_turbo_stack_accesses) { + if (v8_flags.trace_turbo_stack_accesses) { IncrementStackAccessCounter(source, destination); } @@ -5322,7 +5370,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source, void CodeGenerator::AssembleSwap(InstructionOperand* source, InstructionOperand* destination) { - if (FLAG_trace_turbo_stack_accesses) { + if (v8_flags.trace_turbo_stack_accesses) { IncrementStackAccessCounter(source, destination); IncrementStackAccessCounter(destination, source); } diff --git a/deps/v8/src/compiler/backend/x64/instruction-codes-x64.h b/deps/v8/src/compiler/backend/x64/instruction-codes-x64.h index ed69ab876199e3..173a16316cb171 100644 --- a/deps/v8/src/compiler/backend/x64/instruction-codes-x64.h +++ b/deps/v8/src/compiler/backend/x64/instruction-codes-x64.h @@ -81,7 +81,9 @@ namespace compiler { V(X64Imul) \ V(X64Imul32) \ V(X64ImulHigh32) \ + V(X64ImulHigh64) \ V(X64UmulHigh32) \ + V(X64UmulHigh64) \ V(X64Idiv) \ V(X64Idiv32) \ V(X64Udiv) \ diff --git a/deps/v8/src/compiler/backend/x64/instruction-scheduler-x64.cc b/deps/v8/src/compiler/backend/x64/instruction-scheduler-x64.cc index f50fc8f9dcf358..8bbb7e2519e232 100644 --- a/deps/v8/src/compiler/backend/x64/instruction-scheduler-x64.cc +++ b/deps/v8/src/compiler/backend/x64/instruction-scheduler-x64.cc @@ -37,6 +37,8 @@ int InstructionScheduler::GetTargetInstructionFlags( case kX64Imul32: case kX64ImulHigh32: case kX64UmulHigh32: + case kX64ImulHigh64: + case kX64UmulHigh64: case kX64Not: case kX64Not32: case kX64Neg: @@ -458,6 +460,8 @@ int InstructionScheduler::GetInstructionLatency(const Instruction* instr) { case kX64Imul32: case kX64ImulHigh32: case kX64UmulHigh32: + case kX64ImulHigh64: + case kX64UmulHigh64: case kX64Float32Abs: case kX64Float32Neg: case kX64Float64Abs: diff --git a/deps/v8/src/compiler/backend/x64/instruction-selector-x64.cc b/deps/v8/src/compiler/backend/x64/instruction-selector-x64.cc index 74f38608ef3d39..0da7336254b34d 100644 --- a/deps/v8/src/compiler/backend/x64/instruction-selector-x64.cc +++ b/deps/v8/src/compiler/backend/x64/instruction-selector-x64.cc @@ -339,7 +339,7 @@ ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) { break; case MachineRepresentation::kSimd256: // Fall through. case MachineRepresentation::kNone: // Fall through. - case MachineRepresentation::kMapWord: + case MachineRepresentation::kMapWord: // Fall through. 
UNREACHABLE(); } return opcode; @@ -377,7 +377,7 @@ ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) { return kX64Movdqu; case MachineRepresentation::kSimd256: // Fall through. case MachineRepresentation::kNone: // Fall through. - case MachineRepresentation::kMapWord: + case MachineRepresentation::kMapWord: // Fall through. UNREACHABLE(); } UNREACHABLE(); @@ -597,7 +597,7 @@ void VisitStoreCommon(InstructionSelector* selector, Node* node, const bool is_seqcst = atomic_order && *atomic_order == AtomicMemoryOrder::kSeqCst; - if (FLAG_enable_unconditional_write_barriers && + if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedOrCompressedPointer(store_rep.representation())) { write_barrier_kind = kFullWriteBarrier; } @@ -606,7 +606,8 @@ void VisitStoreCommon(InstructionSelector* selector, Node* node, ? MemoryAccessMode::kMemoryAccessProtected : MemoryAccessMode::kMemoryAccessDirect; - if (write_barrier_kind != kNoWriteBarrier && !FLAG_disable_write_barriers) { + if (write_barrier_kind != kNoWriteBarrier && + !v8_flags.disable_write_barriers) { DCHECK(CanBeTaggedOrCompressedPointer(store_rep.representation())); AddressingMode addressing_mode; InstructionOperand inputs[] = { @@ -1469,10 +1470,23 @@ void InstructionSelector::VisitInt64Mul(Node* node) { VisitMul(this, node, kX64Imul); } +void InstructionSelector::VisitInt64MulWithOverflow(Node* node) { + if (Node* ovf = NodeProperties::FindProjection(node, 1)) { + FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf); + return VisitBinop(this, node, kX64Imul, &cont); + } + FlagsContinuation cont; + VisitBinop(this, node, kX64Imul, &cont); +} + void InstructionSelector::VisitInt32MulHigh(Node* node) { VisitMulHigh(this, node, kX64ImulHigh32); } +void InstructionSelector::VisitInt64MulHigh(Node* node) { + VisitMulHigh(this, node, kX64ImulHigh64); +} + void InstructionSelector::VisitInt32Div(Node* node) { VisitDiv(this, node, kX64Idiv32); } @@ -1509,6 +1523,10 @@ void InstructionSelector::VisitUint32MulHigh(Node* node) { VisitMulHigh(this, node, kX64UmulHigh32); } +void InstructionSelector::VisitUint64MulHigh(Node* node) { + VisitMulHigh(this, node, kX64UmulHigh64); +} + // TryTruncateFloat32ToInt64 and TryTruncateFloat64ToInt64 operations attempt // truncation from 32|64-bit float to 64-bit integer by performing roughly the // following steps: @@ -2828,6 +2846,9 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value, case IrOpcode::kInt64SubWithOverflow: cont->OverwriteAndNegateIfEqual(kOverflow); return VisitBinop(this, node, kX64Sub, cont); + case IrOpcode::kInt64MulWithOverflow: + cont->OverwriteAndNegateIfEqual(kOverflow); + return VisitBinop(this, node, kX64Imul, cont); default: break; } diff --git a/deps/v8/src/compiler/backend/x64/unwinding-info-writer-x64.h b/deps/v8/src/compiler/backend/x64/unwinding-info-writer-x64.h index c85ad46a6338d1..188a66f56f79dd 100644 --- a/deps/v8/src/compiler/backend/x64/unwinding-info-writer-x64.h +++ b/deps/v8/src/compiler/backend/x64/unwinding-info-writer-x64.h @@ -53,7 +53,7 @@ class UnwindingInfoWriter { } private: - bool enabled() const { return FLAG_perf_prof_unwinding_info; } + bool enabled() const { return v8_flags.perf_prof_unwinding_info; } class BlockInitialState : public ZoneObject { public: diff --git a/deps/v8/src/compiler/basic-block-instrumentor.cc b/deps/v8/src/compiler/basic-block-instrumentor.cc index 2d5a02985ab78c..1e9d0246b6d35b 100644 --- a/deps/v8/src/compiler/basic-block-instrumentor.cc +++ 
b/deps/v8/src/compiler/basic-block-instrumentor.cc @@ -64,7 +64,7 @@ BasicBlockProfilerData* BasicBlockInstrumentor::Instrument( // Set the function name. data->SetFunctionName(info->GetDebugName()); // Capture the schedule string before instrumentation. - if (FLAG_turbo_profiling_verbose) { + if (v8_flags.turbo_profiling_verbose) { std::ostringstream os; os << *schedule; data->SetSchedule(os); diff --git a/deps/v8/src/compiler/branch-condition-duplicator.cc b/deps/v8/src/compiler/branch-condition-duplicator.cc index 6b1da58a29b0a6..129270b8e4985e 100644 --- a/deps/v8/src/compiler/branch-condition-duplicator.cc +++ b/deps/v8/src/compiler/branch-condition-duplicator.cc @@ -70,7 +70,7 @@ void BranchConditionDuplicator::DuplicateConditionIfNeeded(Node* node) { if (!IsBranch(node)) return; Node* condNode = node->InputAt(0); - if (condNode->UseCount() > 1 && CanDuplicate(condNode)) { + if (condNode->BranchUseCount() > 1 && CanDuplicate(condNode)) { node->ReplaceInput(0, DuplicateNode(condNode)); } } diff --git a/deps/v8/src/compiler/bytecode-analysis.cc b/deps/v8/src/compiler/bytecode-analysis.cc index 0b5ee5776775fc..419d4aa1e231d7 100644 --- a/deps/v8/src/compiler/bytecode-analysis.cc +++ b/deps/v8/src/compiler/bytecode-analysis.cc @@ -529,6 +529,10 @@ void BytecodeAnalysis::Analyze() { ResumeJumpTarget::Leaf(suspend_id, resume_offset)); } + if (bytecode == Bytecode::kResumeGenerator) { + current_loop_info->mark_resumable(); + } + // If we've reached the header of the loop, pop it off the stack. if (current_offset == current_loop.header_offset) { loop_stack_.pop(); @@ -536,6 +540,10 @@ void BytecodeAnalysis::Analyze() { // If there is still an outer loop, propagate inner loop assignments. LoopInfo* parent_loop_info = loop_stack_.top().loop_info; + if (current_loop_info->resumable()) { + parent_loop_info->mark_resumable(); + } + parent_loop_info->assignments().Union( current_loop_info->assignments()); @@ -704,7 +712,7 @@ void BytecodeAnalysis::Analyze() { } DCHECK(analyze_liveness_); - if (FLAG_trace_environment_liveness) { + if (v8_flags.trace_environment_liveness) { StdoutStream of; PrintLivenessTo(of); } diff --git a/deps/v8/src/compiler/bytecode-analysis.h b/deps/v8/src/compiler/bytecode-analysis.h index aa270c6dee9a0f..ab7b66e03fc402 100644 --- a/deps/v8/src/compiler/bytecode-analysis.h +++ b/deps/v8/src/compiler/bytecode-analysis.h @@ -73,6 +73,8 @@ struct V8_EXPORT_PRIVATE LoopInfo { resume_jump_targets_(zone) {} int parent_offset() const { return parent_offset_; } + bool resumable() const { return resumable_; } + void mark_resumable() { resumable_ = true; } const ZoneVector<ResumeJumpTarget>& resume_jump_targets() const { return resume_jump_targets_; @@ -87,6 +89,7 @@ struct V8_EXPORT_PRIVATE LoopInfo { private: // The offset to the parent loop, or -1 if there is no parent. 
int parent_offset_; + bool resumable_ = false; BytecodeLoopAssignments assignments_; ZoneVector<ResumeJumpTarget> resume_jump_targets_; }; diff --git a/deps/v8/src/compiler/bytecode-graph-builder.cc b/deps/v8/src/compiler/bytecode-graph-builder.cc index be5a9bca7db3e6..f43199cd37a53f 100644 --- a/deps/v8/src/compiler/bytecode-graph-builder.cc +++ b/deps/v8/src/compiler/bytecode-graph-builder.cc @@ -1672,20 +1672,6 @@ void BytecodeGraphBuilder::VisitDefineKeyedOwnPropertyInLiteral() { environment()->RecordAfterState(node, Environment::kAttachFrameState); } -void BytecodeGraphBuilder::VisitCollectTypeProfile() { - PrepareEagerCheckpoint(); - - Node* position = - jsgraph()->Constant(bytecode_iterator().GetImmediateOperand(0)); - Node* value = environment()->LookupAccumulator(); - Node* vector = jsgraph()->Constant(feedback_vector()); - - const Operator* op = javascript()->CallRuntime(Runtime::kCollectTypeProfile); - - Node* node = NewNode(op, position, value, vector); - environment()->RecordAfterState(node, Environment::kAttachFrameState); -} - void BytecodeGraphBuilder::VisitLdaContextSlot() { const Operator* op = javascript()->LoadContext( bytecode_iterator().GetUnsignedImmediateOperand(2), @@ -3229,9 +3215,18 @@ void BytecodeGraphBuilder::VisitGetSuperConstructor() { Environment::kAttachFrameState); } -void BytecodeGraphBuilder::VisitFindNonDefaultConstructor() { - // TODO(v8:13091): Implement. - CHECK(false); +void BytecodeGraphBuilder::VisitFindNonDefaultConstructorOrConstruct() { + Node* this_function = + environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(0)); + Node* new_target = + environment()->LookupRegister(bytecode_iterator().GetRegisterOperand(1)); + + Node* node = NewNode(javascript()->FindNonDefaultConstructorOrConstruct(), + this_function, new_target); + + environment()->BindRegistersToProjections( + bytecode_iterator().GetRegisterOperand(2), node, + Environment::kAttachFrameState); } void BytecodeGraphBuilder::BuildCompareOp(const Operator* op) { diff --git a/deps/v8/src/compiler/code-assembler.cc b/deps/v8/src/compiler/code-assembler.cc index d07482f24ca48a..7b69cea0272b3f 100644 --- a/deps/v8/src/compiler/code-assembler.cc +++ b/deps/v8/src/compiler/code-assembler.cc @@ -501,7 +501,7 @@ void CodeAssembler::Unreachable() { } void CodeAssembler::Comment(std::string str) { - if (!FLAG_code_comments) return; + if (!v8_flags.code_comments) return; raw_assembler()->Comment(str); } @@ -1549,7 +1549,7 @@ void CodeAssemblerLabel::Bind(AssemblerDebugInfo debug_info) { << "\n# previous: " << *label_->block(); FATAL("%s", str.str().c_str()); } - if (FLAG_enable_source_at_csa_bind) { + if (v8_flags.enable_source_at_csa_bind) { state_->raw_assembler_->SetCurrentExternalSourcePosition( {debug_info.file, debug_info.line}); } diff --git a/deps/v8/src/compiler/code-assembler.h b/deps/v8/src/compiler/code-assembler.h index a071b31c602b69..66dbe828536a5e 100644 --- a/deps/v8/src/compiler/code-assembler.h +++ b/deps/v8/src/compiler/code-assembler.h @@ -47,6 +47,7 @@ class JSCollator; class JSCollection; class JSDateTimeFormat; class JSDisplayNames; +class JSDurationFormat; class JSListFormat; class JSLocale; class JSNumberFormat; @@ -244,9 +245,12 @@ class CodeAssemblerParameterizedLabel; V(IntPtrAdd, WordT, WordT, WordT) \ V(IntPtrSub, WordT, WordT, WordT) \ V(IntPtrMul, WordT, WordT, WordT) \ + V(IntPtrMulHigh, IntPtrT, IntPtrT, IntPtrT) \ + V(UintPtrMulHigh, UintPtrT, UintPtrT, UintPtrT) \ V(IntPtrDiv, IntPtrT, IntPtrT, IntPtrT) \ V(IntPtrAddWithOverflow, PAIR_TYPE(IntPtrT, 
BoolT), IntPtrT, IntPtrT) \ V(IntPtrSubWithOverflow, PAIR_TYPE(IntPtrT, BoolT), IntPtrT, IntPtrT) \ + V(IntPtrMulWithOverflow, PAIR_TYPE(IntPtrT, BoolT), IntPtrT, IntPtrT) \ V(Int32Add, Word32T, Word32T, Word32T) \ V(Int32AddWithOverflow, PAIR_TYPE(Int32T, BoolT), Int32T, Int32T) \ V(Int32Sub, Word32T, Word32T, Word32T) \ @@ -259,6 +263,8 @@ class CodeAssemblerParameterizedLabel; V(Int64Sub, Word64T, Word64T, Word64T) \ V(Int64SubWithOverflow, PAIR_TYPE(Int64T, BoolT), Int64T, Int64T) \ V(Int64Mul, Word64T, Word64T, Word64T) \ + V(Int64MulHigh, Int64T, Int64T, Int64T) \ + V(Uint64MulHigh, Uint64T, Uint64T, Uint64T) \ V(Int64Div, Int64T, Int64T, Int64T) \ V(Int64Mod, Int64T, Int64T, Int64T) \ V(WordOr, WordT, WordT, WordT) \ @@ -435,7 +441,7 @@ class V8_EXPORT_PRIVATE CodeAssembler { !std::is_convertible<TNode<PreviousType>, TNode<A>>::value, "Unnecessary CAST: types are convertible."); #ifdef DEBUG - if (FLAG_debug_code) { + if (v8_flags.debug_code) { TNode<ExternalReference> function = code_assembler_->ExternalConstant( ExternalReference::check_object_type()); code_assembler_->CallCFunction( @@ -618,13 +624,13 @@ class V8_EXPORT_PRIVATE CodeAssembler { void DebugBreak(); void Unreachable(); void Comment(const char* msg) { - if (!FLAG_code_comments) return; + if (!v8_flags.code_comments) return; Comment(std::string(msg)); } void Comment(std::string msg); template <class... Args> void Comment(Args&&... args) { - if (!FLAG_code_comments) return; + if (!v8_flags.code_comments) return; std::ostringstream s; USE((s << std::forward<Args>(args))...); Comment(s.str()); diff --git a/deps/v8/src/compiler/compilation-dependencies.cc b/deps/v8/src/compiler/compilation-dependencies.cc index c356f3b9ac452e..2c219f5d6757aa 100644 --- a/deps/v8/src/compiler/compilation-dependencies.cc +++ b/deps/v8/src/compiler/compilation-dependencies.cc @@ -125,7 +125,7 @@ class PendingDependencies final { } void InstallAll(Isolate* isolate, Handle<Code> code) { - if (V8_UNLIKELY(FLAG_predictable)) { + if (V8_UNLIKELY(v8_flags.predictable)) { InstallAllPredictable(isolate, code); return; } @@ -140,7 +140,7 @@ class PendingDependencies final { } void InstallAllPredictable(Isolate* isolate, Handle<Code> code) { - CHECK(FLAG_predictable); + CHECK(v8_flags.predictable); // First, guarantee predictable iteration order. using HandleAndGroup = std::pair<Handle<HeapObject>, DependentCode::DependencyGroups>; @@ -1065,7 +1065,7 @@ void CompilationDependencies::DependOnConstantInDictionaryPrototypeChain( AllocationType CompilationDependencies::DependOnPretenureMode( const AllocationSiteRef& site) { - if (!FLAG_allocation_site_pretenuring) return AllocationType::kYoung; + if (!v8_flags.allocation_site_pretenuring) return AllocationType::kYoung; AllocationType allocation = site.GetAllocationType(); RecordDependency(zone_->New<PretenureModeDependency>(site, allocation)); return allocation; @@ -1184,7 +1184,7 @@ void CompilationDependencies::DependOnOwnConstantDictionaryProperty( V8_INLINE void TraceInvalidCompilationDependency( const CompilationDependency* d) { - DCHECK(FLAG_trace_compilation_dependencies); + DCHECK(v8_flags.trace_compilation_dependencies); DCHECK(!d->IsValid()); PrintF("Compilation aborted due to invalid dependency: %s\n", d->ToString()); } @@ -1202,7 +1202,7 @@ bool CompilationDependencies::Commit(Handle<Code> code) { // can call EnsureHasInitialMap, which can invalidate a // StableMapDependency on the prototype object's map. 
if (!dep->IsValid()) { - if (FLAG_trace_compilation_dependencies) { + if (v8_flags.trace_compilation_dependencies) { TraceInvalidCompilationDependency(dep); } dependencies_.clear(); @@ -1226,7 +1226,7 @@ bool CompilationDependencies::Commit(Handle<Code> code) { // deoptimization. // 2. since the function state was deemed consistent above, that means the // compilation saw a self-consistent state of the jsfunction. - if (FLAG_stress_gc_during_compilation) { + if (v8_flags.stress_gc_during_compilation) { broker_->isolate()->heap()->PreciseCollectAllGarbage( Heap::kForcedGC, GarbageCollectionReason::kTesting, kNoGCCallbackFlags); } @@ -1242,13 +1242,13 @@ bool CompilationDependencies::Commit(Handle<Code> code) { } bool CompilationDependencies::PrepareInstall() { - if (V8_UNLIKELY(FLAG_predictable)) { + if (V8_UNLIKELY(v8_flags.predictable)) { return PrepareInstallPredictable(); } for (auto dep : dependencies_) { if (!dep->IsValid()) { - if (FLAG_trace_compilation_dependencies) { + if (v8_flags.trace_compilation_dependencies) { TraceInvalidCompilationDependency(dep); } dependencies_.clear(); @@ -1260,7 +1260,7 @@ bool CompilationDependencies::PrepareInstall() { } bool CompilationDependencies::PrepareInstallPredictable() { - CHECK(FLAG_predictable); + CHECK(v8_flags.predictable); std::vector<const CompilationDependency*> deps(dependencies_.begin(), dependencies_.end()); @@ -1268,7 +1268,7 @@ bool CompilationDependencies::PrepareInstallPredictable() { for (auto dep : deps) { if (!dep->IsValid()) { - if (FLAG_trace_compilation_dependencies) { + if (v8_flags.trace_compilation_dependencies) { TraceInvalidCompilationDependency(dep); } dependencies_.clear(); diff --git a/deps/v8/src/compiler/constant-folding-reducer.cc b/deps/v8/src/compiler/constant-folding-reducer.cc index c768441d29adc3..5e74ba75352cc3 100644 --- a/deps/v8/src/compiler/constant-folding-reducer.cc +++ b/deps/v8/src/compiler/constant-folding-reducer.cc @@ -42,7 +42,7 @@ Node* TryGetConstant(JSGraph* jsgraph, Node* node) { } bool IsAlreadyBeingFolded(Node* node) { - DCHECK(FLAG_assert_types); + DCHECK(v8_flags.assert_types); if (node->opcode() == IrOpcode::kFoldConstant) return true; for (Edge edge : node->use_edges()) { if (NodeProperties::IsValueEdge(edge) && @@ -70,7 +70,7 @@ Reduction ConstantFoldingReducer::Reduce(Node* node) { Node* constant = TryGetConstant(jsgraph(), node); if (constant != nullptr) { DCHECK(NodeProperties::IsTyped(constant)); - if (!FLAG_assert_types) { + if (!v8_flags.assert_types) { DCHECK_EQ(node->op()->ControlOutputCount(), 0); ReplaceWithValue(node, constant); return Replace(constant); diff --git a/deps/v8/src/compiler/control-equivalence.cc b/deps/v8/src/compiler/control-equivalence.cc index 4649cf0d6be98f..f1708139626769 100644 --- a/deps/v8/src/compiler/control-equivalence.cc +++ b/deps/v8/src/compiler/control-equivalence.cc @@ -5,9 +5,9 @@ #include "src/compiler/control-equivalence.h" #include "src/compiler/node-properties.h" -#define TRACE(...) \ - do { \ - if (FLAG_trace_turbo_ceq) PrintF(__VA_ARGS__); \ +#define TRACE(...) 
\ + do { \ + if (v8_flags.trace_turbo_ceq) PrintF(__VA_ARGS__); \ } while (false) namespace v8 { @@ -219,7 +219,7 @@ void ControlEquivalence::BracketListDelete(BracketList& blist, Node* to, void ControlEquivalence::BracketListTRACE(BracketList& blist) { - if (FLAG_trace_turbo_ceq) { + if (v8_flags.trace_turbo_ceq) { TRACE(" BList: "); for (Bracket bracket : blist) { TRACE("{%d->%d} ", bracket.from->id(), bracket.to->id()); diff --git a/deps/v8/src/compiler/csa-load-elimination.cc b/deps/v8/src/compiler/csa-load-elimination.cc index 4cfce6fa5111f8..43f5572e78cbae 100644 --- a/deps/v8/src/compiler/csa-load-elimination.cc +++ b/deps/v8/src/compiler/csa-load-elimination.cc @@ -14,7 +14,7 @@ namespace internal { namespace compiler { Reduction CsaLoadElimination::Reduce(Node* node) { - if (FLAG_trace_turbo_load_elimination) { + if (v8_flags.trace_turbo_load_elimination) { if (node->op()->EffectInputCount() > 0) { PrintF(" visit #%d:%s", node->id(), node->op()->mnemonic()); if (node->op()->ValueInputCount() > 0) { diff --git a/deps/v8/src/compiler/effect-control-linearizer.cc b/deps/v8/src/compiler/effect-control-linearizer.cc index a4d4e2d18d6a3e..db0504f1f3ac2a 100644 --- a/deps/v8/src/compiler/effect-control-linearizer.cc +++ b/deps/v8/src/compiler/effect-control-linearizer.cc @@ -84,6 +84,7 @@ class EffectControlLinearizer { Node* LowerCheckReceiverOrNullOrUndefined(Node* node, Node* frame_state); Node* LowerCheckString(Node* node, Node* frame_state); Node* LowerCheckBigInt(Node* node, Node* frame_state); + Node* LowerCheckBigInt64(Node* node, Node* frame_state); Node* LowerCheckSymbol(Node* node, Node* frame_state); void LowerCheckIf(Node* node, Node* frame_state); Node* LowerCheckedInt32Add(Node* node, Node* frame_state); @@ -93,6 +94,7 @@ class EffectControlLinearizer { Node* LowerCheckedUint32Div(Node* node, Node* frame_state); Node* LowerCheckedUint32Mod(Node* node, Node* frame_state); Node* LowerCheckedInt32Mul(Node* node, Node* frame_state); + Node* LowerCheckedBigInt64Add(Node* node, Node* frame_state); Node* LowerCheckedInt32ToTaggedSigned(Node* node, Node* frame_state); Node* LowerCheckedInt64ToInt32(Node* node, Node* frame_state); Node* LowerCheckedInt64ToTaggedSigned(Node* node, Node* frame_state); @@ -101,6 +103,7 @@ class EffectControlLinearizer { Node* LowerCheckedUint32ToTaggedSigned(Node* node, Node* frame_state); Node* LowerCheckedUint64Bounds(Node* node, Node* frame_state); Node* LowerCheckedUint64ToInt32(Node* node, Node* frame_state); + Node* LowerCheckedUint64ToInt64(Node* node, Node* frame_state); Node* LowerCheckedUint64ToTaggedSigned(Node* node, Node* frame_state); Node* LowerCheckedFloat64ToInt32(Node* node, Node* frame_state); Node* LowerCheckedFloat64ToInt64(Node* node, Node* frame_state); @@ -198,6 +201,7 @@ class EffectControlLinearizer { GraphAssemblerLabel<0>* bailout); Node* AdaptFastCallArgument(Node* node, CTypeInfo arg_type, GraphAssemblerLabel<0>* if_error); + Node* ClampFastCallArgument(Node* input, CTypeInfo::Type scalar_type); struct AdaptOverloadedFastCallResult { Node* target_address; @@ -999,9 +1003,15 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node, case IrOpcode::kCheckString: result = LowerCheckString(node, frame_state); break; + case IrOpcode::kCheckedUint64ToInt64: + result = LowerCheckedUint64ToInt64(node, frame_state); + break; case IrOpcode::kCheckBigInt: result = LowerCheckBigInt(node, frame_state); break; + case IrOpcode::kCheckBigInt64: + result = LowerCheckBigInt64(node, frame_state); + break; case 
IrOpcode::kCheckInternalizedString: result = LowerCheckInternalizedString(node, frame_state); break; @@ -1029,6 +1039,9 @@ bool EffectControlLinearizer::TryWireInStateEffect(Node* node, case IrOpcode::kCheckedInt32Mul: result = LowerCheckedInt32Mul(node, frame_state); break; + case IrOpcode::kCheckedBigInt64Add: + result = LowerCheckedBigInt64Add(node, frame_state); + break; case IrOpcode::kCheckedInt32ToTaggedSigned: result = LowerCheckedInt32ToTaggedSigned(node, frame_state); break; @@ -2554,6 +2567,18 @@ Node* EffectControlLinearizer::LowerCheckedUint64ToInt32(Node* node, return __ TruncateInt64ToInt32(value); } +Node* EffectControlLinearizer::LowerCheckedUint64ToInt64(Node* node, + Node* frame_state) { + Node* value = node->InputAt(0); + const CheckParameters& params = CheckParametersOf(node->op()); + + Node* check = __ Uint64LessThanOrEqual( + value, __ Uint64Constant(std::numeric_limits<int64_t>::max())); + __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check, + frame_state); + return value; +} + Node* EffectControlLinearizer::LowerCheckedUint64ToTaggedSigned( Node* node, Node* frame_state) { Node* value = node->InputAt(0); @@ -2912,6 +2937,73 @@ Node* EffectControlLinearizer::LowerCheckBigInt(Node* node, Node* frame_state) { return value; } +Node* EffectControlLinearizer::LowerCheckBigInt64(Node* node, + Node* frame_state) { + DCHECK(machine()->Is64()); + + auto done = __ MakeLabel(); + auto if_not_zero = __ MakeLabel(); + + Node* value = node->InputAt(0); + const CheckParameters& params = CheckParametersOf(node->op()); + + // Check for Smi. + Node* smi_check = ObjectIsSmi(value); + __ DeoptimizeIf(DeoptimizeReason::kSmi, params.feedback(), smi_check, + frame_state); + + // Check for BigInt. + Node* value_map = __ LoadField(AccessBuilder::ForMap(), value); + Node* bi_check = __ TaggedEqual(value_map, __ BigIntMapConstant()); + __ DeoptimizeIfNot(DeoptimizeReason::kWrongInstanceType, params.feedback(), + bi_check, frame_state); + + // Check for BigInt64. + Node* bitfield = __ LoadField(AccessBuilder::ForBigIntBitfield(), value); + __ GotoIfNot(__ Word32Equal(bitfield, __ Int32Constant(0)), &if_not_zero); + __ Goto(&done); + + __ Bind(&if_not_zero); + { + // Length must be 1. Compare it with 2 to avoid a right shift. + Node* length = + __ Word32And(bitfield, __ Int32Constant(BigInt::LengthBits::kMask)); + __ DeoptimizeIfNot( + DeoptimizeReason::kWrongInstanceType, params.feedback(), + __ Word32Equal(length, __ Int32Constant(uint32_t{1} + << BigInt::LengthBits::kShift)), + frame_state); + + Node* lsd = + __ LoadField(AccessBuilder::ForBigIntLeastSignificantDigit64(), value); + // Accepted small BigInts are in the range [-2^63 + 1, 2^63 - 1]. + // Excluding -2^63 from the range makes the check simpler and faster. 
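+    // (BigInts are stored sign-and-magnitude, with the sign bit in {bitfield}, +    // so the single unsigned comparison of the magnitude digit below covers +    // both signs at once; admitting -2^63 would require an extra +    // sign-dependent comparison.)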
+ Node* bi64_check = __ Uint64LessThanOrEqual( + lsd, __ Int64Constant(std::numeric_limits<int64_t>::max())); + __ DeoptimizeIfNot(DeoptimizeReason::kWrongInstanceType, params.feedback(), + bi64_check, frame_state); + __ Goto(&done); + } + + __ Bind(&done); + return value; +} + +Node* EffectControlLinearizer::LowerCheckedBigInt64Add(Node* node, + Node* frame_state) { + DCHECK(machine()->Is64()); + + Node* lhs = node->InputAt(0); + Node* rhs = node->InputAt(1); + + Node* value = __ Int64AddWithOverflow(lhs, rhs); + + Node* check = __ Projection(1, value); + __ DeoptimizeIf(DeoptimizeReason::kOverflow, FeedbackSource(), check, + frame_state); + return __ Projection(0, value); +} + Node* EffectControlLinearizer::LowerChangeInt64ToBigInt(Node* node) { DCHECK(machine()->Is64()); @@ -5010,14 +5102,112 @@ Node* EffectControlLinearizer::AdaptFastCallTypedArrayArgument( return stack_slot; } +Node* EffectControlLinearizer::ClampFastCallArgument( + Node* input, CTypeInfo::Type scalar_type) { + Node* min = nullptr; + Node* max = nullptr; + switch (scalar_type) { + case CTypeInfo::Type::kInt32: + min = __ Float64Constant(std::numeric_limits<int32_t>::min()); + max = __ Float64Constant(std::numeric_limits<int32_t>::max()); + break; + case CTypeInfo::Type::kUint32: + min = __ Float64Constant(0); + max = __ Float64Constant(std::numeric_limits<uint32_t>::max()); + break; + case CTypeInfo::Type::kInt64: + min = __ Float64Constant(kMinSafeInteger); + max = __ Float64Constant(kMaxSafeInteger); + break; + case CTypeInfo::Type::kUint64: + min = __ Float64Constant(0); + max = __ Float64Constant(kMaxSafeInteger); + break; + default: + UNREACHABLE(); + } + CHECK_NOT_NULL(min); + CHECK_NOT_NULL(max); + + Node* clamped = graph()->NewNode( + common()->Select(MachineRepresentation::kFloat64), + graph()->NewNode(machine()->Float64LessThan(), min, input), + graph()->NewNode( + common()->Select(MachineRepresentation::kFloat64), + graph()->NewNode(machine()->Float64LessThan(), input, max), input, + max), + min); + + Node* rounded = graph()->NewNode( + machine()->Float64RoundTiesEven().placeholder(), clamped); + + auto if_zero = __ MakeDeferredLabel(); + auto if_zero_or_nan = __ MakeDeferredLabel(); + auto check_done = __ MakeLabel(); + auto check_for_nan = __ MakeLabel(); + auto done = __ MakeLabel(MachineRepresentation::kWord64); + + Node* check_is_zero = __ Float64Equal(rounded, __ Float64Constant(0)); + __ Branch(check_is_zero, &check_for_nan, &check_done); + + // Check if {rounded} is NaN. 
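+  // (NaN is the only float64 value that compares unequal to itself, so the +  // self-equality check below identifies it.)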
+ __ Bind(&check_for_nan); + Node* diff = __ Float64Equal(rounded, rounded); + Node* check_is_nan = __ Word32Equal(diff, __ Int32Constant(0)); + __ Branch(check_is_nan, &if_zero_or_nan, &check_done); + + __ Bind(&if_zero_or_nan); + { + switch (scalar_type) { + case CTypeInfo::Type::kInt32: + __ Goto(&done, __ Int32Constant(0)); + break; + case CTypeInfo::Type::kUint32: + __ Goto(&done, __ Uint32Constant(0)); + break; + case CTypeInfo::Type::kInt64: + __ Goto(&done, __ Int64Constant(0)); + break; + case CTypeInfo::Type::kUint64: + __ Goto(&done, __ Uint64Constant(0)); + break; + default: + UNREACHABLE(); + } + } + + __ Bind(&check_done); + { + switch (scalar_type) { + case CTypeInfo::Type::kInt32: + __ Goto(&done, __ ChangeFloat64ToInt32(rounded)); + break; + case CTypeInfo::Type::kUint32: + __ Goto(&done, __ ChangeFloat64ToUint32(rounded)); + break; + case CTypeInfo::Type::kInt64: + __ Goto(&done, __ ChangeFloat64ToInt64(rounded)); + break; + case CTypeInfo::Type::kUint64: + __ Goto(&done, __ ChangeFloat64ToUint64(rounded)); + break; + default: + UNREACHABLE(); + } + } + + __ Bind(&done); + return done.PhiAt(0); +} + Node* EffectControlLinearizer::AdaptFastCallArgument( Node* node, CTypeInfo arg_type, GraphAssemblerLabel<0>* if_error) { int kAlign = alignof(uintptr_t); int kSize = sizeof(uintptr_t); switch (arg_type.GetSequenceType()) { case CTypeInfo::SequenceType::kScalar: { - if (uint8_t(arg_type.GetFlags()) & - uint8_t(CTypeInfo::Flags::kEnforceRangeBit)) { + uint8_t flags = uint8_t(arg_type.GetFlags()); + if (flags & uint8_t(CTypeInfo::Flags::kEnforceRangeBit)) { Node* truncation; switch (arg_type.GetType()) { case CTypeInfo::Type::kInt32: @@ -5037,9 +5227,12 @@ Node* EffectControlLinearizer::AdaptFastCallArgument( __ GotoIfNot(__ Projection(1, truncation), if_error); return __ Projection(0, truncation); default: { + __ Goto(if_error); return node; } } + } else if (flags & uint8_t(CTypeInfo::Flags::kClampBit)) { + return ClampFastCallArgument(node, arg_type.GetType()); } else { switch (arg_type.GetType()) { case CTypeInfo::Type::kV8Value: { diff --git a/deps/v8/src/compiler/escape-analysis-reducer.cc b/deps/v8/src/compiler/escape-analysis-reducer.cc index d0e705610b3bad..9f760f2c0b268d 100644 --- a/deps/v8/src/compiler/escape-analysis-reducer.cc +++ b/deps/v8/src/compiler/escape-analysis-reducer.cc @@ -15,9 +15,9 @@ namespace internal { namespace compiler { #ifdef DEBUG -#define TRACE(...) \ - do { \ - if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \ +#define TRACE(...) \ + do { \ + if (v8_flags.trace_turbo_escape) PrintF(__VA_ARGS__); \ } while (false) #else #define TRACE(...) diff --git a/deps/v8/src/compiler/escape-analysis.cc b/deps/v8/src/compiler/escape-analysis.cc index 94e5c86f97fefe..ab22f6d7484be3 100644 --- a/deps/v8/src/compiler/escape-analysis.cc +++ b/deps/v8/src/compiler/escape-analysis.cc @@ -14,9 +14,9 @@ #include "src/objects/map-inl.h" #ifdef DEBUG -#define TRACE(...) \ - do { \ - if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \ +#define TRACE(...) \ + do { \ + if (v8_flags.trace_turbo_escape) PrintF(__VA_ARGS__); \ } while (false) #else #define TRACE(...) @@ -616,8 +616,11 @@ void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current, Node* value = current->ValueInput(1); const VirtualObject* vobject = current->GetVirtualObject(object); Variable var; + // BoundedSize fields cannot currently be materialized by the deoptimizer, + // so we must not dematerialize them.
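+  // (Such stores therefore take the fallback path below instead of being +  // folded into the virtual object's field state.)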
if (vobject && !vobject->HasEscaped() && - vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) { + vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var) && + !FieldAccessOf(op).is_bounded_size_access) { current->Set(var, value); current->MarkForDeletion(); } else { diff --git a/deps/v8/src/compiler/fast-api-calls.cc b/deps/v8/src/compiler/fast-api-calls.cc index 08f9554a6f20d1..02faf86673d4a1 100644 --- a/deps/v8/src/compiler/fast-api-calls.cc +++ b/deps/v8/src/compiler/fast-api-calls.cc @@ -175,7 +175,7 @@ Node* FastApiCallBuilder::WrapFastCall(const CallDescriptor* call_descriptor, ExternalReference::javascript_execution_assert(isolate())); static_assert(sizeof(bool) == 1, "Wrong assumption about boolean size."); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { auto do_store = __ MakeLabel(); Node* old_scope_value = __ Load(MachineType::Int8(), javascript_execution_assert, 0); diff --git a/deps/v8/src/compiler/feedback-source.h b/deps/v8/src/compiler/feedback-source.h index 29c22cde9c7776..afa3e6bcb84743 100644 --- a/deps/v8/src/compiler/feedback-source.h +++ b/deps/v8/src/compiler/feedback-source.h @@ -43,6 +43,9 @@ bool operator!=(FeedbackSource const&, FeedbackSource const&); V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os, FeedbackSource const&); +inline size_t hash_value(const FeedbackSource& value) { + return FeedbackSource::Hash()(value); +} } // namespace compiler } // namespace internal diff --git a/deps/v8/src/compiler/frame.cc b/deps/v8/src/compiler/frame.cc index 0f2a2b478b2406..dd4abbd8403234 100644 --- a/deps/v8/src/compiler/frame.cc +++ b/deps/v8/src/compiler/frame.cc @@ -46,7 +46,7 @@ void FrameAccessState::MarkHasFrame(bool state) { } void FrameAccessState::SetFrameAccessToDefault() { - if (has_frame() && !FLAG_turbo_sp_frame_access) { + if (has_frame() && !v8_flags.turbo_sp_frame_access) { SetFrameAccessToFP(); } else { SetFrameAccessToSP(); diff --git a/deps/v8/src/compiler/globals.h b/deps/v8/src/compiler/globals.h index c379ecf20aecf9..a20a0044d96874 100644 --- a/deps/v8/src/compiler/globals.h +++ b/deps/v8/src/compiler/globals.h @@ -22,7 +22,7 @@ namespace compiler { // TODO(jgruber): Remove once we've made a decision whether to collect feedback // unconditionally. inline bool CollectFeedbackInGenericLowering() { - return FLAG_turbo_collect_feedback_in_generic_lowering; + return v8_flags.turbo_collect_feedback_in_generic_lowering; } enum class StackCheckKind : uint8_t { @@ -50,6 +50,25 @@ inline size_t hash_value(StackCheckKind kind) { return static_cast<size_t>(kind); } +enum class CheckForMinusZeroMode : uint8_t { + kCheckForMinusZero, + kDontCheckForMinusZero, +}; + +inline size_t hash_value(CheckForMinusZeroMode mode) { + return static_cast<size_t>(mode); +} + +inline std::ostream& operator<<(std::ostream& os, CheckForMinusZeroMode mode) { + switch (mode) { + case CheckForMinusZeroMode::kCheckForMinusZero: + return os << "check-for-minus-zero"; + case CheckForMinusZeroMode::kDontCheckForMinusZero: + return os << "dont-check-for-minus-zero"; + } + UNREACHABLE(); +} + // The CallFeedbackRelation provides the meaning of the call feedback for a // TurboFan JSCall operator // - kReceiver: The call target was Function.prototype.apply and its receiver @@ -97,4 +116,12 @@ const int kMaxFastLiteralProperties = JSObject::kMaxInObjectProperties; #define V8_ENABLE_FP_PARAMS_IN_C_LINKAGE #endif +// The biggest double value that fits within the int64_t/uint64_t value range. 
+// This is different from safe integer range in that there are gaps of integers +// in-between that cannot be represented as a double. +constexpr double kMaxDoubleRepresentableInt64 = 9223372036854774784.0; +constexpr double kMinDoubleRepresentableInt64 = + std::numeric_limits<int64_t>::min(); +constexpr double kMaxDoubleRepresentableUint64 = 18446744073709549568.0; + #endif // V8_COMPILER_GLOBALS_H_ diff --git a/deps/v8/src/compiler/graph-assembler.cc b/deps/v8/src/compiler/graph-assembler.cc index 813615d8b855b3..8d032235b7e023 100644 --- a/deps/v8/src/compiler/graph-assembler.cc +++ b/deps/v8/src/compiler/graph-assembler.cc @@ -5,6 +5,7 @@ #include "src/compiler/graph-assembler.h" #include "src/codegen/callable.h" +#include "src/compiler/access-builder.h" #include "src/compiler/graph-reducer.h" #include "src/compiler/linkage.h" // For TNode types. @@ -215,6 +216,10 @@ Node* JSGraphAssembler::Allocate(AllocationType allocation, Node* size) { effect(), control())); } +TNode<Map> JSGraphAssembler::LoadMap(TNode<HeapObject> object) { + return TNode<Map>::UncheckedCast(LoadField(AccessBuilder::ForMap(), object)); +} + Node* JSGraphAssembler::LoadField(FieldAccess const& access, Node* object) { Node* value = AddNode(graph()->NewNode(simplified()->LoadField(access), object, effect(), control())); @@ -364,14 +369,14 @@ TNode<Object> JSGraphAssembler::ConvertTaggedHoleToUndefined( TNode<FixedArrayBase> JSGraphAssembler::MaybeGrowFastElements( ElementsKind kind, const FeedbackSource& feedback, TNode<JSArray> array, - TNode<FixedArrayBase> elements, TNode<Number> new_length, + TNode<FixedArrayBase> elements, TNode<Number> index_needed, TNode<Number> old_length) { GrowFastElementsMode mode = IsDoubleElementsKind(kind) ? GrowFastElementsMode::kDoubleElements : GrowFastElementsMode::kSmiOrObjectElements; return AddNode<FixedArrayBase>(graph()->NewNode( simplified()->MaybeGrowFastElements(mode, feedback), array, elements, - new_length, old_length, effect(), control())); + index_needed, old_length, effect(), control())); } Node* JSGraphAssembler::StringCharCodeAt(TNode<String> string, diff --git a/deps/v8/src/compiler/graph-assembler.h b/deps/v8/src/compiler/graph-assembler.h index 6dd3e3d7cfad4f..0ace1cf8780df3 100644 --- a/deps/v8/src/compiler/graph-assembler.h +++ b/deps/v8/src/compiler/graph-assembler.h @@ -5,6 +5,9 @@ #ifndef V8_COMPILER_GRAPH_ASSEMBLER_H_ #define V8_COMPILER_GRAPH_ASSEMBLER_H_ +#include <type_traits> + +#include "src/base/small-vector.h" #include "src/codegen/tnode.h" #include "src/compiler/feedback-source.h" #include "src/compiler/js-graph.h" @@ -37,6 +40,7 @@ class Reducer; V(ChangeFloat64ToInt32) \ V(ChangeFloat64ToInt64) \ V(ChangeFloat64ToUint32) \ + V(ChangeFloat64ToUint64) \ V(ChangeInt32ToFloat64) \ V(ChangeInt32ToInt64) \ V(ChangeInt64ToFloat64) \ @@ -110,6 +114,7 @@ class Reducer; #define CHECKED_ASSEMBLER_MACH_BINOP_LIST(V) \ V(Int32AddWithOverflow) \ + V(Int64AddWithOverflow) \ V(Int32Div) \ V(Int32Mod) \ V(Int32MulWithOverflow) \ @@ -150,10 +155,44 @@ class GraphAssembler; enum class GraphAssemblerLabelType { kDeferred, kNonDeferred, kLoop }; +namespace detail { +constexpr size_t kGraphAssemblerLabelDynamicCount = ~0u; + +template <size_t VarCount> +struct GraphAssemblerHelper { + template <typename T> + using Array = std::array<T, VarCount>; + static constexpr bool kIsDynamic = false; + + static Array<Node*> InitNodeArray(const Array<MachineRepresentation>& reps) { + return {}; + } +}; +template <> +struct GraphAssemblerHelper<kGraphAssemblerLabelDynamicCount> { + 
// TODO(leszeks): We could allow other sizes of small vector here, by encoding + // the size in the negative VarCount. + template <typename T> + using Array = base::SmallVector<T, 4>; + static constexpr bool kIsDynamic = true; + + static Array<Node*> InitNodeArray(const Array<MachineRepresentation>& reps) { + return Array<Node*>(reps.size()); + } +}; +} // namespace detail + // Label with statically known count of incoming branches and phis. template <size_t VarCount> class GraphAssemblerLabel { + using Helper = detail::GraphAssemblerHelper<VarCount>; + template <typename T> + using Array = typename Helper::template Array<T>; + static constexpr bool kIsDynamic = Helper::kIsDynamic; + public: + size_t Count() { return representations_.size(); } + Node* PhiAt(size_t index); template <typename T> @@ -166,10 +205,11 @@ class GraphAssemblerLabel { bool IsUsed() const { return merged_count_ > 0; } GraphAssemblerLabel(GraphAssemblerLabelType type, int loop_nesting_level, - const std::array<MachineRepresentation, VarCount>& reps) + Array<MachineRepresentation> reps) : type_(type), loop_nesting_level_(loop_nesting_level), - representations_(reps) {} + bindings_(Helper::InitNodeArray(reps)), + representations_(std::move(reps)) {} ~GraphAssemblerLabel() { DCHECK(IsBound() || merged_count_ == 0); } @@ -192,10 +232,43 @@ class GraphAssemblerLabel { size_t merged_count_ = 0; Node* effect_; Node* control_; - std::array<Node*, VarCount> bindings_; - const std::array<MachineRepresentation, VarCount> representations_; + Array<Node*> bindings_; + const Array<MachineRepresentation> representations_; }; +using GraphAssemblerDynamicLabel = + GraphAssemblerLabel<detail::kGraphAssemblerLabelDynamicCount>; + +namespace detail { +template <typename T, typename Enable, typename... Us> +struct GraphAssemblerLabelForXHelper; + +// If the Us are a template pack each assignable to T, use a static label. +template <typename T, typename... Us> +struct GraphAssemblerLabelForXHelper< + T, std::enable_if_t<std::conjunction_v<std::is_assignable<T&, Us>...>>, + Us...> { + using Type = GraphAssemblerLabel<sizeof...(Us)>; +}; + +// If the single arg is a vector of U assignable to T, use a dynamic label. +template <typename T, typename U> +struct GraphAssemblerLabelForXHelper< + T, std::enable_if_t<std::is_assignable_v<T&, U>>, base::SmallVector<U, 4>> { + using Type = GraphAssemblerDynamicLabel; +}; + +template <typename... Vars> +using GraphAssemblerLabelForVars = + typename GraphAssemblerLabelForXHelper<Node*, void, Vars...>::Type; + +template <typename... Reps> +using GraphAssemblerLabelForReps = + typename GraphAssemblerLabelForXHelper<MachineRepresentation, void, + Reps...>::Type; + +} // namespace detail + using NodeChangedCallback = std::function<void(Node*)>; class V8_EXPORT_PRIVATE GraphAssembler { public: @@ -212,35 +285,34 @@ class V8_EXPORT_PRIVATE GraphAssembler { // Create label. template <typename... Reps> - GraphAssemblerLabel<sizeof...(Reps)> MakeLabelFor( + detail::GraphAssemblerLabelForReps<Reps...> MakeLabelFor( GraphAssemblerLabelType type, Reps... reps) { std::array<MachineRepresentation, sizeof...(Reps)> reps_array = {reps...}; - return MakeLabel<sizeof...(Reps)>(reps_array, type); + return detail::GraphAssemblerLabelForReps<Reps...>( + type, loop_nesting_level_, std::move(reps_array)); } - - // As above, but with an std::array of machine representations. 
- template <int VarCount> - GraphAssemblerLabel<VarCount> MakeLabel( - std::array<MachineRepresentation, VarCount> reps_array, - GraphAssemblerLabelType type) { - return GraphAssemblerLabel<VarCount>(type, loop_nesting_level_, reps_array); + GraphAssemblerDynamicLabel MakeLabelFor( + GraphAssemblerLabelType type, + base::SmallVector<MachineRepresentation, 4> reps) { + return GraphAssemblerDynamicLabel(type, loop_nesting_level_, + std::move(reps)); } // Convenience wrapper for creating non-deferred labels. template <typename... Reps> - GraphAssemblerLabel<sizeof...(Reps)> MakeLabel(Reps... reps) { + detail::GraphAssemblerLabelForReps<Reps...> MakeLabel(Reps... reps) { return MakeLabelFor(GraphAssemblerLabelType::kNonDeferred, reps...); } // Convenience wrapper for creating loop labels. template <typename... Reps> - GraphAssemblerLabel<sizeof...(Reps)> MakeLoopLabel(Reps... reps) { + detail::GraphAssemblerLabelForReps<Reps...> MakeLoopLabel(Reps... reps) { return MakeLabelFor(GraphAssemblerLabelType::kLoop, reps...); } // Convenience wrapper for creating deferred labels. template <typename... Reps> - GraphAssemblerLabel<sizeof...(Reps)> MakeDeferredLabel(Reps... reps) { + detail::GraphAssemblerLabelForReps<Reps...> MakeDeferredLabel(Reps... reps) { return MakeLabelFor(GraphAssemblerLabelType::kDeferred, reps...); } @@ -349,7 +421,7 @@ class V8_EXPORT_PRIVATE GraphAssembler { void Bind(GraphAssemblerLabel<VarCount>* label); template <typename... Vars> - void Goto(GraphAssemblerLabel<sizeof...(Vars)>* label, Vars...); + void Goto(detail::GraphAssemblerLabelForVars<Vars...>* label, Vars...); // Branch hints are inferred from if_true/if_false deferred states. void BranchWithCriticalSafetyCheck(Node* condition, @@ -358,13 +430,14 @@ class V8_EXPORT_PRIVATE GraphAssembler { // Branch hints are inferred from if_true/if_false deferred states. template <typename... Vars> - void Branch(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* if_true, - GraphAssemblerLabel<sizeof...(Vars)>* if_false, Vars...); + void Branch(Node* condition, + detail::GraphAssemblerLabelForVars<Vars...>* if_true, + detail::GraphAssemblerLabelForVars<Vars...>* if_false, Vars...); template <typename... Vars> void BranchWithHint(Node* condition, - GraphAssemblerLabel<sizeof...(Vars)>* if_true, - GraphAssemblerLabel<sizeof...(Vars)>* if_false, + detail::GraphAssemblerLabelForVars<Vars...>* if_true, + detail::GraphAssemblerLabelForVars<Vars...>* if_false, BranchHint hint, Vars...); // Control helpers. @@ -372,7 +445,8 @@ class V8_EXPORT_PRIVATE GraphAssembler { // {GotoIf(c, l, h)} is equivalent to {BranchWithHint(c, l, templ, h); // Bind(templ)}. template <typename... Vars> - void GotoIf(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label, + void GotoIf(Node* condition, + detail::GraphAssemblerLabelForVars<Vars...>* label, BranchHint hint, Vars...); // {GotoIfNot(c, l, h)} is equivalent to {BranchWithHint(c, templ, l, h); @@ -381,18 +455,19 @@ class V8_EXPORT_PRIVATE GraphAssembler { // so {GotoIfNot(..., BranchHint::kTrue)} means "optimize for the case where // the branch is *not* taken". template <typename... Vars> - void GotoIfNot(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label, + void GotoIfNot(Node* condition, + detail::GraphAssemblerLabelForVars<Vars...>* label, BranchHint hint, Vars...); // {GotoIf(c, l)} is equivalent to {Branch(c, l, templ);Bind(templ)}. template <typename... 
Vars> - void GotoIf(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label, - Vars...); + void GotoIf(Node* condition, + detail::GraphAssemblerLabelForVars<Vars...>* label, Vars...); // {GotoIfNot(c, l)} is equivalent to {Branch(c, templ, l);Bind(templ)}. template <typename... Vars> - void GotoIfNot(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label, - Vars...); + void GotoIfNot(Node* condition, + detail::GraphAssemblerLabelForVars<Vars...>* label, Vars...); bool HasActiveBlock() const { // This is false if the current block has been terminated (e.g. by a Goto or @@ -437,7 +512,8 @@ class V8_EXPORT_PRIVATE GraphAssembler { protected: template <typename... Vars> - void MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, Vars... vars); + void MergeState(detail::GraphAssemblerLabelForVars<Vars...>* label, + Vars... vars); V8_INLINE Node* AddClonedNode(Node* node); @@ -525,8 +601,8 @@ class V8_EXPORT_PRIVATE GraphAssembler { template <typename... Vars> void BranchImpl(Node* condition, - GraphAssemblerLabel<sizeof...(Vars)>* if_true, - GraphAssemblerLabel<sizeof...(Vars)>* if_false, + detail::GraphAssemblerLabelForVars<Vars...>* if_true, + detail::GraphAssemblerLabelForVars<Vars...>* if_false, BranchHint hint, Vars...); Zone* temp_zone_; @@ -556,18 +632,21 @@ class V8_EXPORT_PRIVATE GraphAssembler { template <size_t VarCount> Node* GraphAssemblerLabel<VarCount>::PhiAt(size_t index) { DCHECK(IsBound()); - DCHECK_LT(index, VarCount); + DCHECK_LT(index, Count()); return bindings_[index]; } template <typename... Vars> -void GraphAssembler::MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, - Vars... vars) { +void GraphAssembler::MergeState( + detail::GraphAssemblerLabelForVars<Vars...>* label, Vars... vars) { + using NodeArray = typename detail::GraphAssemblerLabelForVars< + Vars...>::template Array<Node*>; RestoreEffectControlScope restore_effect_control_scope(this); const int merged_count = static_cast<int>(label->merged_count_); - static constexpr int kVarCount = sizeof...(vars); - std::array<Node*, kVarCount> var_array = {vars...}; + + const size_t var_count = label->Count(); + NodeArray var_array{vars...}; const bool is_loop_exit = label->loop_nesting_level_ != loop_nesting_level_; if (is_loop_exit) { @@ -585,7 +664,7 @@ void GraphAssembler::MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, AddNode(graph()->NewNode(common()->LoopExit(), control(), *loop_headers_.back())); AddNode(graph()->NewNode(common()->LoopExitEffect(), effect(), control())); - for (size_t i = 0; i < kVarCount; i++) { + for (size_t i = 0; i < var_count; i++) { var_array[i] = AddNode(graph()->NewNode( common()->LoopExitValue(MachineRepresentation::kTagged), var_array[i], control())); @@ -602,7 +681,7 @@ void GraphAssembler::MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, Node* terminate = graph()->NewNode(common()->Terminate(), label->effect_, label->control_); NodeProperties::MergeControlToEnd(graph(), common(), terminate); - for (size_t i = 0; i < kVarCount; i++) { + for (size_t i = 0; i < var_count; i++) { label->bindings_[i] = graph()->NewNode(common()->Phi(label->representations_[i], 2), var_array[i], var_array[i], label->control_); @@ -612,7 +691,7 @@ void GraphAssembler::MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, DCHECK_EQ(1, merged_count); label->control_->ReplaceInput(1, control()); label->effect_->ReplaceInput(1, effect()); - for (size_t i = 0; i < kVarCount; i++) { + for (size_t i = 0; i < var_count; i++) { label->bindings_[i]->ReplaceInput(1, var_array[i]); 
CHECK(!NodeProperties::IsTyped(var_array[i])); // Unsupported. } @@ -624,7 +703,7 @@ void GraphAssembler::MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, // Just set the control, effect and variables directly. label->control_ = control(); label->effect_ = effect(); - for (size_t i = 0; i < kVarCount; i++) { + for (size_t i = 0; i < var_count; i++) { label->bindings_[i] = var_array[i]; } } else if (merged_count == 1) { @@ -633,7 +712,7 @@ void GraphAssembler::MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, graph()->NewNode(common()->Merge(2), label->control_, control()); label->effect_ = graph()->NewNode(common()->EffectPhi(2), label->effect_, effect(), label->control_); - for (size_t i = 0; i < kVarCount; i++) { + for (size_t i = 0; i < var_count; i++) { label->bindings_[i] = graph()->NewNode( common()->Phi(label->representations_[i], 2), label->bindings_[i], var_array[i], label->control_); @@ -651,7 +730,7 @@ void GraphAssembler::MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, NodeProperties::ChangeOp(label->effect_, common()->EffectPhi(merged_count + 1)); - for (size_t i = 0; i < kVarCount; i++) { + for (size_t i = 0; i < var_count; i++) { DCHECK_EQ(IrOpcode::kPhi, label->bindings_[i]->opcode()); label->bindings_[i]->ReplaceInput(merged_count, var_array[i]); label->bindings_[i]->AppendInput(graph()->zone(), label->control_); @@ -686,7 +765,7 @@ void GraphAssembler::Bind(GraphAssemblerLabel<VarCount>* label) { if (label->merged_count_ > 1 || label->IsLoop()) { AddNode(label->control_); AddNode(label->effect_); - for (size_t i = 0; i < VarCount; i++) { + for (size_t i = 0; i < label->Count(); i++) { AddNode(label->bindings_[i]); } } else { @@ -697,10 +776,9 @@ void GraphAssembler::Bind(GraphAssemblerLabel<VarCount>* label) { } template <typename... Vars> -void GraphAssembler::Branch(Node* condition, - GraphAssemblerLabel<sizeof...(Vars)>* if_true, - GraphAssemblerLabel<sizeof...(Vars)>* if_false, - Vars... vars) { +void GraphAssembler::Branch( + Node* condition, detail::GraphAssemblerLabelForVars<Vars...>* if_true, + detail::GraphAssemblerLabelForVars<Vars...>* if_false, Vars... vars) { BranchHint hint = BranchHint::kNone; if (if_true->IsDeferred() != if_false->IsDeferred()) { hint = if_false->IsDeferred() ? BranchHint::kTrue : BranchHint::kFalse; @@ -711,17 +789,17 @@ void GraphAssembler::Branch(Node* condition, template <typename... Vars> void GraphAssembler::BranchWithHint( - Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* if_true, - GraphAssemblerLabel<sizeof...(Vars)>* if_false, BranchHint hint, + Node* condition, detail::GraphAssemblerLabelForVars<Vars...>* if_true, + detail::GraphAssemblerLabelForVars<Vars...>* if_false, BranchHint hint, Vars... vars) { BranchImpl(condition, if_true, if_false, hint, vars...); } template <typename... Vars> -void GraphAssembler::BranchImpl(Node* condition, - GraphAssemblerLabel<sizeof...(Vars)>* if_true, - GraphAssemblerLabel<sizeof...(Vars)>* if_false, - BranchHint hint, Vars... vars) { +void GraphAssembler::BranchImpl( + Node* condition, detail::GraphAssemblerLabelForVars<Vars...>* if_true, + detail::GraphAssemblerLabelForVars<Vars...>* if_false, BranchHint hint, + Vars... vars) { DCHECK_NOT_NULL(control()); Node* branch = graph()->NewNode(common()->Branch(hint), condition, control()); @@ -737,7 +815,7 @@ void GraphAssembler::BranchImpl(Node* condition, } template <typename... 
Vars> -void GraphAssembler::Goto(GraphAssemblerLabel<sizeof...(Vars)>* label, +void GraphAssembler::Goto(detail::GraphAssemblerLabelForVars<Vars...>* label, Vars... vars) { DCHECK_NOT_NULL(control()); DCHECK_NOT_NULL(effect()); @@ -749,7 +827,7 @@ void GraphAssembler::Goto(GraphAssemblerLabel<sizeof...(Vars)>* label, template <typename... Vars> void GraphAssembler::GotoIf(Node* condition, - GraphAssemblerLabel<sizeof...(Vars)>* label, + detail::GraphAssemblerLabelForVars<Vars...>* label, BranchHint hint, Vars... vars) { Node* branch = graph()->NewNode(common()->Branch(hint), condition, control()); @@ -760,9 +838,9 @@ void GraphAssembler::GotoIf(Node* condition, } template <typename... Vars> -void GraphAssembler::GotoIfNot(Node* condition, - GraphAssemblerLabel<sizeof...(Vars)>* label, - BranchHint hint, Vars... vars) { +void GraphAssembler::GotoIfNot( + Node* condition, detail::GraphAssemblerLabelForVars<Vars...>* label, + BranchHint hint, Vars... vars) { Node* branch = graph()->NewNode(common()->Branch(hint), condition, control()); control_ = graph()->NewNode(common()->IfFalse(), branch); @@ -773,7 +851,7 @@ void GraphAssembler::GotoIfNot(Node* condition, template <typename... Vars> void GraphAssembler::GotoIf(Node* condition, - GraphAssemblerLabel<sizeof...(Vars)>* label, + detail::GraphAssemblerLabelForVars<Vars...>* label, Vars... vars) { BranchHint hint = label->IsDeferred() ? BranchHint::kFalse : BranchHint::kNone; @@ -781,9 +859,9 @@ void GraphAssembler::GotoIf(Node* condition, } template <typename... Vars> -void GraphAssembler::GotoIfNot(Node* condition, - GraphAssemblerLabel<sizeof...(Vars)>* label, - Vars... vars) { +void GraphAssembler::GotoIfNot( + Node* condition, detail::GraphAssemblerLabelForVars<Vars...>* label, + Vars... vars) { BranchHint hint = label->IsDeferred() ? BranchHint::kTrue : BranchHint::kNone; return GotoIfNot(condition, label, hint, vars...); } @@ -831,6 +909,7 @@ class V8_EXPORT_PRIVATE JSGraphAssembler : public GraphAssembler { #undef SINGLETON_CONST_TEST_DECL Node* Allocate(AllocationType allocation, Node* size); + TNode<Map> LoadMap(TNode<HeapObject> object); Node* LoadField(FieldAccess const&, Node* object); template <typename T> TNode<T> LoadField(FieldAccess const& access, TNode<HeapObject> object) { diff --git a/deps/v8/src/compiler/graph-reducer.cc b/deps/v8/src/compiler/graph-reducer.cc index 48d84c21792fac..043b92a53d9da1 100644 --- a/deps/v8/src/compiler/graph-reducer.cc +++ b/deps/v8/src/compiler/graph-reducer.cc @@ -109,7 +109,7 @@ Reduction GraphReducer::Reduce(Node* const node) { // {replacement} == {node} represents an in-place reduction. Rerun // all the other reducers for this node, as now there may be more // opportunities for reduction. - if (FLAG_trace_turbo_reduction) { + if (v8_flags.trace_turbo_reduction) { UnparkedScopeIfNeeded unparked(broker_); // TODO(neis): Disallow racy handle dereference once we stop // supporting --no-local-heaps --no-concurrent-inlining. @@ -122,7 +122,7 @@ Reduction GraphReducer::Reduce(Node* const node) { continue; } else { // {node} was replaced by another node. - if (FLAG_trace_turbo_reduction) { + if (v8_flags.trace_turbo_reduction) { UnparkedScopeIfNeeded unparked(broker_); // TODO(neis): Disallow racy handle dereference once we stop // supporting --no-local-heaps --no-concurrent-inlining. 
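The GraphAssemblerLabel changes in graph-assembler.h above hinge on one technique: the label's backing storage is chosen at compile time, a std::array when VarCount is a genuine template constant, and a small vector when it is the kGraphAssemblerLabelDynamicCount sentinel, so callers can uniformly ask the container for its size. A minimal standalone sketch of that selection, with hypothetical names (StorageHelper, Label, kDynamicCount) and std::vector standing in for base::SmallVector:

    #include <array>
    #include <cstddef>
    #include <utility>
    #include <vector>

    // Sentinel meaning "variable count chosen at run time" (mirrors
    // detail::kGraphAssemblerLabelDynamicCount in the patch).
    constexpr size_t kDynamicCount = ~size_t{0};

    // Primary template: storage sized at compile time.
    template <size_t N>
    struct StorageHelper {
      template <typename T>
      using Array = std::array<T, N>;
    };

    // Specialization for the sentinel: storage sized at run time
    // (std::vector stands in for base::SmallVector here).
    template <>
    struct StorageHelper<kDynamicCount> {
      template <typename T>
      using Array = std::vector<T>;
    };

    template <size_t N>
    class Label {
     public:
      using Reps = typename StorageHelper<N>::template Array<int>;
      explicit Label(Reps reps) : reps_(std::move(reps)) {}
      // Both containers expose size(), so one accessor serves both cases.
      size_t Count() const { return reps_.size(); }

     private:
      Reps reps_;
    };

    int main() {
      Label<2> fixed({{1, 2}});                 // count fixed at compile time
      Label<kDynamicCount> dynamic({1, 2, 3});  // count fixed at run time
      return fixed.Count() + dynamic.Count() == 5 ? 0 : 1;
    }

Because both containers answer size(), the patch can replace compile-time VarCount loops with label->Count(), which is what makes the MergeState and Bind rewrites below work for static and dynamic labels alike.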
diff --git a/deps/v8/src/compiler/graph-trimmer.cc b/deps/v8/src/compiler/graph-trimmer.cc index e1dbfffff59151..3a85a456da68a8 100644 --- a/deps/v8/src/compiler/graph-trimmer.cc +++ b/deps/v8/src/compiler/graph-trimmer.cc @@ -33,7 +33,7 @@ void GraphTrimmer::TrimGraph() { for (Edge edge : live->use_edges()) { Node* const user = edge.from(); if (!IsLive(user)) { - if (FLAG_trace_turbo_trimming) { + if (v8_flags.trace_turbo_trimming) { StdoutStream{} << "DeadLink: " << *user << "(" << edge.index() << ") -> " << *live << std::endl; } diff --git a/deps/v8/src/compiler/graph-visualizer.cc b/deps/v8/src/compiler/graph-visualizer.cc index 9b8cc6a514ecfe..2cd220eb86a402 100644 --- a/deps/v8/src/compiler/graph-visualizer.cc +++ b/deps/v8/src/compiler/graph-visualizer.cc @@ -33,8 +33,8 @@ namespace compiler { const char* get_cached_trace_turbo_filename(OptimizedCompilationInfo* info) { if (!info->trace_turbo_filename()) { - info->set_trace_turbo_filename( - GetVisualizerLogFileName(info, FLAG_trace_turbo_path, nullptr, "json")); + info->set_trace_turbo_filename(GetVisualizerLogFileName( + info, v8_flags.trace_turbo_path, nullptr, "json")); } return info->trace_turbo_filename(); } @@ -232,7 +232,7 @@ std::unique_ptr<char[]> GetVisualizerLogFileName(OptimizedCompilationInfo* info, const char* suffix) { base::EmbeddedVector<char, 256> filename(0); std::unique_ptr<char[]> debug_name = info->GetDebugName(); - const char* file_prefix = FLAG_trace_turbo_file_prefix.value(); + const char* file_prefix = v8_flags.trace_turbo_file_prefix.value(); int optimization_id = info->IsOptimizing() ? info->optimization_id() : 0; if (strlen(debug_name.get()) > 0) { SNPrintF(filename, "%s-%s-%i", file_prefix, debug_name.get(), @@ -246,7 +246,7 @@ std::unique_ptr<char[]> GetVisualizerLogFileName(OptimizedCompilationInfo* info, } base::EmbeddedVector<char, 256> source_file(0); bool source_available = false; - if (FLAG_trace_file_names && info->has_shared_info() && + if (v8_flags.trace_file_names && info->has_shared_info() && info->shared_info()->script().IsScript()) { Object source_name = Script::cast(info->shared_info()->script()).name(); if (source_name.IsString()) { @@ -680,7 +680,7 @@ void GraphC1Visualizer::PrintSchedule(const char* phase, PrintIndent(); os_ << "0 " << uses << " "; PrintNode(node); - if (FLAG_trace_turbo_types) { + if (v8_flags.trace_turbo_types) { os_ << " "; PrintType(node); } @@ -710,7 +710,7 @@ void GraphC1Visualizer::PrintSchedule(const char* phase, for (BasicBlock* successor : current->successors()) { os_ << " B" << successor->rpo_number(); } - if (FLAG_trace_turbo_types && current->control_input() != nullptr) { + if (v8_flags.trace_turbo_types && current->control_input() != nullptr) { os_ << " "; PrintType(current->control_input()); } @@ -811,7 +811,7 @@ void GraphC1Visualizer::PrintLiveRange(const LiveRange* range, const char* type, UsePosition* current_pos = range->first_pos(); while (current_pos != nullptr) { - if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) { + if (current_pos->RegisterIsBeneficial() || v8_flags.trace_all_uses) { os_ << " " << current_pos->pos().value() << " M"; } current_pos = current_pos->next(); diff --git a/deps/v8/src/compiler/heap-refs.cc b/deps/v8/src/compiler/heap-refs.cc index 391796d00e06fa..309618a513da95 100644 --- a/deps/v8/src/compiler/heap-refs.cc +++ b/deps/v8/src/compiler/heap-refs.cc @@ -1561,6 +1561,7 @@ ObjectRef CallHandlerInfoRef::data() const { HEAP_ACCESSOR_C(ScopeInfo, int, ContextLength) HEAP_ACCESSOR_C(ScopeInfo, bool, 
HasContextExtensionSlot) HEAP_ACCESSOR_C(ScopeInfo, bool, HasOuterScopeInfo) +HEAP_ACCESSOR_C(ScopeInfo, bool, ClassScopeHasPrivateBrand) ScopeInfoRef ScopeInfoRef::OuterScopeInfo() const { return MakeRefAssumeMemoryFence(broker(), object()->OuterScopeInfo()); @@ -1701,7 +1702,7 @@ ZoneVector<const CFunctionInfo*> FunctionTemplateInfoRef::c_signatures() const { bool StringRef::IsSeqString() const { return object()->IsSeqString(); } -ScopeInfoRef NativeContextRef::scope_info() const { +ScopeInfoRef ContextRef::scope_info() const { // The scope_info is immutable after initialization. return MakeRefAssumeMemoryFence(broker(), object()->scope_info()); } @@ -1832,17 +1833,23 @@ base::Optional<Object> JSObjectRef::GetOwnConstantElementFromHeap( // This block is carefully constructed to avoid Ref creation and access since // this method may be called after the broker has retired. // The relaxed `length` read is safe to use in this case since: - // - GetOwnConstantElement only detects a constant for JSArray holders if - // the array is frozen/sealed. - // - Frozen/sealed arrays can't change length. - // - We've already seen a map with frozen/sealed elements_kinds (above); + // - TryGetOwnConstantElement (below) only detects a constant for JSArray + // holders if the array is frozen. + // - Frozen arrays can't change length. + // - We've already seen the corresponding map (when this JSObjectRef was + // created); // - The release-load of that map ensures we read the newest value // of `length` below. if (holder->IsJSArray()) { + Object array_length_obj = + JSArray::cast(*holder).length(broker()->isolate(), kRelaxedLoad); + if (!array_length_obj.IsSmi()) { + // Can't safely read into HeapNumber objects without atomic semantics + // (relaxed would be sufficient due to the guarantees above). + return {}; + } uint32_t array_length; - if (!JSArray::cast(*holder) - .length(broker()->isolate(), kRelaxedLoad) - .ToArrayLength(&array_length)) { + if (!array_length_obj.ToArrayLength(&array_length)) { return {}; } // See also ElementsAccessorBase::GetMaxIndex. @@ -2250,7 +2257,7 @@ base::Optional<PropertyCellRef> JSGlobalObjectRef::GetPropertyCell( } std::ostream& operator<<(std::ostream& os, const ObjectRef& ref) { - if (!FLAG_concurrent_recompilation) { + if (!v8_flags.concurrent_recompilation) { // We cannot be in a background thread so it's safe to read the heap. AllowHandleDereference allow_handle_dereference; return os << ref.data() << " {" << ref.object() << "}"; diff --git a/deps/v8/src/compiler/heap-refs.h b/deps/v8/src/compiler/heap-refs.h index 1076a1bafd4ea2..0564b609cf6f95 100644 --- a/deps/v8/src/compiler/heap-refs.h +++ b/deps/v8/src/compiler/heap-refs.h @@ -56,7 +56,8 @@ class PropertyAccessInfo; enum class AccessMode { kLoad, kStore, kStoreInLiteral, kHas, kDefine }; inline bool IsAnyStore(AccessMode mode) { - return mode == AccessMode::kStore || mode == AccessMode::kStoreInLiteral; + return mode == AccessMode::kStore || mode == AccessMode::kStoreInLiteral || + mode == AccessMode::kDefine; } enum class OddballType : uint8_t { @@ -527,6 +528,8 @@ class ContextRef : public HeapObjectRef { // Only returns a value if the index is valid for this ContextRef. 
base::Optional<ObjectRef> get(int index) const; + + ScopeInfoRef scope_info() const; }; #define BROKER_NATIVE_CONTEXT_FIELDS(V) \ @@ -584,7 +587,6 @@ class NativeContextRef : public ContextRef { BROKER_NATIVE_CONTEXT_FIELDS(DECL_ACCESSOR) #undef DECL_ACCESSOR - ScopeInfoRef scope_info() const; MapRef GetFunctionMapFromIndex(int index) const; MapRef GetInitialJSArrayMap(ElementsKind kind) const; base::Optional<JSFunctionRef> GetConstructorFunction(const MapRef& map) const; @@ -879,6 +881,7 @@ class ScopeInfoRef : public HeapObjectRef { int ContextLength() const; bool HasOuterScopeInfo() const; bool HasContextExtensionSlot() const; + bool ClassScopeHasPrivateBrand() const; ScopeInfoRef OuterScopeInfo() const; }; @@ -899,6 +902,7 @@ class ScopeInfoRef : public HeapObjectRef { V(int, StartPosition) \ V(bool, is_compiled) \ V(bool, IsUserJavaScript) \ + V(bool, requires_instance_members_initializer) \ IF_WASM(V, const wasm::WasmModule*, wasm_module) \ IF_WASM(V, const wasm::FunctionSig*, wasm_function_signature) diff --git a/deps/v8/src/compiler/js-call-reducer.cc b/deps/v8/src/compiler/js-call-reducer.cc index 35220fa28496e0..256d41748e6420 100644 --- a/deps/v8/src/compiler/js-call-reducer.cc +++ b/deps/v8/src/compiler/js-call-reducer.cc @@ -29,6 +29,7 @@ #include "src/compiler/state-values-utils.h" #include "src/compiler/type-cache.h" #include "src/ic/call-optimization.h" +#include "src/objects/elements-kind.h" #include "src/objects/js-function.h" #include "src/objects/objects-inl.h" #include "src/objects/ordered-hash-table.h" @@ -255,6 +256,7 @@ class JSCallReducerAssembler : public JSGraphAssembler { TNode<Object> value, NumberOperationHint hint = NumberOperationHint::kNumberOrOddball); TNode<Smi> CheckSmi(TNode<Object> value); + TNode<Number> CheckNumber(TNode<Object> value); TNode<String> CheckString(TNode<Object> value); TNode<Number> CheckBounds(TNode<Number> value, TNode<Number> limit); @@ -681,7 +683,7 @@ class IteratingArrayBuiltinReducerAssembler : public JSCallReducerAssembler { public: IteratingArrayBuiltinReducerAssembler(JSCallReducer* reducer, Node* node) : JSCallReducerAssembler(reducer, node) { - DCHECK(FLAG_turbo_inline_array_builtins); + DCHECK(v8_flags.turbo_inline_array_builtins); } TNode<Object> ReduceArrayPrototypeForEach( @@ -714,6 +716,7 @@ class IteratingArrayBuiltinReducerAssembler : public JSCallReducerAssembler { bool needs_fallback_builtin_call); TNode<Object> ReduceArrayPrototypeIndexOfIncludes( ElementsKind kind, ArrayIndexOfIncludesVariant variant); + TNode<Number> ReduceArrayPrototypePush(MapInference* inference); private: // Returns {index,value}. Assumes that the map has not changed, but possibly @@ -743,10 +746,7 @@ class IteratingArrayBuiltinReducerAssembler : public JSCallReducerAssembler { TNode<Vars>... 
vars) { if (!IsHoleyElementsKind(kind)) return o; - std::array<MachineRepresentation, sizeof...(Vars)> reps = { - MachineRepresentationOf<Vars>::value...}; - auto if_not_hole = - MakeLabel<sizeof...(Vars)>(reps, GraphAssemblerLabelType::kNonDeferred); + auto if_not_hole = MakeLabel(MachineRepresentationOf<Vars>::value...); BranchWithHint(HoleCheck(kind, o), continue_label, &if_not_hole, BranchHint::kFalse, vars...); @@ -1029,6 +1029,11 @@ TNode<Smi> JSCallReducerAssembler::CheckSmi(TNode<Object> value) { value, effect(), control())); } +TNode<Number> JSCallReducerAssembler::CheckNumber(TNode<Object> value) { + return AddNode<Number>(graph()->NewNode(simplified()->CheckNumber(feedback()), + value, effect(), control())); +} + TNode<String> JSCallReducerAssembler::CheckString(TNode<Object> value) { return AddNode<String>(graph()->NewNode(simplified()->CheckString(feedback()), value, effect(), control())); @@ -1329,8 +1334,7 @@ TNode<Object> IteratingArrayBuiltinReducerAssembler::ReduceArrayPrototypeAt( TNode<Number> index_num = CheckSmi(index); TNode<FixedArrayBase> elements = LoadElements(receiver); - TNode<Map> receiver_map = - TNode<Map>::UncheckedCast(LoadField(AccessBuilder::ForMap(), receiver)); + TNode<Map> receiver_map = LoadMap(receiver); auto out = MakeLabel(MachineRepresentation::kTagged); @@ -1401,6 +1405,129 @@ TNode<Object> IteratingArrayBuiltinReducerAssembler::ReduceArrayPrototypeAt( return out.PhiAt<Object>(0); } +TNode<Number> IteratingArrayBuiltinReducerAssembler::ReduceArrayPrototypePush( + MapInference* inference) { + int const num_push_arguments = ArgumentCount(); + ZoneVector<MapRef> const& receiver_maps = inference->GetMaps(); + + base::SmallVector<MachineRepresentation, 4> argument_reps; + base::SmallVector<Node*, 4> argument_nodes; + + for (int i = 0; i < num_push_arguments; ++i) { + argument_reps.push_back(MachineRepresentation::kTagged); + argument_nodes.push_back(Argument(i)); + } + + TNode<JSArray> receiver = ReceiverInputAs<JSArray>(); + TNode<Map> receiver_map = LoadMap(receiver); + + auto double_label = MakeLabel(argument_reps); + auto smi_label = MakeLabel(argument_reps); + auto object_label = MakeLabel(argument_reps); + + for (size_t i = 0; i < receiver_maps.size(); i++) { + const MapRef& map = receiver_maps[i]; + ElementsKind kind = map.elements_kind(); + + if (i < receiver_maps.size() - 1) { + TNode<Boolean> is_map_equal = ReferenceEqual(receiver_map, Constant(map)); + if (IsDoubleElementsKind(kind)) { + GotoIf(is_map_equal, &double_label, argument_nodes); + } else if (IsSmiElementsKind(kind)) { + GotoIf(is_map_equal, &smi_label, argument_nodes); + } else { + GotoIf(is_map_equal, &object_label, argument_nodes); + } + } else { + if (IsDoubleElementsKind(kind)) { + Goto(&double_label, argument_nodes); + } else if (IsSmiElementsKind(kind)) { + Goto(&smi_label, argument_nodes); + } else { + Goto(&object_label, argument_nodes); + } + } + } + + auto return_label = MakeLabel(MachineRepresentation::kTagged); + + auto build_array_push = [&](ElementsKind kind, + base::SmallVector<Node*, 1>& push_arguments) { + // Only support PACKED_ELEMENTS and PACKED_DOUBLE_ELEMENTS, as "markers" of + // what the elements array is (a FixedArray or FixedDoubleArray). + DCHECK(kind == PACKED_ELEMENTS || kind == PACKED_DOUBLE_ELEMENTS); + + // Load the "length" property of the {receiver}. + TNode<Smi> length = LoadJSArrayLength(receiver, kind); + TNode<Number> return_value = length; + + // Check if we have any {values} to push. 
+ if (num_push_arguments > 0) { + // Compute the resulting "length" of the {receiver}. + TNode<Number> new_length = return_value = + NumberAdd(length, NumberConstant(num_push_arguments)); + + // Load the elements backing store of the {receiver}. + TNode<FixedArrayBase> elements = LoadElements(receiver); + TNode<Smi> elements_length = LoadFixedArrayBaseLength(elements); + + elements = MaybeGrowFastElements( + kind, feedback(), receiver, elements, + NumberAdd(length, NumberConstant(num_push_arguments - 1)), + elements_length); + + // Update the JSArray::length field. Since this is observable, + // there must be no other check after this. + StoreJSArrayLength(receiver, new_length, kind); + + // Append the {values} to the {elements}. + for (int i = 0; i < num_push_arguments; ++i) { + StoreFixedArrayBaseElement( + elements, NumberAdd(length, NumberConstant(i)), + TNode<Object>::UncheckedCast(push_arguments[i]), kind); + } + } + + Goto(&return_label, return_value); + }; + + if (double_label.IsUsed()) { + Bind(&double_label); + base::SmallVector<Node*, 1> push_arguments(num_push_arguments); + for (int i = 0; i < num_push_arguments; ++i) { + Node* value = + CheckNumber(TNode<Object>::UncheckedCast(double_label.PhiAt(i))); + // Make sure we do not store signaling NaNs into double arrays. + value = AddNode<Number>( + graph()->NewNode(simplified()->NumberSilenceNaN(), value)); + push_arguments[i] = value; + } + build_array_push(PACKED_DOUBLE_ELEMENTS, push_arguments); + } + + if (smi_label.IsUsed()) { + Bind(&smi_label); + base::SmallVector<Node*, 4> push_arguments(num_push_arguments); + for (int i = 0; i < num_push_arguments; ++i) { + Node* value = CheckSmi(TNode<Object>::UncheckedCast(smi_label.PhiAt(i))); + push_arguments[i] = value; + } + Goto(&object_label, push_arguments); + } + + if (object_label.IsUsed()) { + Bind(&object_label); + base::SmallVector<Node*, 1> push_arguments(num_push_arguments); + for (int i = 0; i < num_push_arguments; ++i) { + push_arguments[i] = object_label.PhiAt(i); + } + build_array_push(PACKED_ELEMENTS, push_arguments); + } + + Bind(&return_label); + return TNode<Number>::UncheckedCast(return_label.PhiAt(0)); +} + namespace { struct ForEachFrameStateParams { @@ -3402,7 +3529,7 @@ class IteratingArrayBuiltinHelper { effect_(NodeProperties::GetEffectInput(node)), control_(NodeProperties::GetControlInput(node)), inference_(broker, receiver_, effect_) { - if (!FLAG_turbo_inline_array_builtins) return; + if (!v8_flags.turbo_inline_array_builtins) return; DCHECK_EQ(IrOpcode::kJSCall, node->opcode()); const CallParameters& p = CallParametersOf(node->op()); @@ -3700,7 +3827,7 @@ FastApiCallFunctionVector CanOptimizeFastCall( Zone* zone, const FunctionTemplateInfoRef& function_template_info, size_t argc) { FastApiCallFunctionVector result(zone); - if (!FLAG_turbo_fast_api_calls) return result; + if (!v8_flags.turbo_fast_api_calls) return result; static constexpr int kReceiver = 1; @@ -4275,7 +4402,7 @@ Reduction JSCallReducer::ReduceCallOrConstructWithArrayLikeOrSpread( feedback_source, speculation_mode, feedback_relation); } - if (!FLAG_turbo_optimize_apply) return NoChange(); + if (!v8_flags.turbo_optimize_apply) return NoChange(); // Optimization of construct nodes not supported yet. 
if (!IsCallWithArrayLikeOrSpread(node)) return NoChange(); @@ -5620,7 +5747,7 @@ void JSCallReducer::CheckIfElementsKind(Node* receiver_elements_kind, // ES6 section 23.1.3.1 Array.prototype.at ( ) Reduction JSCallReducer::ReduceArrayPrototypeAt(Node* node) { - if (!FLAG_turbo_inline_array_builtins) return NoChange(); + if (!v8_flags.turbo_inline_array_builtins) return NoChange(); JSCallNode n(node); CallParameters const& p = n.Parameters(); @@ -5675,7 +5802,6 @@ Reduction JSCallReducer::ReduceArrayPrototypePush(Node* node) { return NoChange(); } - int const num_values = n.ArgumentCount(); Node* receiver = n.receiver(); Effect effect = n.effect(); Control control = n.control(); @@ -5691,118 +5817,15 @@ Reduction JSCallReducer::ReduceArrayPrototypePush(Node* node) { if (!dependencies()->DependOnNoElementsProtector()) { return inference.NoChange(); } + inference.RelyOnMapsPreferStability(dependencies(), jsgraph(), &effect, control, p.feedback()); - std::vector<Node*> controls_to_merge; - std::vector<Node*> effects_to_merge; - std::vector<Node*> values_to_merge; - Node* return_value = jsgraph()->UndefinedConstant(); - - Node* receiver_elements_kind = - LoadReceiverElementsKind(receiver, &effect, control); - Node* next_control = control; - Node* next_effect = effect; - for (size_t i = 0; i < kinds.size(); i++) { - ElementsKind kind = kinds[i]; - control = next_control; - effect = next_effect; - // We do not need branch for the last elements kind. - if (i != kinds.size() - 1) { - Node* control_node = control; - CheckIfElementsKind(receiver_elements_kind, kind, control_node, - &control_node, &next_control); - control = control_node; - } - - // Collect the value inputs to push. - std::vector<Node*> values(num_values); - for (int j = 0; j < num_values; ++j) { - values[j] = n.Argument(j); - } - - for (auto& value : values) { - if (IsSmiElementsKind(kind)) { - value = effect = graph()->NewNode(simplified()->CheckSmi(p.feedback()), - value, effect, control); - } else if (IsDoubleElementsKind(kind)) { - value = effect = graph()->NewNode( - simplified()->CheckNumber(p.feedback()), value, effect, control); - // Make sure we do not store signaling NaNs into double arrays. - value = graph()->NewNode(simplified()->NumberSilenceNaN(), value); - } - } - - // Load the "length" property of the {receiver}. - Node* length = effect = graph()->NewNode( - simplified()->LoadField(AccessBuilder::ForJSArrayLength(kind)), - receiver, effect, control); - return_value = length; - - // Check if we have any {values} to push. - if (num_values > 0) { - // Compute the resulting "length" of the {receiver}. - Node* new_length = return_value = graph()->NewNode( - simplified()->NumberAdd(), length, jsgraph()->Constant(num_values)); - - // Load the elements backing store of the {receiver}. - Node* elements = effect = graph()->NewNode( - simplified()->LoadField(AccessBuilder::ForJSObjectElements()), - receiver, effect, control); - Node* elements_length = effect = graph()->NewNode( - simplified()->LoadField(AccessBuilder::ForFixedArrayLength()), - elements, effect, control); - - GrowFastElementsMode mode = - IsDoubleElementsKind(kind) - ? GrowFastElementsMode::kDoubleElements - : GrowFastElementsMode::kSmiOrObjectElements; - elements = effect = graph()->NewNode( - simplified()->MaybeGrowFastElements(mode, p.feedback()), receiver, - elements, - graph()->NewNode(simplified()->NumberAdd(), length, - jsgraph()->Constant(num_values - 1)), - elements_length, effect, control); - - // Update the JSArray::length field. 
Since this is observable,
-      // there must be no other check after this.
-      effect = graph()->NewNode(
-          simplified()->StoreField(AccessBuilder::ForJSArrayLength(kind)),
-          receiver, new_length, effect, control);
-
-      // Append the {values} to the {elements}.
-      for (int j = 0; j < num_values; ++j) {
-        Node* value = values[j];
-        Node* index = graph()->NewNode(simplified()->NumberAdd(), length,
-                                       jsgraph()->Constant(j));
-        effect =
-            graph()->NewNode(simplified()->StoreElement(
-                                 AccessBuilder::ForFixedArrayElement(kind)),
-                             elements, index, value, effect, control);
-      }
-    }
-
-    controls_to_merge.push_back(control);
-    effects_to_merge.push_back(effect);
-    values_to_merge.push_back(return_value);
-  }
-
-  if (controls_to_merge.size() > 1) {
-    int const count = static_cast<int>(controls_to_merge.size());
-
-    control = graph()->NewNode(common()->Merge(count), count,
-                               &controls_to_merge.front());
-    effects_to_merge.push_back(control);
-    effect = graph()->NewNode(common()->EffectPhi(count), count + 1,
-                              &effects_to_merge.front());
-    values_to_merge.push_back(control);
-    return_value =
-        graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, count),
-                         count + 1, &values_to_merge.front());
-  }
+  IteratingArrayBuiltinReducerAssembler a(this, node);
+  a.InitializeEffectControl(effect, control);

-  ReplaceWithValue(node, return_value, effect, control);
-  return Replace(return_value);
+  TNode<Object> subgraph = a.ReduceArrayPrototypePush(&inference);
+  return ReplaceWithSubgraph(&a, subgraph);
 }

 // ES6 section 22.1.3.17 Array.prototype.pop ( )
@@ -6072,7 +6095,7 @@ Reduction JSCallReducer::ReduceArrayPrototypeShift(Node* node) {
     ElementAccess const access = AccessBuilder::ForFixedArrayElement(kind);

-    // When disable FLAG_turbo_loop_variable, typer cannot infer index
+    // When v8_flags.turbo_loop_variable is disabled, typer cannot infer index
     // is in [1, kMaxCopyElements-1], and will break in representing
     // kRepFloat64 (Range(1, inf)) to kRepWord64 when converting
     // input for kLoadElement. So we need to add type guard here.
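// [Editor's sketch -- illustrative, not part of the patch] The new
// ReduceArrayPrototypePush above emits, per elements kind, the same fast path
// the deleted hand-rolled graph construction built: grow the backing store
// once, write the new length, then append the values. A standalone scalar
// model of that fast path; FastArray, EnsureCapacity and FastPush are
// hypothetical names, not V8 API:
#include <cstddef>
#include <vector>

struct FastArray {
  std::size_t length = 0;
  std::vector<double> elements;  // models a PACKED_DOUBLE_ELEMENTS store
};

// Models MaybeGrowFastElements: make room up to the highest new index.
void EnsureCapacity(FastArray& a, std::size_t needed) {
  if (a.elements.size() < needed) a.elements.resize(needed);
}

std::size_t FastPush(FastArray& a, const double* values, std::size_t count) {
  std::size_t old_length = a.length;
  std::size_t new_length = old_length + count;
  EnsureCapacity(a, new_length);
  a.length = new_length;  // observable: no further check may follow this store
  for (std::size_t i = 0; i < count; ++i) {
    a.elements[old_length + i] = values[i];
  }
  return new_length;  // Array.prototype.push returns the new length
}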
@@ -6193,7 +6216,7 @@ Reduction JSCallReducer::ReduceArrayPrototypeShift(Node* node) { // ES6 section 22.1.3.23 Array.prototype.slice ( ) Reduction JSCallReducer::ReduceArrayPrototypeSlice(Node* node) { - if (!FLAG_turbo_inline_array_builtins) return NoChange(); + if (!v8_flags.turbo_inline_array_builtins) return NoChange(); JSCallNode n(node); CallParameters const& p = n.Parameters(); if (p.speculation_mode() == SpeculationMode::kDisallowSpeculation) { @@ -8233,7 +8256,7 @@ Reduction JSCallReducer::ReduceNumberParseInt(Node* node) { Reduction JSCallReducer::ReduceRegExpPrototypeTest(Node* node) { JSCallNode n(node); CallParameters const& p = n.Parameters(); - if (FLAG_force_slow_path) return NoChange(); + if (v8_flags.force_slow_path) return NoChange(); if (n.ArgumentCount() < 1) return NoChange(); if (p.speculation_mode() == SpeculationMode::kDisallowSpeculation) { diff --git a/deps/v8/src/compiler/js-create-lowering.cc b/deps/v8/src/compiler/js-create-lowering.cc index 4885a6c842248d..85e975433748f8 100644 --- a/deps/v8/src/compiler/js-create-lowering.cc +++ b/deps/v8/src/compiler/js-create-lowering.cc @@ -359,7 +359,7 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) { int argument_count = args_state_info.parameter_count() - 1; int length = std::max(0, argument_count - start_index); static_assert(JSArray::kHeaderSize == 4 * kTaggedSize); - a.Allocate(JSArray::kHeaderSize); + a.Allocate(ALIGN_TO_ALLOCATION_ALIGNMENT(JSArray::kHeaderSize)); a.Store(AccessBuilder::ForMap(), jsarray_map); a.Store(AccessBuilder::ForJSObjectPropertiesOrHashKnownPointer(), jsgraph()->EmptyFixedArrayConstant()); @@ -1049,7 +1049,7 @@ Reduction JSCreateLowering::ReduceJSCreateKeyValueArray(Node* node) { Node* elements = aa.Finish(); AllocationBuilder a(jsgraph(), elements, graph()->start()); - a.Allocate(JSArray::kHeaderSize); + a.Allocate(ALIGN_TO_ALLOCATION_ALIGNMENT(JSArray::kHeaderSize)); a.Store(AccessBuilder::ForMap(), array_map); a.Store(AccessBuilder::ForJSObjectPropertiesOrHashKnownPointer(), jsgraph()->EmptyFixedArrayConstant()); diff --git a/deps/v8/src/compiler/js-generic-lowering.cc b/deps/v8/src/compiler/js-generic-lowering.cc index fe831d1bc8515b..1cca549fbbaafa 100644 --- a/deps/v8/src/compiler/js-generic-lowering.cc +++ b/deps/v8/src/compiler/js-generic-lowering.cc @@ -552,6 +552,11 @@ void JSGenericLowering::LowerJSGetSuperConstructor(Node* node) { AccessBuilder::ForMapPrototype())); } +void JSGenericLowering::LowerJSFindNonDefaultConstructorOrConstruct( + Node* node) { + ReplaceWithBuiltinCall(node, Builtin::kFindNonDefaultConstructorOrConstruct); +} + void JSGenericLowering::LowerJSHasInPrototypeChain(Node* node) { ReplaceWithRuntimeCall(node, Runtime::kHasInPrototypeChain); } @@ -1060,78 +1065,11 @@ void JSGenericLowering::LowerJSWasmCall(Node* node) {} #endif // V8_ENABLE_WEBASSEMBLY void JSGenericLowering::LowerJSForInPrepare(Node* node) { - JSForInPrepareNode n(node); - Effect effect(node); // {node} is kept in the effect chain. - Control control = n.control(); // .. but not in the control chain. - Node* enumerator = n.enumerator(); - Node* slot = - jsgraph()->UintPtrConstant(n.Parameters().feedback().slot.ToInt()); - - std::vector<Edge> use_edges; - for (Edge edge : node->use_edges()) use_edges.push_back(edge); - - // {node} will be changed to a builtin call (see below). The returned value - // is a fixed array containing {cache_array} and {cache_length}. 
- // TODO(jgruber): This is awkward; what we really want is two return values, - // the {cache_array} and {cache_length}, or better yet three return values - // s.t. we can avoid the graph rewrites below. Builtin support for multiple - // return types is unclear though. - - Node* result_fixed_array = node; - Node* cache_type = enumerator; // Just to clarify the rename. - Node* cache_array; - Node* cache_length; - - cache_array = effect = graph()->NewNode( - machine()->Load(MachineType::AnyTagged()), result_fixed_array, - jsgraph()->IntPtrConstant(FixedArray::OffsetOfElementAt(0) - - kHeapObjectTag), - effect, control); - cache_length = effect = graph()->NewNode( - machine()->Load(MachineType::AnyTagged()), result_fixed_array, - jsgraph()->IntPtrConstant(FixedArray::OffsetOfElementAt(1) - - kHeapObjectTag), - effect, control); - - // Update the uses of {node}. - for (Edge edge : use_edges) { - Node* const user = edge.from(); - if (NodeProperties::IsEffectEdge(edge)) { - edge.UpdateTo(effect); - } else if (NodeProperties::IsControlEdge(edge)) { - edge.UpdateTo(control); - } else { - DCHECK(NodeProperties::IsValueEdge(edge)); - switch (ProjectionIndexOf(user->op())) { - case 0: - Replace(user, cache_type); - break; - case 1: - Replace(user, cache_array); - break; - case 2: - Replace(user, cache_length); - break; - default: - UNREACHABLE(); - } - } - } - - // Finally, change the original node into a builtin call. This happens here, - // after graph rewrites, since the Call does not have a control output and - // thus must not have any control uses. Any previously existing control - // outputs have been replaced by the graph rewrite above. - node->InsertInput(zone(), n.FeedbackVectorIndex(), slot); - ReplaceWithBuiltinCall(node, Builtin::kForInPrepare); + UNREACHABLE(); // Eliminated in typed lowering. } void JSGenericLowering::LowerJSForInNext(Node* node) { - JSForInNextNode n(node); - node->InsertInput( - zone(), 0, - jsgraph()->UintPtrConstant(n.Parameters().feedback().slot.ToInt())); - ReplaceWithBuiltinCall(node, Builtin::kForInNext); + UNREACHABLE(); // Eliminated in typed lowering. } void JSGenericLowering::LowerJSLoadMessage(Node* node) { diff --git a/deps/v8/src/compiler/js-heap-broker.cc b/deps/v8/src/compiler/js-heap-broker.cc index 83246ca90d66d6..48b21bea31e464 100644 --- a/deps/v8/src/compiler/js-heap-broker.cc +++ b/deps/v8/src/compiler/js-heap-broker.cc @@ -486,7 +486,7 @@ ProcessedFeedback const& JSHeapBroker::ReadFeedbackForPropertyAccess( // if non-deprecation is important. if (map.is_deprecated()) { // TODO(ishell): support fast map updating if we enable it. 
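// [Editor's sketch -- illustrative, not part of the patch] The raw machine
// loads in the lowering deleted above subtract kHeapObjectTag because V8 heap
// pointers are tagged: the low bit is set, so a field at byte offset k of an
// object with tagged pointer p lives at p - 1 + k. Condensed:
#include <cstdint>

constexpr std::intptr_t kTag = 1;  // mirrors V8's kHeapObjectTag
inline std::intptr_t FieldAddress(std::intptr_t tagged_ptr,
                                  std::intptr_t byte_offset) {
  return tagged_ptr - kTag + byte_offset;
}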
- CHECK(!FLAG_fast_map_update); + CHECK(!v8_flags.fast_map_update); base::Optional<Map> maybe_map = MapUpdater::TryUpdateNoLock( isolate(), *map.object(), ConcurrencyMode::kConcurrent); if (maybe_map.has_value()) { diff --git a/deps/v8/src/compiler/js-heap-broker.h b/deps/v8/src/compiler/js-heap-broker.h index a841be74c3b611..1faa1004e943a6 100644 --- a/deps/v8/src/compiler/js-heap-broker.h +++ b/deps/v8/src/compiler/js-heap-broker.h @@ -40,16 +40,16 @@ class ObjectRef; std::ostream& operator<<(std::ostream& os, const ObjectRef& ref); -#define TRACE_BROKER(broker, x) \ - do { \ - if (broker->tracing_enabled() && FLAG_trace_heap_broker_verbose) \ - StdoutStream{} << broker->Trace() << x << '\n'; \ +#define TRACE_BROKER(broker, x) \ + do { \ + if (broker->tracing_enabled() && v8_flags.trace_heap_broker_verbose) \ + StdoutStream{} << broker->Trace() << x << '\n'; \ } while (false) -#define TRACE_BROKER_MEMORY(broker, x) \ - do { \ - if (broker->tracing_enabled() && FLAG_trace_heap_broker_memory) \ - StdoutStream{} << broker->Trace() << x << std::endl; \ +#define TRACE_BROKER_MEMORY(broker, x) \ + do { \ + if (broker->tracing_enabled() && v8_flags.trace_heap_broker_memory) \ + StdoutStream{} << broker->Trace() << x << std::endl; \ } while (false) #define TRACE_BROKER_MISSING(broker, x) \ @@ -101,7 +101,7 @@ class V8_EXPORT_PRIVATE JSHeapBroker { // For use only in tests, sets default values for some arguments. Avoids // churn when new flags are added. JSHeapBroker(Isolate* isolate, Zone* broker_zone) - : JSHeapBroker(isolate, broker_zone, FLAG_trace_heap_broker, + : JSHeapBroker(isolate, broker_zone, v8_flags.trace_heap_broker, CodeKind::TURBOFAN) {} ~JSHeapBroker(); diff --git a/deps/v8/src/compiler/js-inlining-heuristic.cc b/deps/v8/src/compiler/js-inlining-heuristic.cc index 4ab224b6cd0d09..e73a9c9f90b1bf 100644 --- a/deps/v8/src/compiler/js-inlining-heuristic.cc +++ b/deps/v8/src/compiler/js-inlining-heuristic.cc @@ -14,14 +14,15 @@ namespace v8 { namespace internal { namespace compiler { -#define TRACE(...) \ - do { \ - if (FLAG_trace_turbo_inlining) StdoutStream{} << __VA_ARGS__ << std::endl; \ +#define TRACE(...) \ + do { \ + if (v8_flags.trace_turbo_inlining) \ + StdoutStream{} << __VA_ARGS__ << std::endl; \ } while (false) namespace { bool IsSmall(int const size) { - return size <= FLAG_max_inlined_bytecode_size_small; + return size <= v8_flags.max_inlined_bytecode_size_small; } bool CanConsiderForInlining(JSHeapBroker* broker, @@ -173,7 +174,7 @@ Reduction JSInliningHeuristic::Reduce(Node* node) { Candidate candidate = CollectFunctions(node, kMaxCallPolymorphism); if (candidate.num_functions == 0) { return NoChange(); - } else if (candidate.num_functions > 1 && !FLAG_polymorphic_inlining) { + } else if (candidate.num_functions > 1 && !v8_flags.polymorphic_inlining) { TRACE("Not considering call site #" << node->id() << ":" << node->op()->mnemonic() << ", because polymorphic inlining is disabled"); @@ -248,7 +249,7 @@ Reduction JSInliningHeuristic::Reduce(Node* node) { // threshold, i.e. a call site that is only hit once every N // invocations of the caller. if (candidate.frequency.IsKnown() && - candidate.frequency.value() < FLAG_min_inlining_frequency) { + candidate.frequency.value() < v8_flags.min_inlining_frequency) { return NoChange(); } @@ -275,7 +276,7 @@ Reduction JSInliningHeuristic::Reduce(Node* node) { void JSInliningHeuristic::Finalize() { if (candidates_.empty()) return; // Nothing to do without candidates. 
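// [Editor's sketch -- illustrative, not part of the patch] The budget logic
// in Finalize() below boils down to one scalar check; the parameter names are
// stand-ins for the v8_flags values, not the real variables:
bool FitsInliningBudget(int candidate_total_size, int already_inlined,
                        int budget_cumulative, double reserve_scale_factor) {
  // Scale the candidate up so some budget stays reserved for small callees
  // exposed by this inlining (the reserve_inline_budget_scale_factor idea).
  double scaled = candidate_total_size * reserve_scale_factor;
  return already_inlined + static_cast<int>(scaled) <= budget_cumulative;
}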
- if (FLAG_trace_turbo_inlining) PrintCandidates(); + if (v8_flags.trace_turbo_inlining) PrintCandidates(); // We inline at most one candidate in every iteration of the fixpoint. // This is to ensure that we don't consume the full inlining budget @@ -293,7 +294,7 @@ void JSInliningHeuristic::Finalize() { // Make sure we have some extra budget left, so that any small functions // exposed by this function would be given a chance to inline. double size_of_candidate = - candidate.total_size * FLAG_reserve_inline_budget_scale_factor; + candidate.total_size * v8_flags.reserve_inline_budget_scale_factor; int total_size = total_inlined_bytecode_size_ + static_cast<int>(size_of_candidate); if (total_size > max_inlined_bytecode_size_cumulative_) { diff --git a/deps/v8/src/compiler/js-inlining-heuristic.h b/deps/v8/src/compiler/js-inlining-heuristic.h index 93571653526655..3a830943bdf415 100644 --- a/deps/v8/src/compiler/js-inlining-heuristic.h +++ b/deps/v8/src/compiler/js-inlining-heuristic.h @@ -30,9 +30,9 @@ class JSInliningHeuristic final : public AdvancedReducer { broker_(broker), mode_(mode), max_inlined_bytecode_size_cumulative_( - FLAG_max_inlined_bytecode_size_cumulative), + v8_flags.max_inlined_bytecode_size_cumulative), max_inlined_bytecode_size_absolute_( - FLAG_max_inlined_bytecode_size_absolute) {} + v8_flags.max_inlined_bytecode_size_absolute) {} const char* reducer_name() const override { return "JSInliningHeuristic"; } diff --git a/deps/v8/src/compiler/js-inlining.cc b/deps/v8/src/compiler/js-inlining.cc index a3aac2a032f5f8..4baabf3077565f 100644 --- a/deps/v8/src/compiler/js-inlining.cc +++ b/deps/v8/src/compiler/js-inlining.cc @@ -34,11 +34,11 @@ namespace { static const int kMaxDepthForInlining = 50; } // namespace -#define TRACE(x) \ - do { \ - if (FLAG_trace_turbo_inlining) { \ - StdoutStream() << x << "\n"; \ - } \ +#define TRACE(x) \ + do { \ + if (v8_flags.trace_turbo_inlining) { \ + StdoutStream() << x << "\n"; \ + } \ } while (false) // Provides convenience accessors for the common layout of nodes having either diff --git a/deps/v8/src/compiler/js-intrinsic-lowering.cc b/deps/v8/src/compiler/js-intrinsic-lowering.cc index 831697d64574dc..7ad3f5c78d4a16 100644 --- a/deps/v8/src/compiler/js-intrinsic-lowering.cc +++ b/deps/v8/src/compiler/js-intrinsic-lowering.cc @@ -70,8 +70,8 @@ Reduction JSIntrinsicLowering::Reduce(Node* node) { return ReduceAsyncGeneratorReject(node); case Runtime::kInlineAsyncGeneratorResolve: return ReduceAsyncGeneratorResolve(node); - case Runtime::kInlineAsyncGeneratorYield: - return ReduceAsyncGeneratorYield(node); + case Runtime::kInlineAsyncGeneratorYieldWithAwait: + return ReduceAsyncGeneratorYieldWithAwait(node); case Runtime::kInlineGeneratorGetResumeMode: return ReduceGeneratorGetResumeMode(node); case Runtime::kInlineIncBlockCounter: @@ -216,9 +216,11 @@ Reduction JSIntrinsicLowering::ReduceAsyncGeneratorResolve(Node* node) { 0); } -Reduction JSIntrinsicLowering::ReduceAsyncGeneratorYield(Node* node) { +Reduction JSIntrinsicLowering::ReduceAsyncGeneratorYieldWithAwait(Node* node) { return Change( - node, Builtins::CallableFor(isolate(), Builtin::kAsyncGeneratorYield), 0); + node, + Builtins::CallableFor(isolate(), Builtin::kAsyncGeneratorYieldWithAwait), + 0); } Reduction JSIntrinsicLowering::ReduceGeneratorGetResumeMode(Node* node) { @@ -277,7 +279,7 @@ Reduction JSIntrinsicLowering::ReduceIsJSReceiver(Node* node) { } Reduction JSIntrinsicLowering::ReduceTurbofanStaticAssert(Node* node) { - if (FLAG_always_turbofan) { + if 
(v8_flags.always_turbofan) {
     // Ignore static asserts, as we most likely won't have enough information
     RelaxEffectsAndControls(node);
   } else {
diff --git a/deps/v8/src/compiler/js-intrinsic-lowering.h b/deps/v8/src/compiler/js-intrinsic-lowering.h
index 00188243c15a5e..0affbb184dde75 100644
--- a/deps/v8/src/compiler/js-intrinsic-lowering.h
+++ b/deps/v8/src/compiler/js-intrinsic-lowering.h
@@ -53,7 +53,7 @@ class V8_EXPORT_PRIVATE JSIntrinsicLowering final
   Reduction ReduceAsyncGeneratorAwaitUncaught(Node* node);
   Reduction ReduceAsyncGeneratorReject(Node* node);
   Reduction ReduceAsyncGeneratorResolve(Node* node);
-  Reduction ReduceAsyncGeneratorYield(Node* node);
+  Reduction ReduceAsyncGeneratorYieldWithAwait(Node* node);
   Reduction ReduceGeneratorGetResumeMode(Node* node);
   Reduction ReduceIsInstanceType(Node* node, InstanceType instance_type);
   Reduction ReduceIsJSReceiver(Node* node);
diff --git a/deps/v8/src/compiler/js-native-context-specialization.cc b/deps/v8/src/compiler/js-native-context-specialization.cc
index 39302152ed44b2..31b6d2dd0944a4 100644
--- a/deps/v8/src/compiler/js-native-context-specialization.cc
+++ b/deps/v8/src/compiler/js-native-context-specialization.cc
@@ -20,6 +20,7 @@
 #include "src/compiler/linkage.h"
 #include "src/compiler/map-inference.h"
 #include "src/compiler/node-matchers.h"
+#include "src/compiler/node-properties.h"
 #include "src/compiler/property-access-builder.h"
 #include "src/compiler/simplified-operator.h"
 #include "src/compiler/type-cache.h"
@@ -65,7 +66,8 @@ JSNativeContextSpecialization::JSNativeContextSpecialization(
       dependencies_(dependencies),
       zone_(zone),
       shared_zone_(shared_zone),
-      type_cache_(TypeCache::Get()) {}
+      type_cache_(TypeCache::Get()),
+      created_strings_(zone) {}

 Reduction JSNativeContextSpecialization::Reduce(Node* node) {
   switch (node->opcode()) {
@@ -79,6 +81,8 @@ Reduction JSNativeContextSpecialization::Reduce(Node* node) {
       return ReduceJSAsyncFunctionResolve(node);
     case IrOpcode::kJSGetSuperConstructor:
       return ReduceJSGetSuperConstructor(node);
+    case IrOpcode::kJSFindNonDefaultConstructorOrConstruct:
+      return ReduceJSFindNonDefaultConstructorOrConstruct(node);
     case IrOpcode::kJSInstanceOf:
       return ReduceJSInstanceOf(node);
     case IrOpcode::kJSHasInPrototypeChain:
@@ -135,7 +139,6 @@ base::Optional<size_t> JSNativeContextSpecialization::GetMaxStringLength(
   HeapObjectMatcher matcher(node);
   if (matcher.HasResolvedValue() && matcher.Ref(broker).IsString()) {
     StringRef input = matcher.Ref(broker).AsString();
-    if (!input.IsContentAccessible()) return base::nullopt;
     return input.length();
   }

@@ -196,6 +199,13 @@ Handle<String> JSNativeContextSpecialization::CreateStringConstant(Node* node) {
             ->local_isolate_or_isolate()
             ->factory()
             ->NewNumber<AllocationType::kOld>(number_matcher.ResolvedValue());
+    // Note that we do not store the result of NumberToString in
+    // {created_strings_}, because the latter is used to know if strings are
+    // safe to be used in the background, but we always have as additional
+    // information the node from which the string was created ({node} in that
+    // case), and if this node is a kHeapNumber, then we know that we must have
+    // created the string, and that it is then safe to read. So, we don't need
+    // {created_strings_} in that case.
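// [Editor's sketch -- illustrative, not part of the patch] The rule the
// comment above alludes to, and which StringCanSafelyBeRead implements later
// in this file, condensed into one predicate; the bools summarize checks the
// real code performs through the broker:
bool SafeToReadString(bool on_main_thread, bool from_number_constant,
                      bool content_inaccessible, bool created_by_us) {
  if (on_main_thread) return true;        // main thread may read any string
  if (from_number_constant) return true;  // we stringified it ourselves
  return !content_inaccessible || created_by_us;
}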
return broker()->local_isolate_or_isolate()->factory()->NumberToString( num_obj); } else { @@ -213,6 +223,15 @@ bool IsStringConstant(JSHeapBroker* broker, Node* node) { HeapObjectMatcher matcher(node); return matcher.HasResolvedValue() && matcher.Ref(broker).IsString(); } + +bool IsStringWithNonAccessibleContent(JSHeapBroker* broker, Node* node) { + HeapObjectMatcher matcher(node); + if (matcher.HasResolvedValue() && matcher.Ref(broker).IsString()) { + StringRef input = matcher.Ref(broker).AsString(); + return !input.IsContentAccessible(); + } + return false; +} } // namespace Reduction JSNativeContextSpecialization::ReduceJSAsyncFunctionEnter( @@ -321,16 +340,14 @@ Reduction JSNativeContextSpecialization::ReduceJSAsyncFunctionResolve( return Replace(promise); } -namespace { - // Concatenates {left} and {right}. The result is fairly similar to creating a // new ConsString with {left} and {right} and then flattening it, which we don't // do because String::Flatten does not support background threads. Rather than // implementing a full String::Flatten for background threads, we prefered to // implement this Concatenate function, which, unlike String::Flatten, doesn't // need to replace ConsStrings by ThinStrings. -Handle<String> Concatenate(Handle<String> left, Handle<String> right, - JSHeapBroker* broker) { +Handle<String> JSNativeContextSpecialization::Concatenate( + Handle<String> left, Handle<String> right) { if (left->length() == 0) return right; if (right->length() == 0) return left; @@ -357,7 +374,8 @@ Handle<String> Concatenate(Handle<String> left, Handle<String> right, // generational write-barrier supports background threads. if (!LocalHeap::Current() || (!ObjectInYoungGeneration(*left) && !ObjectInYoungGeneration(*right))) { - return broker->local_isolate_or_isolate() + return broker() + ->local_isolate_or_isolate() ->factory() ->NewConsString(left, right, AllocationType::kOld) .ToHandleChecked(); @@ -367,19 +385,24 @@ Handle<String> Concatenate(Handle<String> left, Handle<String> right, // If one of the string is not in readonly space, then we need a // SharedStringAccessGuardIfNeeded before accessing its content. bool require_guard = SharedStringAccessGuardIfNeeded::IsNeeded( - *left, broker->local_isolate_or_isolate()) || + *left, broker()->local_isolate_or_isolate()) || SharedStringAccessGuardIfNeeded::IsNeeded( - *right, broker->local_isolate_or_isolate()); + *right, broker()->local_isolate_or_isolate()); SharedStringAccessGuardIfNeeded access_guard( - require_guard ? broker->local_isolate_or_isolate() : nullptr); + require_guard ? broker()->local_isolate_or_isolate() : nullptr); if (left->IsOneByteRepresentation() && right->IsOneByteRepresentation()) { // {left} and {right} are 1-byte ==> the result will be 1-byte. - Handle<SeqOneByteString> flat = - broker->local_isolate_or_isolate() + // Note that we need a canonical handle, because we insert in + // {created_strings_} the handle's address, which is kinda meaningless if + // the handle isn't canonical. 
+ Handle<SeqOneByteString> flat = broker()->CanonicalPersistentHandle( + broker() + ->local_isolate_or_isolate() ->factory() ->NewRawOneByteString(length, AllocationType::kOld) - .ToHandleChecked(); + .ToHandleChecked()); + created_strings_.insert(flat); DisallowGarbageCollection no_gc; String::WriteToFlat(*left, flat->GetChars(no_gc, access_guard), 0, left->length(), GetPtrComprCageBase(*left), @@ -391,11 +414,13 @@ Handle<String> Concatenate(Handle<String> left, Handle<String> right, } else { // One (or both) of {left} and {right} is 2-byte ==> the result will be // 2-byte. - Handle<SeqTwoByteString> flat = - broker->local_isolate_or_isolate() + Handle<SeqTwoByteString> flat = broker()->CanonicalPersistentHandle( + broker() + ->local_isolate_or_isolate() ->factory() ->NewRawTwoByteString(length, AllocationType::kOld) - .ToHandleChecked(); + .ToHandleChecked()); + created_strings_.insert(flat); DisallowGarbageCollection no_gc; String::WriteToFlat(*left, flat->GetChars(no_gc, access_guard), 0, left->length(), GetPtrComprCageBase(*left), @@ -407,7 +432,22 @@ Handle<String> Concatenate(Handle<String> left, Handle<String> right, } } -} // namespace +bool JSNativeContextSpecialization::StringCanSafelyBeRead(Node* const node, + Handle<String> str) { + DCHECK(node->opcode() == IrOpcode::kHeapConstant || + node->opcode() == IrOpcode::kNumberConstant); + if (broker()->IsMainThread()) { + // All strings are safe to be read on the main thread. + return true; + } + if (node->opcode() == IrOpcode::kNumberConstant) { + // If {node} is a number constant, then {str} is the stringification of this + // number which we must have created ourselves. + return true; + } + return !IsStringWithNonAccessibleContent(broker(), node) || + created_strings_.find(str) != created_strings_.end(); +} Reduction JSNativeContextSpecialization::ReduceJSAdd(Node* node) { // TODO(turbofan): This has to run together with the inlining and @@ -427,10 +467,45 @@ Reduction JSNativeContextSpecialization::ReduceJSAdd(Node* node) { // addition won't throw due to too long result. if (*lhs_len + *rhs_len <= String::kMaxLength && (IsStringConstant(broker(), lhs) || IsStringConstant(broker(), rhs))) { - Handle<String> left = CreateStringConstant(lhs); - Handle<String> right = CreateStringConstant(rhs); + // We need canonical handles for {left} and {right}, in order to be able to + // search {created_strings_} if needed. + Handle<String> left = + broker()->CanonicalPersistentHandle(CreateStringConstant(lhs)); + Handle<String> right = + broker()->CanonicalPersistentHandle(CreateStringConstant(rhs)); + + if (!(StringCanSafelyBeRead(lhs, left) && + StringCanSafelyBeRead(rhs, right))) { + // One of {lhs} or {rhs} is not safe to be read in the background. + + if (left->length() + right->length() > ConsString::kMinLength && + (!LocalHeap::Current() || (!ObjectInYoungGeneration(*left) && + !ObjectInYoungGeneration(*right)))) { + // We can create a ConsString with {left} and {right}, without needing + // to read their content (and this ConsString will not introduce + // old-to-new pointers from the background). 
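// [Editor's sketch -- illustrative, not part of the patch] The bail-out
// decision taken just below, in isolation; min_cons_length stands in for
// ConsString::kMinLength and the bool summarizes the young-generation checks:
#include <cstddef>

enum class BackgroundConcat { kConsString, kBailOut };

BackgroundConcat ClassifyUnreadableConcat(std::size_t left_len,
                                          std::size_t right_len,
                                          std::size_t min_cons_length,
                                          bool both_in_old_generation) {
  // Without reading contents we can only build a ConsString, which must be
  // long enough and must not require a young-generation write barrier.
  if (left_len + right_len > min_cons_length && both_in_old_generation) {
    return BackgroundConcat::kConsString;
  }
  return BackgroundConcat::kBailOut;
}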
+        Handle<String> concatenated =
+            broker()
+                ->local_isolate_or_isolate()
+                ->factory()
+                ->NewConsString(left, right, AllocationType::kOld)
+                .ToHandleChecked();
+        Node* reduced = graph()->NewNode(common()->HeapConstant(
+            broker()->CanonicalPersistentHandle(concatenated)));
+        ReplaceWithValue(node, reduced);
+        return Replace(reduced);
+      } else {
+        // Concatenating those strings would not produce a ConsString but rather
+        // a flat string (because the result is small). And, since the strings
+        // are not safe to be read in the background, this wouldn't be safe.
+        // Or, one of the strings is in the young generation, and since the
+        // generational barrier doesn't support background threads, we cannot
+        // create the ConsString.
+        return NoChange();
+      }
+    }

-    Handle<String> concatenated = Concatenate(left, right, broker());
+    Handle<String> concatenated = Concatenate(left, right);
     Node* reduced = graph()->NewNode(common()->HeapConstant(
         broker()->CanonicalPersistentHandle(concatenated)));

@@ -468,6 +543,119 @@ Reduction JSNativeContextSpecialization::ReduceJSGetSuperConstructor(
   return NoChange();
 }

+Reduction
+JSNativeContextSpecialization::ReduceJSFindNonDefaultConstructorOrConstruct(
+    Node* node) {
+  JSFindNonDefaultConstructorOrConstructNode n(node);
+  Node* this_function = n.this_function();
+  Node* new_target = n.new_target();
+  Node* effect = n.effect();
+  Control control = n.control();
+
+  // If the JSFindNonDefaultConstructorOrConstruct operation is inside a
+  // try-catch, wiring up the graph is complex (reason: if
+  // JSFindNonDefaultConstructorOrConstruct reduces to a constant which is
+  // something other than a default base ctor, it cannot throw an exception, and
+  // the try-catch structure has to be rewired). As this use case is rare, give
+  // up optimizing it here.
+  if (NodeProperties::IsExceptionalCall(node)) {
+    return NoChange();
+  }
+
+  // TODO(v8:13091): Don't produce incomplete stack traces when debug is active.
+  // We already deopt when a breakpoint is set. But it would be even nicer to
+  // avoid producing incomplete stack traces when debug is active, even if
+  // there are no breakpoints - then a user inspecting stack traces via Dev
+  // Tools would always see the full stack trace.
+
+  // Check if the input is a known JSFunction.
+  HeapObjectMatcher m(this_function);
+  if (!m.HasResolvedValue() || !m.Ref(broker()).IsJSFunction()) {
+    return NoChange();
+  }
+
+  JSFunctionRef this_function_ref = m.Ref(broker()).AsJSFunction();
+  MapRef function_map = this_function_ref.map();
+  HeapObjectRef current = function_map.prototype();
+
+  Node* return_value;
+  Node* ctor_or_instance;
+
+  // Walk the class inheritance tree until we find a ctor which is not a default
+  // derived ctor.
+  while (true) {
+    if (!current.IsJSFunction()) {
+      return NoChange();
+    }
+    JSFunctionRef current_function = current.AsJSFunction();
+
+    // If there are class fields, bail out. TODO(v8:13091): Handle them here.
+    if (current_function.shared().requires_instance_members_initializer()) {
+      return NoChange();
+    }
+
+    // If there are private methods, bail out. TODO(v8:13091): Handle them here.
+    if (current_function.context().scope_info().ClassScopeHasPrivateBrand()) {
+      return NoChange();
+    }
+
+    FunctionKind kind = current_function.shared().kind();
+
+    if (kind != FunctionKind::kDefaultDerivedConstructor) {
+      // The hierarchy walk will end here; this is the last chance to bail out
+      // before creating new nodes.
+ if (!dependencies()->DependOnArrayIteratorProtector()) { + return NoChange(); + } + + if (kind == FunctionKind::kDefaultBaseConstructor) { + return_value = jsgraph()->BooleanConstant(true); + + // Generate a builtin call for creating the instance. + Node* constructor = jsgraph()->Constant(current_function); + + effect = ctor_or_instance = graph()->NewNode( + jsgraph()->javascript()->Create(), constructor, new_target, + n.context(), n.frame_state(), effect, control); + } else { + return_value = jsgraph()->BooleanConstant(false); + ctor_or_instance = jsgraph()->Constant(current_function); + } + break; + } + + // Keep walking up the class tree. + current = current_function.map().prototype(); + } + + dependencies()->DependOnStablePrototypeChain(function_map, + WhereToStart::kStartAtReceiver); + + // Update the uses of {node}. + for (Edge edge : node->use_edges()) { + Node* const user = edge.from(); + if (NodeProperties::IsEffectEdge(edge)) { + edge.UpdateTo(effect); + } else if (NodeProperties::IsControlEdge(edge)) { + edge.UpdateTo(control); + } else { + DCHECK(NodeProperties::IsValueEdge(edge)); + switch (ProjectionIndexOf(user->op())) { + case 0: + Replace(user, return_value); + break; + case 1: + Replace(user, ctor_or_instance); + break; + default: + UNREACHABLE(); + } + } + } + node->Kill(); + return Replace(return_value); +} + Reduction JSNativeContextSpecialization::ReduceJSInstanceOf(Node* node) { JSInstanceOfNode n(node); FeedbackParameter const& p = n.Parameters(); @@ -1164,7 +1352,7 @@ Reduction JSNativeContextSpecialization::ReduceMegaDOMPropertyAccess( simplified()->LoadField(AccessBuilder::ForMapInstanceType()), receiver_map, effect, control); - if (FLAG_embedder_instance_types && range_start != 0) { + if (v8_flags.embedder_instance_types && range_start != 0) { // Embedder instance ID is set, doing a simple range check. Node* diff_to_start = graph()->NewNode(simplified()->NumberSubtract(), receiver_instance_type, @@ -1251,7 +1439,7 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess( // lookup_start_object = the object where we start looking for the property. Node* lookup_start_object; if (node->opcode() == IrOpcode::kJSLoadNamedFromSuper) { - DCHECK(FLAG_super_ic); + DCHECK(v8_flags.super_ic); JSLoadNamedFromSuperNode n(node); // Lookup start object is the __proto__ of the home object. lookup_start_object = effect = @@ -2438,10 +2626,11 @@ Node* JSNativeContextSpecialization::InlinePropertyGetterCall( if (receiver != lookup_start_object) { return nullptr; } - Node* holder = access_info.holder().has_value() - ? jsgraph()->Constant(access_info.holder().value()) - : receiver; - value = InlineApiCall(receiver, holder, frame_state, nullptr, effect, + Node* api_holder = + access_info.api_holder().has_value() + ? jsgraph()->Constant(access_info.api_holder().value()) + : receiver; + value = InlineApiCall(receiver, api_holder, frame_state, nullptr, effect, control, constant.AsFunctionTemplateInfo()); } // Remember to rewire the IfException edge if this is inside a try-block. @@ -2472,10 +2661,11 @@ void JSNativeContextSpecialization::InlinePropertySetterCall( target, receiver, value, feedback, context, frame_state, *effect, *control); } else { - Node* holder = access_info.holder().has_value() - ? jsgraph()->Constant(access_info.holder().value()) - : receiver; - InlineApiCall(receiver, holder, frame_state, value, effect, control, + Node* api_holder = + access_info.api_holder().has_value() + ? 
jsgraph()->Constant(access_info.api_holder().value()) + : receiver; + InlineApiCall(receiver, api_holder, frame_state, value, effect, control, constant.AsFunctionTemplateInfo()); } // Remember to rewire the IfException edge if this is inside a try-block. @@ -2490,8 +2680,9 @@ void JSNativeContextSpecialization::InlinePropertySetterCall( } Node* JSNativeContextSpecialization::InlineApiCall( - Node* receiver, Node* holder, Node* frame_state, Node* value, Node** effect, - Node** control, FunctionTemplateInfoRef const& function_template_info) { + Node* receiver, Node* api_holder, Node* frame_state, Node* value, + Node** effect, Node** control, + FunctionTemplateInfoRef const& function_template_info) { if (!function_template_info.call_code().has_value()) { TRACE_BROKER_MISSING(broker(), "call code for function template info " << function_template_info); @@ -2520,9 +2711,8 @@ Node* JSNativeContextSpecialization::InlineApiCall( // Add CallApiCallbackStub's register argument as well. Node* context = jsgraph()->Constant(native_context()); - Node* inputs[11] = { - code, function_reference, jsgraph()->Constant(argc), data, holder, - receiver}; + Node* inputs[11] = {code, function_reference, jsgraph()->Constant(argc), + data, api_holder, receiver}; int index = 6 + argc; inputs[index++] = context; inputs[index++] = frame_state; @@ -3165,7 +3355,7 @@ JSNativeContextSpecialization::BuildElementAccess( // Don't try to store to a copy-on-write backing store (unless supported by // the store mode). - if (keyed_mode.access_mode() == AccessMode::kStore && + if (IsAnyStore(keyed_mode.access_mode()) && IsSmiOrObjectElementsKind(elements_kind) && !IsCOWHandlingStoreMode(keyed_mode.store_mode())) { effect = graph()->NewNode( diff --git a/deps/v8/src/compiler/js-native-context-specialization.h b/deps/v8/src/compiler/js-native-context-specialization.h index 4996cec82eb1a0..4730f9850d094c 100644 --- a/deps/v8/src/compiler/js-native-context-specialization.h +++ b/deps/v8/src/compiler/js-native-context-specialization.h @@ -10,6 +10,7 @@ #include "src/compiler/graph-reducer.h" #include "src/compiler/js-heap-broker.h" #include "src/deoptimizer/deoptimize-reason.h" +#include "src/zone/zone-containers.h" namespace v8 { namespace internal { @@ -74,6 +75,7 @@ class V8_EXPORT_PRIVATE JSNativeContextSpecialization final Reduction ReduceJSAsyncFunctionReject(Node* node); Reduction ReduceJSAsyncFunctionResolve(Node* node); Reduction ReduceJSGetSuperConstructor(Node* node); + Reduction ReduceJSFindNonDefaultConstructorOrConstruct(Node* node); Reduction ReduceJSInstanceOf(Node* node); Reduction ReduceJSHasInPrototypeChain(Node* node); Reduction ReduceJSOrdinaryHasInstance(Node* node); @@ -180,7 +182,7 @@ class V8_EXPORT_PRIVATE JSNativeContextSpecialization final Node** control, ZoneVector<Node*>* if_exceptions, PropertyAccessInfo const& access_info); - Node* InlineApiCall(Node* receiver, Node* holder, Node* frame_state, + Node* InlineApiCall(Node* receiver, Node* api_holder, Node* frame_state, Node* value, Node** effect, Node** control, FunctionTemplateInfoRef const& function_template_info); @@ -205,6 +207,17 @@ class V8_EXPORT_PRIVATE JSNativeContextSpecialization final Node* BuildCheckEqualsName(NameRef const& name, Node* value, Node* effect, Node* control); + // Concatenates {left} and {right}. 
+ Handle<String> Concatenate(Handle<String> left, Handle<String> right); + + // Returns true if {str} can safely be read: + // - if we are on the main thread, then any string can safely be read + // - in the background, we can only read some string shapes, except if we + // created the string ourselves. + // {node} is the node from which we got {str}, but which is still taken as + // parameter to simplify the checks. + bool StringCanSafelyBeRead(Node* const node, Handle<String> str); + // Checks if we can turn the hole into undefined when loading an element // from an object with one of the {receiver_maps}; sets up appropriate // code dependencies and might use the array protector cell. @@ -264,6 +277,9 @@ class V8_EXPORT_PRIVATE JSNativeContextSpecialization final Zone* const zone_; Zone* const shared_zone_; TypeCache const* type_cache_; + ZoneUnorderedSet<Handle<String>, Handle<String>::hash, + Handle<String>::equal_to> + created_strings_; }; DEFINE_OPERATORS_FOR_FLAGS(JSNativeContextSpecialization::Flags) diff --git a/deps/v8/src/compiler/js-operator.cc b/deps/v8/src/compiler/js-operator.cc index 3945a8730f1d2c..8af8e7d32fb68f 100644 --- a/deps/v8/src/compiler/js-operator.cc +++ b/deps/v8/src/compiler/js-operator.cc @@ -770,6 +770,7 @@ Type JSWasmCallNode::TypeForWasmReturnType(const wasm::ValueType& type) { V(RejectPromise, Operator::kNoDeopt | Operator::kNoThrow, 3, 1) \ V(ResolvePromise, Operator::kNoDeopt | Operator::kNoThrow, 2, 1) \ V(GetSuperConstructor, Operator::kNoWrite | Operator::kNoThrow, 1, 1) \ + V(FindNonDefaultConstructorOrConstruct, Operator::kNoProperties, 2, 2) \ V(ParseInt, Operator::kNoProperties, 2, 1) \ V(RegExpTest, Operator::kNoProperties, 2, 1) diff --git a/deps/v8/src/compiler/js-operator.h b/deps/v8/src/compiler/js-operator.h index 12408aea6e0299..623f166bc20d9a 100644 --- a/deps/v8/src/compiler/js-operator.h +++ b/deps/v8/src/compiler/js-operator.h @@ -1040,6 +1040,8 @@ class V8_EXPORT_PRIVATE JSOperatorBuilder final const Operator* GetSuperConstructor(); + const Operator* FindNonDefaultConstructorOrConstruct(); + const Operator* CreateGeneratorObject(); const Operator* LoadGlobal(const NameRef& name, @@ -1757,6 +1759,22 @@ class JSForInNextNode final : public JSNodeWrapperBase { #undef INPUTS }; +class JSFindNonDefaultConstructorOrConstructNode final + : public JSNodeWrapperBase { + public: + explicit constexpr JSFindNonDefaultConstructorOrConstructNode(Node* node) + : JSNodeWrapperBase(node) { + DCHECK_EQ(IrOpcode::kJSFindNonDefaultConstructorOrConstruct, + node->opcode()); + } + +#define INPUTS(V) \ + V(ThisFunction, this_function, 0, Object) \ + V(NewTarget, new_target, 1, Object) + INPUTS(DEFINE_INPUT_ACCESSORS) +#undef INPUTS +}; + #undef DEFINE_INPUT_ACCESSORS } // namespace compiler diff --git a/deps/v8/src/compiler/js-type-hint-lowering.cc b/deps/v8/src/compiler/js-type-hint-lowering.cc index 0f450106802b3c..54b3d056b5cd99 100644 --- a/deps/v8/src/compiler/js-type-hint-lowering.cc +++ b/deps/v8/src/compiler/js-type-hint-lowering.cc @@ -36,6 +36,7 @@ bool BinaryOperationHintToNumberOperationHint( case BinaryOperationHint::kNone: case BinaryOperationHint::kString: case BinaryOperationHint::kBigInt: + case BinaryOperationHint::kBigInt64: break; } return false; @@ -52,6 +53,9 @@ bool BinaryOperationHintToBigIntOperationHint( case BinaryOperationHint::kNone: case BinaryOperationHint::kString: return false; + case BinaryOperationHint::kBigInt64: + *bigint_hint = BigIntOperationHint::kBigInt64; + return true; case BinaryOperationHint::kBigInt: 
*bigint_hint = BigIntOperationHint::kBigInt; return true; diff --git a/deps/v8/src/compiler/linkage.h b/deps/v8/src/compiler/linkage.h index e65efa8db68b86..3b41f3bc199a68 100644 --- a/deps/v8/src/compiler/linkage.h +++ b/deps/v8/src/compiler/linkage.h @@ -108,7 +108,7 @@ class LinkageLocation { } static LinkageLocation ForSavedCallerConstantPool() { - DCHECK(V8_EMBEDDED_CONSTANT_POOL); + DCHECK(V8_EMBEDDED_CONSTANT_POOL_BOOL); return ForCalleeFrameSlot((StandardFrameConstants::kCallerPCOffset - StandardFrameConstants::kConstantPoolOffset) / kSystemPointerSize, diff --git a/deps/v8/src/compiler/load-elimination.cc b/deps/v8/src/compiler/load-elimination.cc index d526dc1cae736e..cc56abe4930470 100644 --- a/deps/v8/src/compiler/load-elimination.cc +++ b/deps/v8/src/compiler/load-elimination.cc @@ -72,7 +72,7 @@ bool MustAlias(Node* a, Node* b) { } // namespace Reduction LoadElimination::Reduce(Node* node) { - if (FLAG_trace_turbo_load_elimination) { + if (v8_flags.trace_turbo_load_elimination) { if (node->op()->EffectInputCount() > 0) { PrintF(" visit #%d:%s", node->id(), node->op()->mnemonic()); if (node->op()->ValueInputCount() > 0) { diff --git a/deps/v8/src/compiler/loop-analysis.cc b/deps/v8/src/compiler/loop-analysis.cc index a9682e955d0c44..715e6b18a34eb4 100644 --- a/deps/v8/src/compiler/loop-analysis.cc +++ b/deps/v8/src/compiler/loop-analysis.cc @@ -5,6 +5,7 @@ #include "src/compiler/loop-analysis.h" #include "src/codegen/tick-counter.h" +#include "src/compiler/all-nodes.h" #include "src/compiler/common-operator.h" #include "src/compiler/graph.h" #include "src/compiler/node-marker.h" @@ -542,7 +543,7 @@ LoopTree* LoopFinder::BuildLoopTree(Graph* graph, TickCounter* tick_counter, graph->zone()->New<LoopTree>(graph->NodeCount(), graph->zone()); LoopFinderImpl finder(graph, loop_tree, tick_counter, zone); finder.Run(); - if (FLAG_trace_turbo_loop) { + if (v8_flags.trace_turbo_loop) { finder.Print(); } return loop_tree; @@ -551,7 +552,8 @@ LoopTree* LoopFinder::BuildLoopTree(Graph* graph, TickCounter* tick_counter, #if V8_ENABLE_WEBASSEMBLY // static ZoneUnorderedSet<Node*>* LoopFinder::FindSmallInnermostLoopFromHeader( - Node* loop_header, Zone* zone, size_t max_size, bool calls_are_large) { + Node* loop_header, AllNodes& all_nodes, Zone* zone, size_t max_size, + bool calls_are_large) { auto* visited = zone->New<ZoneUnorderedSet<Node*>>(zone); std::vector<Node*> queue; @@ -655,6 +657,8 @@ ZoneUnorderedSet<Node*>* LoopFinder::FindSmallInnermostLoopFromHeader( // The loop header is allowed to point outside the loop. if (node == loop_header) continue; + if (!all_nodes.IsLive(node)) continue; + for (Edge edge : node->input_edges()) { Node* input = edge.to(); if (NodeProperties::IsControlEdge(edge) && visited->count(input) == 0 && @@ -694,7 +698,7 @@ bool LoopFinder::HasMarkedExits(LoopTree* loop_tree, unmarked_exit = (use->opcode() != IrOpcode::kTerminate); } if (unmarked_exit) { - if (FLAG_trace_turbo_loop) { + if (v8_flags.trace_turbo_loop) { PrintF( "Cannot peel loop %i. Loop exit without explicit mark: Node %i " "(%s) is inside loop, but its use %i (%s) is outside.\n", diff --git a/deps/v8/src/compiler/loop-analysis.h b/deps/v8/src/compiler/loop-analysis.h index e7c09da105dcac..d3c53b850be592 100644 --- a/deps/v8/src/compiler/loop-analysis.h +++ b/deps/v8/src/compiler/loop-analysis.h @@ -26,6 +26,7 @@ namespace compiler { static const int kAssumedLoopEntryIndex = 0; // assume loops are entered here. 
class LoopFinderImpl; +class AllNodes; using NodeRange = base::iterator_range<Node**>; @@ -190,7 +191,8 @@ class V8_EXPORT_PRIVATE LoopFinder { // calls to a set of wasm builtins, // 3) a nested loop is found in the loop. static ZoneUnorderedSet<Node*>* FindSmallInnermostLoopFromHeader( - Node* loop_header, Zone* zone, size_t max_size, bool calls_are_large); + Node* loop_header, AllNodes& all_nodes, Zone* zone, size_t max_size, + bool calls_are_large); #endif }; diff --git a/deps/v8/src/compiler/loop-peeling.cc b/deps/v8/src/compiler/loop-peeling.cc index cfc149f63991ac..ee46d5e49474c4 100644 --- a/deps/v8/src/compiler/loop-peeling.cc +++ b/deps/v8/src/compiler/loop-peeling.cc @@ -225,7 +225,7 @@ void LoopPeeler::PeelInnerLoops(LoopTree::Loop* loop) { } // Only peel small-enough loops. if (loop->TotalSize() > LoopPeeler::kMaxPeeledNodes) return; - if (FLAG_trace_turbo_loop) { + if (v8_flags.trace_turbo_loop) { PrintF("Peeling loop with header: "); for (Node* node : loop_tree_->HeaderNodes(loop)) { PrintF("%i ", node->id()); diff --git a/deps/v8/src/compiler/loop-variable-optimizer.cc b/deps/v8/src/compiler/loop-variable-optimizer.cc index 7f16e243c5d7bd..8d1caa22feb394 100644 --- a/deps/v8/src/compiler/loop-variable-optimizer.cc +++ b/deps/v8/src/compiler/loop-variable-optimizer.cc @@ -17,9 +17,9 @@ namespace internal { namespace compiler { // Macro for outputting trace information from representation inference. -#define TRACE(...) \ - do { \ - if (FLAG_trace_turbo_loop) PrintF(__VA_ARGS__); \ +#define TRACE(...) \ + do { \ + if (v8_flags.trace_turbo_loop) PrintF(__VA_ARGS__); \ } while (false) LoopVariableOptimizer::LoopVariableOptimizer(Graph* graph, @@ -76,7 +76,7 @@ void LoopVariableOptimizer::Run() { void InductionVariable::AddUpperBound(Node* bound, InductionVariable::ConstraintKind kind) { - if (FLAG_trace_turbo_loop) { + if (v8_flags.trace_turbo_loop) { StdoutStream{} << "New upper bound for " << phi()->id() << " (loop " << NodeProperties::GetControlInput(phi())->id() << "): " << *bound << std::endl; @@ -86,7 +86,7 @@ void InductionVariable::AddUpperBound(Node* bound, void InductionVariable::AddLowerBound(Node* bound, InductionVariable::ConstraintKind kind) { - if (FLAG_trace_turbo_loop) { + if (v8_flags.trace_turbo_loop) { StdoutStream{} << "New lower bound for " << phi()->id() << " (loop " << NodeProperties::GetControlInput(phi())->id() << "): " << *bound; diff --git a/deps/v8/src/compiler/machine-graph-verifier.cc b/deps/v8/src/compiler/machine-graph-verifier.cc index 664f2748404978..64f6a5a0c0c20a 100644 --- a/deps/v8/src/compiler/machine-graph-verifier.cc +++ b/deps/v8/src/compiler/machine-graph-verifier.cc @@ -51,6 +51,7 @@ class MachineRepresentationInferrer { : MachineRepresentation::kBit; case IrOpcode::kInt64AddWithOverflow: case IrOpcode::kInt64SubWithOverflow: + case IrOpcode::kInt64MulWithOverflow: CHECK_LE(index, static_cast<size_t>(1)); return index == 0 ? 
MachineRepresentation::kWord64 : MachineRepresentation::kBit; diff --git a/deps/v8/src/compiler/machine-operator-reducer.cc b/deps/v8/src/compiler/machine-operator-reducer.cc index 0d1bc47fc3483e..9436303aded017 100644 --- a/deps/v8/src/compiler/machine-operator-reducer.cc +++ b/deps/v8/src/compiler/machine-operator-reducer.cc @@ -1489,15 +1489,15 @@ Reduction MachineOperatorReducer::ReduceWord64Comparisons(Node* node) { m.left().op() == machine()->Word64SarShiftOutZeros() && m.left().node()->UseCount() == 1) { Int64BinopMatcher mleft(m.left().node()); - int64_t right = m.right().ResolvedValue(); + uint64_t right = m.right().ResolvedValue(); if (mleft.right().HasResolvedValue()) { auto shift = mleft.right().ResolvedValue(); if (CanRevertLeftShiftWithRightShift<int64_t>(right, shift)) { sign_extended = mleft.left().IsChangeInt32ToInt64(); - int64_t value = right << shift; + uint64_t value = right << shift; // Reducing to 32-bit comparison when possible. if ((sign_extended || mleft.left().IsChangeUint32ToUint64()) && - CanTruncate(value)) { + CanTruncate(static_cast<int64_t>(value))) { NodeProperties::ChangeOp( node, Map64To32Comparison(node->op(), sign_extended)); node->ReplaceInput(0, mleft.left().node()->InputAt(0)); @@ -1516,16 +1516,16 @@ Reduction MachineOperatorReducer::ReduceWord64Comparisons(Node* node) { if (m.left().HasResolvedValue() && m.right().op() == machine()->Word64SarShiftOutZeros() && m.right().node()->UseCount() == 1) { - int64_t left = m.left().ResolvedValue(); + uint64_t left = m.left().ResolvedValue(); Int64BinopMatcher mright(m.right().node()); if (mright.right().HasResolvedValue()) { auto shift = mright.right().ResolvedValue(); if (CanRevertLeftShiftWithRightShift<int64_t>(left, shift)) { sign_extended = mright.left().IsChangeInt32ToInt64(); - int64_t value = left << shift; + uint64_t value = left << shift; // Reducing to 32-bit comparison when possible. 
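// [Editor's sketch -- illustrative, not part of the patch] The int64_t ->
// uint64_t switch above matters because left-shifting a signed value whose
// high bits would be lost is undefined behavior in C++, while unsigned shifts
// wrap modulo 2^64. A simplified, unsigned form of the revertibility test:
#include <cstdint>

inline bool ShiftIsRevertible(std::uint64_t value, unsigned shift) {
  // True iff no set bits are dropped: shifting left then right restores value.
  return shift < 64 && ((value << shift) >> shift) == value;
}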
if ((sign_extended || mright.left().IsChangeUint32ToUint64()) && - CanTruncate(value)) { + CanTruncate(static_cast<int64_t>(value))) { NodeProperties::ChangeOp( node, Map64To32Comparison(node->op(), sign_extended)); node->ReplaceInput(0, Int32Constant(static_cast<int32_t>(value))); diff --git a/deps/v8/src/compiler/machine-operator.cc b/deps/v8/src/compiler/machine-operator.cc index 859de257a350d0..5a7ccfe3dcf614 100644 --- a/deps/v8/src/compiler/machine-operator.cc +++ b/deps/v8/src/compiler/machine-operator.cc @@ -305,25 +305,27 @@ std::ostream& operator<<(std::ostream& os, TruncateKind kind) { // The format is: // V(Name, properties, value_input_count, control_input_count, output_count) -#define PURE_BINARY_OP_LIST_64(V) \ - V(Word64And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ - V(Word64Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ - V(Word64Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ - V(Word64Shl, Operator::kNoProperties, 2, 0, 1) \ - V(Word64Shr, Operator::kNoProperties, 2, 0, 1) \ - V(Word64Ror, Operator::kNoProperties, 2, 0, 1) \ - V(Word64RorLowerable, Operator::kNoProperties, 2, 1, 1) \ - V(Word64Equal, Operator::kCommutative, 2, 0, 1) \ - V(Int64Add, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ - V(Int64Sub, Operator::kNoProperties, 2, 0, 1) \ - V(Int64Mul, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ - V(Int64Div, Operator::kNoProperties, 2, 1, 1) \ - V(Int64Mod, Operator::kNoProperties, 2, 1, 1) \ - V(Int64LessThan, Operator::kNoProperties, 2, 0, 1) \ - V(Int64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1) \ - V(Uint64Div, Operator::kNoProperties, 2, 1, 1) \ - V(Uint64Mod, Operator::kNoProperties, 2, 1, 1) \ - V(Uint64LessThan, Operator::kNoProperties, 2, 0, 1) \ +#define PURE_BINARY_OP_LIST_64(V) \ + V(Word64And, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ + V(Word64Or, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ + V(Word64Xor, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ + V(Word64Shl, Operator::kNoProperties, 2, 0, 1) \ + V(Word64Shr, Operator::kNoProperties, 2, 0, 1) \ + V(Word64Ror, Operator::kNoProperties, 2, 0, 1) \ + V(Word64RorLowerable, Operator::kNoProperties, 2, 1, 1) \ + V(Word64Equal, Operator::kCommutative, 2, 0, 1) \ + V(Int64Add, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ + V(Int64Sub, Operator::kNoProperties, 2, 0, 1) \ + V(Int64Mul, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ + V(Int64MulHigh, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ + V(Int64Div, Operator::kNoProperties, 2, 1, 1) \ + V(Int64Mod, Operator::kNoProperties, 2, 1, 1) \ + V(Int64LessThan, Operator::kNoProperties, 2, 0, 1) \ + V(Int64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1) \ + V(Uint64MulHigh, Operator::kAssociative | Operator::kCommutative, 2, 0, 1) \ + V(Uint64Div, Operator::kNoProperties, 2, 1, 1) \ + V(Uint64Mod, Operator::kNoProperties, 2, 1, 1) \ + V(Uint64LessThan, Operator::kNoProperties, 2, 0, 1) \ V(Uint64LessThanOrEqual, Operator::kNoProperties, 2, 0, 1) // The format is: @@ -673,7 +675,8 @@ std::ostream& operator<<(std::ostream& os, TruncateKind kind) { V(Int32SubWithOverflow, Operator::kNoProperties) \ V(Int32MulWithOverflow, Operator::kAssociative | Operator::kCommutative) \ V(Int64AddWithOverflow, Operator::kAssociative | Operator::kCommutative) \ - V(Int64SubWithOverflow, Operator::kNoProperties) + V(Int64SubWithOverflow, Operator::kNoProperties) \ + V(Int64MulWithOverflow, 
Operator::kAssociative | Operator::kCommutative) #define MACHINE_TYPE_LIST(V) \ V(Float32) \ diff --git a/deps/v8/src/compiler/machine-operator.h b/deps/v8/src/compiler/machine-operator.h index b84106b2a10ea0..5e28ea15f6f8ef 100644 --- a/deps/v8/src/compiler/machine-operator.h +++ b/deps/v8/src/compiler/machine-operator.h @@ -533,6 +533,8 @@ class V8_EXPORT_PRIVATE MachineOperatorBuilder final const Operator* Int64Sub(); const Operator* Int64SubWithOverflow(); const Operator* Int64Mul(); + const Operator* Int64MulHigh(); + const Operator* Int64MulWithOverflow(); const Operator* Int64Div(); const Operator* Int64Mod(); const Operator* Int64LessThan(); @@ -541,6 +543,7 @@ class V8_EXPORT_PRIVATE MachineOperatorBuilder final const Operator* Uint64LessThan(); const Operator* Uint64LessThanOrEqual(); const Operator* Uint64Mod(); + const Operator* Uint64MulHigh(); // This operator reinterprets the bits of a tagged pointer as a word. const Operator* BitcastTaggedToWord(); diff --git a/deps/v8/src/compiler/memory-lowering.cc b/deps/v8/src/compiler/memory-lowering.cc index 6e7d0a2ce6d7f3..306063fba9e212 100644 --- a/deps/v8/src/compiler/memory-lowering.cc +++ b/deps/v8/src/compiler/memory-lowering.cc @@ -48,7 +48,7 @@ class MemoryLowering::AllocationGroup final : public ZoneObject { static inline AllocationType CheckAllocationType(AllocationType allocation) { // For non-generational heap, all young allocations are redirected to old // space. - if (FLAG_single_generation && allocation == AllocationType::kYoung) { + if (v8_flags.single_generation && allocation == AllocationType::kYoung) { return AllocationType::kOld; } return allocation; @@ -131,13 +131,39 @@ Node* MemoryLowering::GetWasmInstanceNode() { #define __ gasm()-> +Node* MemoryLowering::AlignToAllocationAlignment(Node* value) { + if (!V8_COMPRESS_POINTERS_8GB_BOOL) return value; + + auto already_aligned = __ MakeLabel(MachineRepresentation::kWord64); + Node* alignment_check = __ WordEqual( + __ WordAnd(value, __ UintPtrConstant(kObjectAlignment8GbHeapMask)), + __ UintPtrConstant(0)); + + __ GotoIf(alignment_check, &already_aligned, value); + { + Node* aligned_value; + if (kObjectAlignment8GbHeap == 2 * kTaggedSize) { + aligned_value = __ IntPtrAdd(value, __ IntPtrConstant(kTaggedSize)); + } else { + aligned_value = __ WordAnd( + __ IntPtrAdd(value, __ IntPtrConstant(kObjectAlignment8GbHeapMask)), + __ UintPtrConstant(~kObjectAlignment8GbHeapMask)); + } + __ Goto(&already_aligned, aligned_value); + } + + __ Bind(&already_aligned); + + return already_aligned.PhiAt(0); +} + Reduction MemoryLowering::ReduceAllocateRaw( Node* node, AllocationType allocation_type, AllowLargeObjects allow_large_objects, AllocationState const** state_ptr) { DCHECK_EQ(IrOpcode::kAllocateRaw, node->opcode()); DCHECK_IMPLIES(allocation_folding_ == AllocationFolding::kDoAllocationFolding, state_ptr != nullptr); - if (FLAG_single_generation && allocation_type == AllocationType::kYoung) { + if (v8_flags.single_generation && allocation_type == AllocationType::kYoung) { allocation_type = AllocationType::kOld; } // Code objects may have a maximum size smaller than kMaxHeapObjectSize due to @@ -232,9 +258,10 @@ Reduction MemoryLowering::ReduceAllocateRaw( // Check if we can fold this allocation into a previous allocation represented // by the incoming {state}. 
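// [Editor's sketch -- illustrative, not part of the patch] For constant sizes,
// the ALIGN_TO_ALLOCATION_ALIGNMENT rounding used in the folding path below
// is ordinary power-of-two round-up:
#include <cstdint>

inline std::uintptr_t AlignUp(std::uintptr_t value, std::uintptr_t alignment) {
  std::uintptr_t mask = alignment - 1;  // alignment must be a power of two
  return (value + mask) & ~mask;        // round up to the next boundary
}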
IntPtrMatcher m(size); - if (m.IsInRange(0, kMaxRegularHeapObjectSize) && FLAG_inline_new && + if (m.IsInRange(0, kMaxRegularHeapObjectSize) && v8_flags.inline_new && allocation_folding_ == AllocationFolding::kDoAllocationFolding) { - intptr_t const object_size = m.ResolvedValue(); + intptr_t const object_size = + ALIGN_TO_ALLOCATION_ALIGNMENT(m.ResolvedValue()); AllocationState const* state = *state_ptr; if (state->size() <= kMaxRegularHeapObjectSize - object_size && state->group()->allocation() == allocation_type) { @@ -260,7 +287,9 @@ Reduction MemoryLowering::ReduceAllocateRaw( // Update the allocation top with the new object allocation. // TODO(bmeurer): Defer writing back top as much as possible. - Node* top = __ IntAdd(state->top(), size); + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(object_size, kObjectAlignment8GbHeap)); + Node* top = __ IntAdd(state->top(), __ IntPtrConstant(object_size)); __ Store(StoreRepresentation(MachineType::PointerRepresentation(), kNoWriteBarrier), top_address, __ IntPtrConstant(0), top); @@ -336,7 +365,7 @@ Reduction MemoryLowering::ReduceAllocateRaw( __ Load(MachineType::Pointer(), limit_address, __ IntPtrConstant(0)); // Compute the new top. - Node* new_top = __ IntAdd(top, size); + Node* new_top = __ IntAdd(top, AlignToAllocationAlignment(size)); // Check if we can do bump pointer allocation here. Node* check = __ UintLessThan(new_top, limit); @@ -463,6 +492,27 @@ Reduction MemoryLowering::ReduceLoadExternalPointerField(Node* node) { return Changed(node); } +Reduction MemoryLowering::ReduceLoadBoundedSize(Node* node) { +#ifdef V8_ENABLE_SANDBOX + const Operator* load_op = + !machine()->UnalignedLoadSupported(MachineRepresentation::kWord64) + ? machine()->UnalignedLoad(MachineType::Uint64()) + : machine()->Load(MachineType::Uint64()); + NodeProperties::ChangeOp(node, load_op); + + Node* effect = NodeProperties::GetEffectInput(node); + Node* control = NodeProperties::GetControlInput(node); + __ InitializeEffectControl(effect, control); + + Node* raw_value = __ AddNode(graph()->CloneNode(node)); + Node* shift_amount = __ IntPtrConstant(kBoundedSizeShift); + Node* decoded_size = __ Word64Shr(raw_value, shift_amount); + return Replace(decoded_size); +#else + UNREACHABLE(); +#endif +} + Reduction MemoryLowering::ReduceLoadMap(Node* node) { #ifdef V8_MAP_PACKING NodeProperties::ChangeOp(node, machine()->Load(MachineType::AnyTagged())); @@ -495,6 +545,10 @@ Reduction MemoryLowering::ReduceLoadField(Node* node) { return ReduceLoadExternalPointerField(node); } + if (access.is_bounded_size_access) { + return ReduceLoadBoundedSize(node); + } + NodeProperties::ChangeOp(node, machine()->Load(type)); return Changed(node); @@ -545,6 +599,8 @@ Reduction MemoryLowering::ReduceStoreField(Node* node, DCHECK(!access.type.Is(Type::ExternalPointer())); // SandboxedPointers are not currently stored by optimized code. DCHECK(!access.type.Is(Type::SandboxedPointer())); + // Bounded size fields are not currently stored by optimized code. 
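// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the patch): under
// V8_ENABLE_SANDBOX, ReduceLoadBoundedSize above decodes a "bounded size"
// field by shifting the raw 64-bit word right by kBoundedSizeShift, so even a
// corrupted field cannot decode to an arbitrarily large size. Scalar
// equivalent; the shift value used here is an assumption of this sketch:
#include <cstdint>
constexpr int kSketchBoundedSizeShift = 24;  // stands in for kBoundedSizeShift
inline uint64_t DecodeBoundedSizeSketch(uint64_t raw_value) {
  // Every decodable size is below 2^(64 - shift), regardless of raw_value.
  return raw_value >> kSketchBoundedSizeShift;
}
// ---------------------------------------------------------------------------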
+ DCHECK(!access.is_bounded_size_access); MachineType machine_type = access.machine_type; Node* object = node->InputAt(0); Node* value = node->InputAt(1); @@ -645,7 +701,7 @@ WriteBarrierKind MemoryLowering::ComputeWriteBarrierKind( if (!ValueNeedsWriteBarrier(value, isolate())) { write_barrier_kind = kNoWriteBarrier; } - if (FLAG_disable_write_barriers) { + if (v8_flags.disable_write_barriers) { write_barrier_kind = kNoWriteBarrier; } if (write_barrier_kind == WriteBarrierKind::kAssertNoWriteBarrier) { diff --git a/deps/v8/src/compiler/memory-lowering.h b/deps/v8/src/compiler/memory-lowering.h index 707a31572d2f10..536e910faea5f1 100644 --- a/deps/v8/src/compiler/memory-lowering.h +++ b/deps/v8/src/compiler/memory-lowering.h @@ -109,11 +109,16 @@ class MemoryLowering final : public Reducer { AllocationState const* state, WriteBarrierKind); Reduction ReduceLoadExternalPointerField(Node* node); + Reduction ReduceLoadBoundedSize(Node* node); Reduction ReduceLoadMap(Node* node); Node* ComputeIndex(ElementAccess const& access, Node* node); void EnsureAllocateOperator(); Node* GetWasmInstanceNode(); + // Align the value to kObjectAlignment8GbHeap if V8_COMPRESS_POINTERS_8GB is + // defined. + Node* AlignToAllocationAlignment(Node* address); + Graph* graph() const { return graph_; } Isolate* isolate() const { return isolate_; } Zone* zone() const { return zone_; } diff --git a/deps/v8/src/compiler/node.cc b/deps/v8/src/compiler/node.cc index ae24ca7c121289..d8e201bd4871bd 100644 --- a/deps/v8/src/compiler/node.cc +++ b/deps/v8/src/compiler/node.cc @@ -293,6 +293,15 @@ int Node::UseCount() const { return use_count; } +int Node::BranchUseCount() const { + int use_count = 0; + for (Use* use = first_use_; use; use = use->next) { + if (use->from()->opcode() == IrOpcode::kBranch) { + ++use_count; + } + } + return use_count; +} void Node::ReplaceUses(Node* that) { DCHECK(this->first_use_ == nullptr || this->first_use_->prev == nullptr); @@ -497,6 +506,7 @@ bool Node::Uses::empty() const { return begin() == end(); } } // namespace internal } // namespace v8 +V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE extern void _v8_internal_Node_Print(void* object) { reinterpret_cast<i::compiler::Node*>(object)->Print(); } diff --git a/deps/v8/src/compiler/node.h b/deps/v8/src/compiler/node.h index 57d49fe1ac0009..5a323efa992932 100644 --- a/deps/v8/src/compiler/node.h +++ b/deps/v8/src/compiler/node.h @@ -98,6 +98,7 @@ class V8_EXPORT_PRIVATE Node final { void EnsureInputCount(Zone* zone, int new_input_count); int UseCount() const; + int BranchUseCount() const; void ReplaceUses(Node* replace_to); class InputEdges; diff --git a/deps/v8/src/compiler/opcodes.h b/deps/v8/src/compiler/opcodes.h index 9853c06f559d50..f61ec25d66e210 100644 --- a/deps/v8/src/compiler/opcodes.h +++ b/deps/v8/src/compiler/opcodes.h @@ -183,7 +183,8 @@ V(JSStoreInArrayLiteral) \ V(JSDeleteProperty) \ V(JSHasProperty) \ - V(JSGetSuperConstructor) + V(JSGetSuperConstructor) \ + V(JSFindNonDefaultConstructorOrConstruct) #define JS_CONTEXT_OP_LIST(V) \ V(JSHasContextExtension) \ @@ -287,6 +288,7 @@ V(CheckedUint32ToTaggedSigned) \ V(CheckedUint64Bounds) \ V(CheckedUint64ToInt32) \ + V(CheckedUint64ToInt64) \ V(CheckedUint64ToTaggedSigned) \ V(CheckedFloat64ToInt32) \ V(CheckedFloat64ToInt64) \ @@ -297,7 +299,8 @@ V(CheckedTaggedToFloat64) \ V(CheckedTaggedToInt64) \ V(CheckedTaggedToTaggedSigned) \ - V(CheckedTaggedToTaggedPointer) + V(CheckedTaggedToTaggedPointer) \ + V(CheckedBigInt64Add) #define SIMPLIFIED_COMPARE_BINOP_LIST(V) \ V(NumberEqual) \ @@ 
-393,7 +396,8 @@ #define SIMPLIFIED_BIGINT_UNOP_LIST(V) \ V(BigIntNegate) \ - V(CheckBigInt) + V(CheckBigInt) \ + V(CheckBigInt64) #define SIMPLIFIED_SPECULATIVE_NUMBER_UNOP_LIST(V) V(SpeculativeToNumber) @@ -601,10 +605,13 @@ V(Int64Sub) \ V(Int64SubWithOverflow) \ V(Int64Mul) \ + V(Int64MulHigh) \ + V(Int64MulWithOverflow) \ V(Int64Div) \ V(Int64Mod) \ V(Uint64Div) \ - V(Uint64Mod) + V(Uint64Mod) \ + V(Uint64MulHigh) #define MACHINE_FLOAT32_UNOP_LIST(V) \ V(Float32Abs) \ diff --git a/deps/v8/src/compiler/operation-typer.cc b/deps/v8/src/compiler/operation-typer.cc index a3b232a871d611..4279c54d1bbf2e 100644 --- a/deps/v8/src/compiler/operation-typer.cc +++ b/deps/v8/src/compiler/operation-typer.cc @@ -585,6 +585,8 @@ Type OperationTyper::SpeculativeBigIntAsUintN(Type) { Type OperationTyper::CheckBigInt(Type type) { return Type::BigInt(); } +Type OperationTyper::CheckBigInt64(Type type) { return Type::SignedBigInt64(); } + Type OperationTyper::NumberAdd(Type lhs, Type rhs) { DCHECK(lhs.Is(Type::Number())); DCHECK(rhs.Is(Type::Number())); diff --git a/deps/v8/src/compiler/operator-properties.cc b/deps/v8/src/compiler/operator-properties.cc index 0389822629f07a..2b642fdee1502e 100644 --- a/deps/v8/src/compiler/operator-properties.cc +++ b/deps/v8/src/compiler/operator-properties.cc @@ -91,6 +91,7 @@ bool OperatorProperties::NeedsExactContext(const Operator* op) { case IrOpcode::kJSSetNamedProperty: case IrOpcode::kJSDefineNamedOwnProperty: case IrOpcode::kJSSetKeyedProperty: + case IrOpcode::kJSFindNonDefaultConstructorOrConstruct: return true; case IrOpcode::kJSAsyncFunctionEnter: @@ -239,6 +240,7 @@ bool OperatorProperties::HasFrameStateInput(const Operator* op) { case IrOpcode::kJSStackCheck: case IrOpcode::kJSDebugger: case IrOpcode::kJSGetSuperConstructor: + case IrOpcode::kJSFindNonDefaultConstructorOrConstruct: case IrOpcode::kJSBitwiseNot: case IrOpcode::kJSDecrement: case IrOpcode::kJSIncrement: diff --git a/deps/v8/src/compiler/pipeline.cc b/deps/v8/src/compiler/pipeline.cc index babd0e8dd0f36f..a142d46a4760d6 100644 --- a/deps/v8/src/compiler/pipeline.cc +++ b/deps/v8/src/compiler/pipeline.cc @@ -19,6 +19,7 @@ #include "src/codegen/reloc-info.h" #include "src/common/high-allocation-throughput-scope.h" #include "src/compiler/add-type-assertions-reducer.h" +#include "src/compiler/all-nodes.h" #include "src/compiler/backend/code-generator.h" #include "src/compiler/backend/frame-elider.h" #include "src/compiler/backend/instruction-selector.h" @@ -82,8 +83,10 @@ #include "src/compiler/turboshaft/graph-builder.h" #include "src/compiler/turboshaft/graph-visualizer.h" #include "src/compiler/turboshaft/graph.h" +#include "src/compiler/turboshaft/machine-optimization-assembler.h" #include "src/compiler/turboshaft/optimization-phase.h" #include "src/compiler/turboshaft/recreate-schedule.h" +#include "src/compiler/turboshaft/simplify-tf-loops.h" #include "src/compiler/turboshaft/value-numbering-assembler.h" #include "src/compiler/type-narrowing-reducer.h" #include "src/compiler/typed-optimization.h" @@ -94,6 +97,7 @@ #include "src/diagnostics/code-tracer.h" #include "src/diagnostics/disassembler.h" #include "src/execution/isolate-inl.h" +#include "src/flags/flags.h" #include "src/heap/local-heap.h" #include "src/logging/code-events.h" #include "src/logging/counters.h" @@ -160,7 +164,7 @@ class PipelineData { allocator_(isolate->allocator()), info_(info), debug_name_(info_->GetDebugName()), - may_have_unverifiable_graph_(FLAG_turboshaft), + may_have_unverifiable_graph_(v8_flags.turboshaft), 
zone_stats_(zone_stats), pipeline_statistics_(pipeline_statistics), graph_zone_scope_(zone_stats_, kGraphZoneName, kCompressGraphZone), @@ -578,7 +582,7 @@ class PipelineData { osr_helper_, start_source_position_, jump_optimization_info_, assembler_options(), buffer_cache, info_->builtin(), max_unoptimized_frame_height(), max_pushed_argument_count(), - FLAG_trace_turbo_stack_accesses ? debug_name_.get() : nullptr); + v8_flags.trace_turbo_stack_accesses ? debug_name_.get() : nullptr); } void BeginPhaseKind(const char* phase_kind_name) { @@ -919,15 +923,15 @@ void PrintParticipatingSource(OptimizedCompilationInfo* info, // Print the code after compiling it. void PrintCode(Isolate* isolate, Handle<Code> code, OptimizedCompilationInfo* info) { - if (FLAG_print_opt_source && info->IsOptimizing()) { + if (v8_flags.print_opt_source && info->IsOptimizing()) { PrintParticipatingSource(info, isolate); } #ifdef ENABLE_DISASSEMBLER const bool print_code = - FLAG_print_code || - (info->IsOptimizing() && FLAG_print_opt_code && - info->shared_info()->PassesFilter(FLAG_print_opt_code_filter)); + v8_flags.print_code || + (info->IsOptimizing() && v8_flags.print_opt_code && + info->shared_info()->PassesFilter(v8_flags.print_opt_code_filter)); if (print_code) { std::unique_ptr<char[]> debug_name = info->GetDebugName(); CodeTracer::StreamScope tracing_scope(isolate->GetCodeTracer()); @@ -988,7 +992,7 @@ void TraceSchedule(OptimizedCompilationInfo* info, PipelineData* data, json_of << "\"},\n"; } - if (info->trace_turbo_graph() || FLAG_trace_turbo_scheduler) { + if (info->trace_turbo_graph() || v8_flags.trace_turbo_scheduler) { UnparkedScopeIfNeeded scope(data->broker()); AllowHandleDereference allow_deref; @@ -1008,7 +1012,7 @@ void TraceScheduleAndVerify(OptimizedCompilationInfo* info, PipelineData* data, TraceSchedule(info, data, schedule, phase_name); - if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule); + if (v8_flags.turbo_verify) ScheduleVerifier::Run(schedule); } void AddReducer(PipelineData* data, GraphReducer* graph_reducer, @@ -1038,7 +1042,7 @@ PipelineStatistics* CreatePipelineStatistics(Handle<Script> script, bool tracing_enabled; TRACE_EVENT_CATEGORY_GROUP_ENABLED(TRACE_DISABLED_BY_DEFAULT("v8.turbofan"), &tracing_enabled); - if (tracing_enabled || FLAG_turbo_stats || FLAG_turbo_stats_nvp) { + if (tracing_enabled || v8_flags.turbo_stats || v8_flags.turbo_stats_nvp) { pipeline_statistics = new PipelineStatistics(info, isolate->GetTurboStatistics(), zone_stats); pipeline_statistics->BeginPhaseKind("V8.TFInitializing"); @@ -1064,7 +1068,7 @@ PipelineStatistics* CreatePipelineStatistics( bool tracing_enabled; TRACE_EVENT_CATEGORY_GROUP_ENABLED( TRACE_DISABLED_BY_DEFAULT("v8.wasm.turbofan"), &tracing_enabled); - if (tracing_enabled || FLAG_turbo_stats_wasm) { + if (tracing_enabled || v8_flags.turbo_stats_wasm) { pipeline_statistics = new PipelineStatistics( info, wasm::GetWasmEngine()->GetOrCreateTurboStatistics(), zone_stats); pipeline_statistics->BeginPhaseKind("V8.WasmInitializing"); @@ -1180,20 +1184,20 @@ PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl( PipelineJobScope scope(&data_, isolate->counters()->runtime_call_stats()); if (compilation_info()->bytecode_array()->length() > - FLAG_max_optimized_bytecode_size) { + v8_flags.max_optimized_bytecode_size) { return AbortOptimization(BailoutReason::kFunctionTooBig); } - if (!FLAG_always_turbofan) { + if (!v8_flags.always_turbofan) { compilation_info()->set_bailout_on_uninitialized(); } - if (FLAG_turbo_loop_peeling) { + if 
(v8_flags.turbo_loop_peeling) { compilation_info()->set_loop_peeling(); } - if (FLAG_turbo_inlining) { + if (v8_flags.turbo_inlining) { compilation_info()->set_inlining(); } - if (FLAG_turbo_allocation_folding) { + if (v8_flags.turbo_allocation_folding) { compilation_info()->set_allocation_folding(); } @@ -1461,7 +1465,7 @@ struct EarlyGraphTrimmingPhase { GraphTrimmer trimmer(temp_zone, data->graph()); NodeVector roots(temp_zone); data->jsgraph()->GetCachedNodes(&roots); - UnparkedScopeIfNeeded scope(data->broker(), FLAG_trace_turbo_trimming); + UnparkedScopeIfNeeded scope(data->broker(), v8_flags.trace_turbo_trimming); trimmer.TrimGraph(roots.begin(), roots.end()); } }; @@ -1479,7 +1483,7 @@ struct TyperPhase { LoopVariableOptimizer induction_vars(data->jsgraph()->graph(), data->common(), temp_zone); - if (FLAG_turbo_loop_variable) induction_vars.Run(); + if (v8_flags.turbo_loop_variable) induction_vars.Run(); // The typer inspects heap objects, so we need to unpark the local heap. UnparkedScopeIfNeeded scope(data->broker()); @@ -1634,7 +1638,8 @@ struct LoopPeelingPhase { NodeVector roots(temp_zone); data->jsgraph()->GetCachedNodes(&roots); { - UnparkedScopeIfNeeded scope(data->broker(), FLAG_trace_turbo_trimming); + UnparkedScopeIfNeeded scope(data->broker(), + v8_flags.trace_turbo_trimming); trimmer.TrimGraph(roots.begin(), roots.end()); } @@ -1698,19 +1703,21 @@ struct WasmLoopUnrollingPhase { void Run(PipelineData* data, Zone* temp_zone, std::vector<compiler::WasmLoopInfo>* loop_infos) { + if (loop_infos->empty()) return; + AllNodes all_nodes(temp_zone, data->graph(), data->graph()->end()); for (WasmLoopInfo& loop_info : *loop_infos) { - if (loop_info.can_be_innermost) { - ZoneUnorderedSet<Node*>* loop = - LoopFinder::FindSmallInnermostLoopFromHeader( - loop_info.header, temp_zone, - // Only discover the loop until its size is the maximum unrolled - // size for its depth. - maximum_unrollable_size(loop_info.nesting_depth), true); - if (loop == nullptr) continue; - UnrollLoop(loop_info.header, loop, loop_info.nesting_depth, - data->graph(), data->common(), temp_zone, - data->source_positions(), data->node_origins()); - } + if (!loop_info.can_be_innermost) continue; + if (!all_nodes.IsReachable(loop_info.header)) continue; + ZoneUnorderedSet<Node*>* loop = + LoopFinder::FindSmallInnermostLoopFromHeader( + loop_info.header, all_nodes, temp_zone, + // Only discover the loop until its size is the maximum unrolled + // size for its depth. + maximum_unrollable_size(loop_info.nesting_depth), true); + if (loop == nullptr) continue; + UnrollLoop(loop_info.header, loop, loop_info.nesting_depth, data->graph(), + data->common(), temp_zone, data->source_positions(), + data->node_origins()); } EliminateLoopExits(loop_infos); @@ -1722,19 +1729,20 @@ struct WasmLoopPeelingPhase { void Run(PipelineData* data, Zone* temp_zone, std::vector<compiler::WasmLoopInfo>* loop_infos) { + AllNodes all_nodes(temp_zone, data->graph()); for (WasmLoopInfo& loop_info : *loop_infos) { if (loop_info.can_be_innermost) { ZoneUnorderedSet<Node*>* loop = LoopFinder::FindSmallInnermostLoopFromHeader( - loop_info.header, temp_zone, FLAG_wasm_loop_peeling_max_size, - false); + loop_info.header, all_nodes, temp_zone, + v8_flags.wasm_loop_peeling_max_size, false); if (loop == nullptr) continue; PeelWasmLoop(loop_info.header, loop, data->graph(), data->common(), temp_zone, data->source_positions(), data->node_origins()); } } // If we are going to unroll later, keep loop exits. 
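// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the patch): the unrolling
// and peeling phases above now build an AllNodes set once and skip loop
// headers that earlier phases (e.g. inlining) have disconnected from the
// graph. Conceptually, peeling copies the first iteration in front of the
// loop so later passes can specialize it:
#include <cstddef>
int SumPeeledSketch(const int* xs, size_t n) {
  if (n == 0) return 0;
  int sum = xs[0];                   // peeled first iteration
  for (size_t i = 1; i < n; ++i) {   // remaining loop
    sum += xs[i];
  }
  return sum;
}
// ---------------------------------------------------------------------------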
- if (!FLAG_wasm_loop_unrolling) EliminateLoopExits(loop_infos); + if (!v8_flags.wasm_loop_unrolling) EliminateLoopExits(loop_infos); } }; #endif // V8_ENABLE_WEBASSEMBLY @@ -1815,7 +1823,8 @@ struct EffectControlLinearizationPhase { NodeVector roots(temp_zone); data->jsgraph()->GetCachedNodes(&roots); { - UnparkedScopeIfNeeded scope(data->broker(), FLAG_trace_turbo_trimming); + UnparkedScopeIfNeeded scope(data->broker(), + v8_flags.trace_turbo_trimming); trimmer.TrimGraph(roots.begin(), roots.end()); } @@ -1868,7 +1877,8 @@ struct StoreStoreEliminationPhase { NodeVector roots(temp_zone); data->jsgraph()->GetCachedNodes(&roots); { - UnparkedScopeIfNeeded scope(data->broker(), FLAG_trace_turbo_trimming); + UnparkedScopeIfNeeded scope(data->broker(), + v8_flags.trace_turbo_trimming); trimmer.TrimGraph(roots.begin(), roots.end()); } @@ -1930,7 +1940,8 @@ struct MemoryOptimizationPhase { NodeVector roots(temp_zone); data->jsgraph()->GetCachedNodes(&roots); { - UnparkedScopeIfNeeded scope(data->broker(), FLAG_trace_turbo_trimming); + UnparkedScopeIfNeeded scope(data->broker(), + v8_flags.trace_turbo_trimming); trimmer.TrimGraph(roots.begin(), roots.end()); } @@ -2038,9 +2049,12 @@ struct BuildTurboshaftPhase { Schedule* schedule = data->schedule(); data->reset_schedule(); data->CreateTurboshaftGraph(); - return turboshaft::BuildGraph( - schedule, data->graph_zone(), temp_zone, &data->turboshaft_graph(), - data->source_positions(), data->node_origins()); + if (auto bailout = turboshaft::BuildGraph( + schedule, data->graph_zone(), temp_zone, &data->turboshaft_graph(), + data->source_positions(), data->node_origins())) { + return bailout; + } + return {}; } }; @@ -2048,8 +2062,12 @@ struct OptimizeTurboshaftPhase { DECL_PIPELINE_PHASE_CONSTANTS(OptimizeTurboshaft) void Run(PipelineData* data, Zone* temp_zone) { - turboshaft::OptimizationPhase<turboshaft::LivenessAnalyzer, - turboshaft::ValueNumberingAssembler>:: + UnparkedScopeIfNeeded scope(data->broker(), + FLAG_turboshaft_trace_reduction); + turboshaft::OptimizationPhase< + turboshaft::AnalyzerBase, + turboshaft::MachineOptimizationAssembler< + turboshaft::ValueNumberingAssembler, false>>:: Run(&data->turboshaft_graph(), temp_zone, data->node_origins(), turboshaft::VisitOrder::kDominator); } @@ -2097,6 +2115,19 @@ struct WasmGCOptimizationPhase { } }; +struct SimplifyLoopsPhase { + DECL_PIPELINE_PHASE_CONSTANTS(SimplifyLoops) + + void Run(PipelineData* data, Zone* temp_zone) { + GraphReducer graph_reducer( + temp_zone, data->graph(), &data->info()->tick_counter(), data->broker(), + data->jsgraph()->Dead(), data->observe_node_manager()); + SimplifyTFLoops simplify_loops(&graph_reducer, data->mcgraph()); + AddReducer(data, &graph_reducer, &simplify_loops); + graph_reducer.ReduceGraph(); + } +}; + struct WasmGCLoweringPhase { DECL_PIPELINE_PHASE_CONSTANTS(WasmGCLowering) @@ -2121,7 +2152,7 @@ struct WasmOptimizationPhase { // then one around branch elimination. This is because those two // optimizations sometimes display quadratic complexity when run together. // We only need load elimination for managed objects. - if (FLAG_experimental_wasm_gc) { + if (v8_flags.experimental_wasm_gc) { GraphReducer graph_reducer(temp_zone, data->graph(), &data->info()->tick_counter(), data->broker(), data->jsgraph()->Dead(), @@ -2314,7 +2345,7 @@ struct InstructionSelectionPhase { ? 
InstructionSelector::kAllSourcePositions : InstructionSelector::kCallSourcePositions, InstructionSelector::SupportedFeatures(), - FLAG_turbo_instruction_scheduling + v8_flags.turbo_instruction_scheduling ? InstructionSelector::kEnableScheduling : InstructionSelector::kDisableScheduling, data->assembler_options().enable_root_relative_access @@ -2716,7 +2747,7 @@ CompilationJob::Status WasmHeapStubCompilationJob::PrepareJobImpl( CompilationJob::Status WasmHeapStubCompilationJob::ExecuteJobImpl( RuntimeCallStats* stats, LocalIsolate* local_isolate) { std::unique_ptr<PipelineStatistics> pipeline_statistics; - if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) { + if (v8_flags.turbo_stats || v8_flags.turbo_stats_nvp) { pipeline_statistics.reset(new PipelineStatistics( &info_, wasm::GetWasmEngine()->GetOrCreateTurboStatistics(), &zone_stats_)); @@ -2759,7 +2790,7 @@ CompilationJob::Status WasmHeapStubCompilationJob::FinalizeJobImpl( if (pipeline_.CommitDependencies(code)) { info_.SetCode(code); #ifdef ENABLE_DISASSEMBLER - if (FLAG_print_opt_code) { + if (v8_flags.print_opt_code) { CodeTracer::StreamScope tracing_scope(isolate->GetCodeTracer()); code->Disassemble(compilation_info()->GetDebugName().get(), tracing_scope.stream(), isolate); @@ -2778,7 +2809,7 @@ void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) { if (info()->trace_turbo_json() || info()->trace_turbo_graph()) { Run<PrintGraphPhase>(phase); } - if (FLAG_turbo_verify) { + if (v8_flags.turbo_verify) { Run<VerifyGraphPhase>(untyped); } } @@ -2871,18 +2902,18 @@ bool PipelineImpl::OptimizeGraph(Linkage* linkage) { RunPrintAndVerify(LoopExitEliminationPhase::phase_name(), true); } - if (FLAG_turbo_load_elimination) { + if (v8_flags.turbo_load_elimination) { Run<LoadEliminationPhase>(); RunPrintAndVerify(LoadEliminationPhase::phase_name()); } data->DeleteTyper(); - if (FLAG_turbo_escape) { + if (v8_flags.turbo_escape) { Run<EscapeAnalysisPhase>(); RunPrintAndVerify(EscapeAnalysisPhase::phase_name()); } - if (FLAG_assert_types) { + if (v8_flags.assert_types) { Run<TypeAssertionsPhase>(); RunPrintAndVerify(TypeAssertionsPhase::phase_name()); } @@ -2928,13 +2959,13 @@ bool PipelineImpl::OptimizeGraph(Linkage* linkage) { Run<EffectControlLinearizationPhase>(); RunPrintAndVerify(EffectControlLinearizationPhase::phase_name(), true); - if (FLAG_turbo_store_elimination) { + if (v8_flags.turbo_store_elimination) { Run<StoreStoreEliminationPhase>(); RunPrintAndVerify(StoreStoreEliminationPhase::phase_name(), true); } // Optimize control flow. - if (FLAG_turbo_cf_optimization) { + if (v8_flags.turbo_cf_optimization) { Run<ControlFlowOptimizationPhase>(); RunPrintAndVerify(ControlFlowOptimizationPhase::phase_name(), true); } @@ -2946,13 +2977,13 @@ bool PipelineImpl::OptimizeGraph(Linkage* linkage) { Run<MemoryOptimizationPhase>(); RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true); - // Run value numbering and machine operator reducer to optimize load/store - // address computation (in particular, reuse the address computation whenever - // possible). - Run<MachineOperatorOptimizationPhase>(); - RunPrintAndVerify(MachineOperatorOptimizationPhase::phase_name(), true); + if (!v8_flags.turboshaft) { + // Run value numbering and machine operator reducer to optimize load/store + // address computation (in particular, reuse the address computation + // whenever possible). 
+ Run<MachineOperatorOptimizationPhase>(); + RunPrintAndVerify(MachineOperatorOptimizationPhase::phase_name(), true); - if (!FLAG_turboshaft) { Run<DecompressionOptimizationPhase>(); RunPrintAndVerify(DecompressionOptimizationPhase::phase_name(), true); } @@ -2967,7 +2998,7 @@ bool PipelineImpl::OptimizeGraph(Linkage* linkage) { ComputeScheduledGraph(); - if (FLAG_turboshaft) { + if (v8_flags.turboshaft) { if (base::Optional<BailoutReason> bailout = Run<BuildTurboshaftPhase>()) { info()->AbortOptimization(*bailout); data->EndPhaseKind(); @@ -3072,17 +3103,17 @@ MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub( NodeOriginTable node_origins(graph); JumpOptimizationInfo jump_opt; bool should_optimize_jumps = isolate->serializer_enabled() && - FLAG_turbo_rewrite_far_jumps && - !FLAG_turbo_profiling; + v8_flags.turbo_rewrite_far_jumps && + !v8_flags.turbo_profiling; PipelineData data(&zone_stats, &info, isolate, isolate->allocator(), graph, jsgraph, nullptr, source_positions, &node_origins, should_optimize_jumps ? &jump_opt : nullptr, options, profile_data); PipelineJobScope scope(&data, isolate->counters()->runtime_call_stats()); RCS_SCOPE(isolate, RuntimeCallCounterId::kOptimizeCode); - data.set_verify_graph(FLAG_verify_csa); + data.set_verify_graph(v8_flags.verify_csa); std::unique_ptr<PipelineStatistics> pipeline_statistics; - if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) { + if (v8_flags.turbo_stats || v8_flags.turbo_stats_nvp) { pipeline_statistics.reset(new PipelineStatistics( &info, isolate->GetTurboStatistics(), &zone_stats)); pipeline_statistics->BeginPhaseKind("V8.TFStubCodegen"); @@ -3127,13 +3158,13 @@ MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub( pipeline.Run<VerifyGraphPhase>(true); int graph_hash_before_scheduling = 0; - if (FLAG_turbo_profiling || profile_data != nullptr) { + if (v8_flags.turbo_profiling || profile_data != nullptr) { graph_hash_before_scheduling = HashGraphForPGO(data.graph()); } if (profile_data != nullptr && profile_data->hash() != graph_hash_before_scheduling) { - if (FLAG_warn_about_builtin_profile_data) { + if (v8_flags.warn_about_builtin_profile_data) { PrintF("Rejected profile data for %s due to function change\n", debug_name); PrintF("Please use tools/builtins-pgo/generate.py to refresh it.\n"); @@ -3155,11 +3186,11 @@ MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub( profile_data); PipelineJobScope second_scope(&second_data, isolate->counters()->runtime_call_stats()); - second_data.set_verify_graph(FLAG_verify_csa); + second_data.set_verify_graph(v8_flags.verify_csa); PipelineImpl second_pipeline(&second_data); second_pipeline.SelectInstructionsAndAssemble(call_descriptor); - if (FLAG_turbo_profiling) { + if (v8_flags.turbo_profiling) { info.profiler_data()->SetHash(graph_hash_before_scheduling); } @@ -3206,7 +3237,7 @@ wasm::WasmCompilationResult Pipeline::GenerateCodeForWasmNativeStub( PipelineData data(&zone_stats, wasm_engine, &info, mcgraph, nullptr, source_positions, node_positions, options, kNoBufferCache); std::unique_ptr<PipelineStatistics> pipeline_statistics; - if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) { + if (v8_flags.turbo_stats || v8_flags.turbo_stats_nvp) { pipeline_statistics.reset(new PipelineStatistics( &info, wasm_engine->GetOrCreateTurboStatistics(), &zone_stats)); pipeline_statistics->BeginPhaseKind("V8.WasmStubCodegen"); @@ -3304,7 +3335,7 @@ void Pipeline::GenerateCodeForWasmFunction( wasm::AssemblerBufferCache* buffer_cache) { auto* wasm_engine = wasm::GetWasmEngine(); base::TimeTicks start_time; - if 
(V8_UNLIKELY(FLAG_trace_wasm_compilation_times)) { + if (V8_UNLIKELY(v8_flags.trace_wasm_compilation_times)) { start_time = base::TimeTicks::Now(); } ZoneStats zone_stats(wasm_engine->allocator()); @@ -3327,38 +3358,39 @@ void Pipeline::GenerateCodeForWasmFunction( pipeline.RunPrintAndVerify("V8.WasmMachineCode", true); data.BeginPhaseKind("V8.WasmOptimization"); - if (FLAG_wasm_inlining) { + if (v8_flags.wasm_inlining) { pipeline.Run<WasmInliningPhase>(env, function_index, wire_bytes_storage, loop_info); pipeline.RunPrintAndVerify(WasmInliningPhase::phase_name(), true); } - if (FLAG_wasm_loop_peeling) { + if (v8_flags.wasm_loop_peeling) { pipeline.Run<WasmLoopPeelingPhase>(loop_info); pipeline.RunPrintAndVerify(WasmLoopPeelingPhase::phase_name(), true); } - if (FLAG_wasm_loop_unrolling) { + if (v8_flags.wasm_loop_unrolling) { pipeline.Run<WasmLoopUnrollingPhase>(loop_info); pipeline.RunPrintAndVerify(WasmLoopUnrollingPhase::phase_name(), true); } const bool is_asm_js = is_asmjs_module(module); - if (FLAG_experimental_wasm_gc || FLAG_experimental_wasm_stringref) { + if (v8_flags.experimental_wasm_gc || v8_flags.experimental_wasm_stringref) { pipeline.Run<WasmTypingPhase>(function_index); pipeline.RunPrintAndVerify(WasmTypingPhase::phase_name(), true); - if (FLAG_wasm_opt) { + if (v8_flags.wasm_opt) { pipeline.Run<WasmGCOptimizationPhase>(module); pipeline.RunPrintAndVerify(WasmGCOptimizationPhase::phase_name(), true); } } // These proposals use gc nodes. - if (FLAG_experimental_wasm_gc || FLAG_experimental_wasm_typed_funcref || - FLAG_experimental_wasm_stringref) { + if (v8_flags.experimental_wasm_gc || + v8_flags.experimental_wasm_typed_funcref || + v8_flags.experimental_wasm_stringref) { pipeline.Run<WasmGCLoweringPhase>(); pipeline.RunPrintAndVerify(WasmGCLoweringPhase::phase_name(), true); } - if (FLAG_wasm_opt || is_asm_js) { + if (v8_flags.wasm_opt || is_asm_js) { pipeline.Run<WasmOptimizationPhase>(is_asm_js); pipeline.RunPrintAndVerify(WasmOptimizationPhase::phase_name(), true); } else { @@ -3369,25 +3401,32 @@ void Pipeline::GenerateCodeForWasmFunction( pipeline.Run<MemoryOptimizationPhase>(); pipeline.RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true); - if (FLAG_experimental_wasm_gc && FLAG_wasm_opt) { + if (v8_flags.experimental_wasm_gc && v8_flags.wasm_opt) { // Run value numbering and machine operator reducer to optimize load/store // address computation (in particular, reuse the address computation // whenever possible). 
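// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the patch): "reuse the
// address computation" means value numbering, i.e. two loads from the same
// base + offset share one address node instead of recomputing it. A minimal
// hash-consing cache; all names are local to this sketch:
#include <cstdint>
#include <map>
#include <utility>
struct SketchNode { uintptr_t base; intptr_t offset; };
SketchNode* GetAddressNodeSketch(uintptr_t base, intptr_t offset) {
  static std::map<std::pair<uintptr_t, intptr_t>, SketchNode> cache;
  auto it = cache.try_emplace({base, offset}, SketchNode{base, offset}).first;
  return &it->second;  // equal (base, offset) pairs yield the same node
}
// ---------------------------------------------------------------------------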
pipeline.Run<MachineOperatorOptimizationPhase>(); pipeline.RunPrintAndVerify(MachineOperatorOptimizationPhase::phase_name(), true); - pipeline.Run<DecompressionOptimizationPhase>(); - pipeline.RunPrintAndVerify(DecompressionOptimizationPhase::phase_name(), - true); + if (!v8_flags.turboshaft_wasm) { + pipeline.Run<DecompressionOptimizationPhase>(); + pipeline.RunPrintAndVerify(DecompressionOptimizationPhase::phase_name(), + true); + } } - if (FLAG_wasm_opt) { + if (v8_flags.wasm_opt) { pipeline.Run<BranchConditionDuplicationPhase>(); pipeline.RunPrintAndVerify(BranchConditionDuplicationPhase::phase_name(), true); } - if (FLAG_turbo_splitting && !is_asm_js) { + if (v8_flags.turboshaft_wasm) { + pipeline.Run<SimplifyLoopsPhase>(); + pipeline.RunPrintAndVerify(SimplifyLoopsPhase::phase_name(), true); + } + + if (v8_flags.turbo_splitting && !is_asm_js) { data.info()->set_splitting(); } @@ -3399,6 +3438,30 @@ void Pipeline::GenerateCodeForWasmFunction( pipeline.ComputeScheduledGraph(); Linkage linkage(call_descriptor); + + if (v8_flags.turboshaft_wasm) { + if (base::Optional<BailoutReason> bailout = + pipeline.Run<BuildTurboshaftPhase>()) { + pipeline.info()->AbortOptimization(*bailout); + data.EndPhaseKind(); + info->SetWasmCompilationResult({}); + return; + } + pipeline.Run<PrintTurboshaftGraphPhase>(BuildTurboshaftPhase::phase_name()); + + pipeline.Run<OptimizeTurboshaftPhase>(); + pipeline.Run<PrintTurboshaftGraphPhase>( + OptimizeTurboshaftPhase::phase_name()); + + pipeline.Run<DecompressionOptimizationPhase>(); + pipeline.Run<PrintTurboshaftGraphPhase>( + DecompressionOptimizationPhase::phase_name()); + + pipeline.Run<TurboshaftRecreateSchedulePhase>(&linkage); + TraceSchedule(data.info(), &data, data.schedule(), + TurboshaftRecreateSchedulePhase::phase_name()); + } + if (!pipeline.SelectInstructions(&linkage)) return; pipeline.AssembleCode(&linkage); @@ -3443,7 +3506,7 @@ void Pipeline::GenerateCodeForWasmFunction( << " using TurboFan" << std::endl; } - if (V8_UNLIKELY(FLAG_trace_wasm_compilation_times)) { + if (V8_UNLIKELY(v8_flags.trace_wasm_compilation_times)) { base::TimeDelta time = base::TimeTicks::Now() - start_time; int codesize = result->code_desc.body_size(); StdoutStream{} << "Compiled function " @@ -3524,7 +3587,7 @@ MaybeHandle<Code> Pipeline::GenerateCodeForTesting( nullptr, schedule, nullptr, node_positions, nullptr, options, nullptr); std::unique_ptr<PipelineStatistics> pipeline_statistics; - if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) { + if (v8_flags.turbo_stats || v8_flags.turbo_stats_nvp) { pipeline_statistics.reset(new PipelineStatistics( info, isolate->GetTurboStatistics(), &zone_stats)); pipeline_statistics->BeginPhaseKind("V8.TFTestCodegen"); @@ -3604,7 +3667,7 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) { DCHECK_NOT_NULL(data->graph()); DCHECK_NOT_NULL(data->schedule()); - if (FLAG_turbo_profiling) { + if (v8_flags.turbo_profiling) { UnparkedScopeIfNeeded unparked_scope(data->broker()); data->info()->set_profiler_data(BasicBlockInstrumentor::Instrument( info(), data->graph(), data->schedule(), data->isolate())); @@ -3612,9 +3675,9 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) { bool verify_stub_graph = data->verify_graph() || - (FLAG_turbo_verify_machine_graph != nullptr && - (!strcmp(FLAG_turbo_verify_machine_graph, "*") || - !strcmp(FLAG_turbo_verify_machine_graph, data->debug_name()))); + (v8_flags.turbo_verify_machine_graph != nullptr && + (!strcmp(v8_flags.turbo_verify_machine_graph, "*") || + 
!strcmp(v8_flags.turbo_verify_machine_graph, data->debug_name()))); // Jump optimization runs instruction selection twice, but the instruction // selector mutates nodes like swapping the inputs of a load, which can // violate the machine graph verification rules. So we skip the second @@ -3624,7 +3687,7 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) { verify_stub_graph = false; } if (verify_stub_graph) { - if (FLAG_trace_verify_csa) { + if (v8_flags.trace_verify_csa) { UnparkedScopeIfNeeded scope(data->broker()); AllowHandleDereference allow_deref; CodeTracer::StreamScope tracing_scope(data->GetCodeTracer()); @@ -3689,7 +3752,7 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) { data->BeginPhaseKind("V8.TFRegisterAllocation"); - bool run_verifier = FLAG_turbo_verify_allocation; + bool run_verifier = v8_flags.turbo_verify_allocation; // Allocate registers. @@ -3705,8 +3768,8 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) { // JS functions. bool use_mid_tier_register_allocator = data->info()->code_kind() == CodeKind::WASM_FUNCTION && - (FLAG_turbo_force_mid_tier_regalloc || - (FLAG_turbo_use_mid_tier_regalloc_for_huge_functions && + (v8_flags.turbo_force_mid_tier_regalloc || + (v8_flags.turbo_use_mid_tier_regalloc_for_huge_functions && data->sequence()->VirtualRegisterCount() > kTopTierVirtualRegistersLimit)); @@ -3733,7 +3796,7 @@ bool PipelineImpl::SelectInstructions(Linkage* linkage) { bool generate_frame_at_start = data_->sequence()->instruction_blocks().front()->must_construct_frame(); // Optimimize jumps. - if (FLAG_turbo_jt) { + if (v8_flags.turbo_jt) { Run<JumpThreadingPhase>(generate_frame_at_start); } @@ -4004,7 +4067,7 @@ void PipelineImpl::AllocateRegistersForTopTier( Run<PopulateReferenceMapsPhase>(); - if (FLAG_turbo_move_optimization) { + if (v8_flags.turbo_move_optimization) { Run<OptimizeMovesPhase>(); } diff --git a/deps/v8/src/compiler/raw-machine-assembler.cc b/deps/v8/src/compiler/raw-machine-assembler.cc index cdd6b4a55a0ac1..7302e6f2fac56d 100644 --- a/deps/v8/src/compiler/raw-machine-assembler.cc +++ b/deps/v8/src/compiler/raw-machine-assembler.cc @@ -88,7 +88,7 @@ Node* RawMachineAssembler::OptimizedAllocate( Schedule* RawMachineAssembler::ExportForTest() { // Compute the correct codegen order. DCHECK(schedule_->rpo_order()->empty()); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { PrintF("--- RAW SCHEDULE -------------------------------------------\n"); StdoutStream{} << *schedule_; } @@ -96,7 +96,7 @@ Schedule* RawMachineAssembler::ExportForTest() { Scheduler::ComputeSpecialRPO(zone(), schedule_); Scheduler::GenerateDominatorTree(schedule_); schedule_->PropagateDeferredMark(); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { PrintF("--- EDGE SPLIT AND PROPAGATED DEFERRED SCHEDULE ------------\n"); StdoutStream{} << *schedule_; } @@ -110,14 +110,14 @@ Schedule* RawMachineAssembler::ExportForTest() { Graph* RawMachineAssembler::ExportForOptimization() { // Compute the correct codegen order. 
DCHECK(schedule_->rpo_order()->empty()); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { PrintF("--- RAW SCHEDULE -------------------------------------------\n"); StdoutStream{} << *schedule_; } schedule_->EnsureCFGWellFormedness(); OptimizeControlFlow(schedule_, graph(), common()); Scheduler::ComputeSpecialRPO(zone(), schedule_); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { PrintF("--- SCHEDULE BEFORE GRAPH CREATION -------------------------\n"); StdoutStream{} << *schedule_; } diff --git a/deps/v8/src/compiler/raw-machine-assembler.h b/deps/v8/src/compiler/raw-machine-assembler.h index bec0a9b2b7b6f1..57cee2f0e1d2f0 100644 --- a/deps/v8/src/compiler/raw-machine-assembler.h +++ b/deps/v8/src/compiler/raw-machine-assembler.h @@ -514,6 +514,15 @@ class V8_EXPORT_PRIVATE RawMachineAssembler { Node* Int64Mul(Node* a, Node* b) { return AddNode(machine()->Int64Mul(), a, b); } + Node* Int64MulHigh(Node* a, Node* b) { + return AddNode(machine()->Int64MulHigh(), a, b); + } + Node* Uint64MulHigh(Node* a, Node* b) { + return AddNode(machine()->Uint64MulHigh(), a, b); + } + Node* Int64MulWithOverflow(Node* a, Node* b) { + return AddNode(machine()->Int64MulWithOverflow(), a, b); + } Node* Int64Div(Node* a, Node* b) { return AddNode(machine()->Int64Div(), a, b); } @@ -599,6 +608,8 @@ class V8_EXPORT_PRIVATE RawMachineAssembler { INTPTR_BINOP(Int, Sub) INTPTR_BINOP(Int, SubWithOverflow) INTPTR_BINOP(Int, Mul) + INTPTR_BINOP(Int, MulHigh) + INTPTR_BINOP(Int, MulWithOverflow) INTPTR_BINOP(Int, Div) INTPTR_BINOP(Int, LessThan) INTPTR_BINOP(Int, LessThanOrEqual) @@ -619,6 +630,7 @@ class V8_EXPORT_PRIVATE RawMachineAssembler { UINTPTR_BINOP(Uint, LessThanOrEqual) UINTPTR_BINOP(Uint, GreaterThanOrEqual) UINTPTR_BINOP(Uint, GreaterThan) + UINTPTR_BINOP(Uint, MulHigh) #undef UINTPTR_BINOP diff --git a/deps/v8/src/compiler/redundancy-elimination.cc b/deps/v8/src/compiler/redundancy-elimination.cc index 110b9ba9be98bd..9f1e15f1317904 100644 --- a/deps/v8/src/compiler/redundancy-elimination.cc +++ b/deps/v8/src/compiler/redundancy-elimination.cc @@ -20,6 +20,7 @@ Reduction RedundancyElimination::Reduce(Node* node) { if (node_checks_.Get(node)) return NoChange(); switch (node->opcode()) { case IrOpcode::kCheckBigInt: + case IrOpcode::kCheckBigInt64: case IrOpcode::kCheckBounds: case IrOpcode::kCheckClosure: case IrOpcode::kCheckEqualsInternalizedString: @@ -164,6 +165,7 @@ bool CheckSubsumes(Node const* a, Node const* b) { case IrOpcode::kCheckString: case IrOpcode::kCheckNumber: case IrOpcode::kCheckBigInt: + case IrOpcode::kCheckBigInt64: break; case IrOpcode::kCheckedInt32ToTaggedSigned: case IrOpcode::kCheckedInt64ToInt32: diff --git a/deps/v8/src/compiler/representation-change.cc b/deps/v8/src/compiler/representation-change.cc index 5ec5af0be2ff0a..68e0cac877663b 100644 --- a/deps/v8/src/compiler/representation-change.cc +++ b/deps/v8/src/compiler/representation-change.cc @@ -136,6 +136,11 @@ bool IsWord(MachineRepresentation rep) { rep == MachineRepresentation::kWord32; } +bool TypeCheckIsBigInt(TypeCheckKind type_check) { + return type_check == TypeCheckKind::kBigInt || + type_check == TypeCheckKind::kBigInt64; +} + } // namespace RepresentationChanger::RepresentationChanger( @@ -164,7 +169,7 @@ Node* RepresentationChanger::GetRepresentationFor( // Rematerialize any truncated BigInt if user is not expecting a BigInt. 
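// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the patch): a BigInt that
// was truncated to a raw 64-bit word carries no sign of its own, so the
// rematerialization below must choose the signed or unsigned reading from the
// static type (e.g. Type::UnsignedBigInt64). Both readings of one bit
// pattern, assuming two's complement:
#include <cstdint>
constexpr uint64_t kSketchBits = 0xFFFFFFFFFFFFFFFFull;
static_assert(static_cast<int64_t>(kSketchBits) == -1,
              "signed reading of the word");
static_assert(kSketchBits == 18446744073709551615ull,
              "unsigned reading of the same word");
// ---------------------------------------------------------------------------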
if (output_type.Is(Type::BigInt()) && output_rep == MachineRepresentation::kWord64 && - use_info.type_check() != TypeCheckKind::kBigInt) { + !TypeCheckIsBigInt(use_info.type_check())) { if (output_type.Is(Type::UnsignedBigInt64())) { node = InsertConversion(node, simplified()->ChangeUint64ToBigInt(), use_node); @@ -182,7 +187,7 @@ Node* RepresentationChanger::GetRepresentationFor( // this behavior is disabled only for TypeCheckKind::kBigInt, but should // be fixed for all other type checks. (output_rep != MachineRepresentation::kWord32 && - use_info.type_check() != TypeCheckKind::kBigInt)) { + !TypeCheckIsBigInt(use_info.type_check()))) { if (use_info.representation() == output_rep) { // Representations are the same. That's a no-op. return node; @@ -234,7 +239,7 @@ Node* RepresentationChanger::GetRepresentationFor( case MachineRepresentation::kWord64: DCHECK(use_info.type_check() == TypeCheckKind::kNone || use_info.type_check() == TypeCheckKind::kSigned64 || - use_info.type_check() == TypeCheckKind::kBigInt || + TypeCheckIsBigInt(use_info.type_check()) || use_info.type_check() == TypeCheckKind::kArrayIndex); return GetWord64RepresentationFor(node, output_rep, output_type, use_node, use_info); @@ -399,7 +404,7 @@ Node* RepresentationChanger::GetTaggedPointerRepresentationFor( // Eagerly fold representation changes for constants. switch (node->opcode()) { case IrOpcode::kHeapConstant: - if (use_info.type_check() == TypeCheckKind::kBigInt) break; + if (TypeCheckIsBigInt(use_info.type_check())) break; return node; // No change necessary. case IrOpcode::kInt32Constant: case IrOpcode::kFloat64Constant: @@ -417,7 +422,7 @@ Node* RepresentationChanger::GetTaggedPointerRepresentationFor( node); } - if (use_info.type_check() == TypeCheckKind::kBigInt && + if (TypeCheckIsBigInt(use_info.type_check()) && !output_type.Is(Type::BigInt())) { // BigInt checks can only be performed on tagged representations. Note that // a corresponding check is inserted down below. @@ -490,13 +495,21 @@ Node* RepresentationChanger::GetTaggedPointerRepresentationFor( // TODO(turbofan): Consider adding a Bailout operator that just deopts // for TaggedSigned output representation. op = simplified()->CheckedTaggedToTaggedPointer(use_info.feedback()); - } else if (IsAnyTagged(output_rep) && - (use_info.type_check() == TypeCheckKind::kBigInt || - output_type.Is(Type::BigInt()))) { - if (output_type.Is(Type::BigInt())) { - return node; + } else if (IsAnyTagged(output_rep)) { + if (use_info.type_check() == TypeCheckKind::kBigInt) { + if (output_type.Is(Type::BigInt())) { + return node; + } + op = simplified()->CheckBigInt(use_info.feedback()); + } else if (use_info.type_check() == TypeCheckKind::kBigInt64) { + if (output_type.Is(Type::SignedBigInt64())) { + return node; + } + op = simplified()->CheckBigInt64(use_info.feedback()); + } else { + return TypeError(node, output_rep, output_type, + MachineRepresentation::kTaggedPointer); } - op = simplified()->CheckBigInt(use_info.feedback()); } else { return TypeError(node, output_rep, output_type, MachineRepresentation::kTaggedPointer); @@ -693,7 +706,7 @@ Node* RepresentationChanger::GetFloat64RepresentationFor( NumberMatcher m(node); if (m.HasResolvedValue()) { // BigInts are not used as number constants. 
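// ---------------------------------------------------------------------------
// Illustrative sketch (editor addition, not part of the patch): the new
// TypeCheckKind::kBigInt64 path below narrows a BigInt to a value that fits
// in a signed 64-bit word; the checked uint64 -> int64 conversion it emits
// can only succeed for values up to INT64_MAX. Scalar form, with
// std::nullopt standing in for the deoptimization exit:
#include <cstdint>
#include <optional>
std::optional<int64_t> CheckedUint64ToInt64Sketch(uint64_t value) {
  if (value > static_cast<uint64_t>(INT64_MAX)) {
    return std::nullopt;  // would deoptimize: not a signed 64-bit value
  }
  return static_cast<int64_t>(value);
}
// ---------------------------------------------------------------------------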
- DCHECK(use_info.type_check() != TypeCheckKind::kBigInt); + DCHECK(!TypeCheckIsBigInt(use_info.type_check())); switch (use_info.type_check()) { case TypeCheckKind::kNone: case TypeCheckKind::kNumber: @@ -701,6 +714,7 @@ Node* RepresentationChanger::GetFloat64RepresentationFor( case TypeCheckKind::kNumberOrOddball: return jsgraph()->Float64Constant(m.ResolvedValue()); case TypeCheckKind::kBigInt: + case TypeCheckKind::kBigInt64: case TypeCheckKind::kHeapObject: case TypeCheckKind::kSigned32: case TypeCheckKind::kSigned64: @@ -1101,7 +1115,7 @@ Node* RepresentationChanger::GetWord64RepresentationFor( case IrOpcode::kFloat64Constant: UNREACHABLE(); case IrOpcode::kNumberConstant: { - if (use_info.type_check() != TypeCheckKind::kBigInt) { + if (!TypeCheckIsBigInt(use_info.type_check())) { double const fv = OpParameter<double>(node->op()); if (base::IsValueInRangeForNumericType<int64_t>(fv)) { int64_t const iv = static_cast<int64_t>(fv); @@ -1128,7 +1142,7 @@ Node* RepresentationChanger::GetWord64RepresentationFor( break; } - if (use_info.type_check() == TypeCheckKind::kBigInt) { + if (TypeCheckIsBigInt(use_info.type_check())) { // BigInts are only represented as tagged pointer and word64. if (!CanBeTaggedPointer(output_rep) && output_rep != MachineRepresentation::kWord64) { @@ -1152,6 +1166,7 @@ Node* RepresentationChanger::GetWord64RepresentationFor( CHECK_NE(use_info.type_check(), TypeCheckKind::kNone); CHECK_NE(use_info.type_check(), TypeCheckKind::kNumberOrOddball); CHECK_NE(use_info.type_check(), TypeCheckKind::kBigInt); + CHECK_NE(use_info.type_check(), TypeCheckKind::kBigInt64); Node* unreachable = InsertUnconditionalDeopt(use_node, DeoptimizeReason::kNotASmi); return jsgraph()->graph()->NewNode( @@ -1218,9 +1233,10 @@ Node* RepresentationChanger::GetWord64RepresentationFor( MachineRepresentation::kWord64); } } else if (IsAnyTagged(output_rep) && - use_info.truncation().IsUsedAsWord64() && - (use_info.type_check() == TypeCheckKind::kBigInt || - output_type.Is(Type::BigInt()))) { + ((use_info.truncation().IsUsedAsWord64() && + (use_info.type_check() == TypeCheckKind::kBigInt || + output_type.Is(Type::BigInt()))) || + use_info.type_check() == TypeCheckKind::kBigInt64)) { node = GetTaggedPointerRepresentationFor(node, output_rep, output_type, use_node, use_info); op = simplified()->TruncateBigIntToWord64(); @@ -1240,10 +1256,18 @@ Node* RepresentationChanger::GetWord64RepresentationFor( MachineRepresentation::kWord64); } } else if (output_rep == MachineRepresentation::kWord64) { - DCHECK_EQ(use_info.type_check(), TypeCheckKind::kBigInt); - if (output_type.Is(Type::BigInt())) { + DCHECK(TypeCheckIsBigInt(use_info.type_check())); + if (output_type.Is(Type::UnsignedBigInt64()) && + use_info.type_check() == TypeCheckKind::kBigInt64) { + op = simplified()->CheckedUint64ToInt64(use_info.feedback()); + } else if ((output_type.Is(Type::BigInt()) && + use_info.type_check() == TypeCheckKind::kBigInt) || + (output_type.Is(Type::SignedBigInt64()) && + use_info.type_check() == TypeCheckKind::kBigInt64)) { return node; } else { + DCHECK(output_type != Type::BigInt() || + use_info.type_check() != TypeCheckKind::kBigInt64); Node* unreachable = InsertUnconditionalDeopt( use_node, DeoptimizeReason::kNotABigInt, use_info.feedback()); return jsgraph()->graph()->NewNode( diff --git a/deps/v8/src/compiler/representation-change.h b/deps/v8/src/compiler/representation-change.h index 5c1ffabf67459d..cbfd893eb0fceb 100644 --- a/deps/v8/src/compiler/representation-change.h +++ 
b/deps/v8/src/compiler/representation-change.h @@ -8,6 +8,7 @@ #include "src/compiler/feedback-source.h" #include "src/compiler/js-graph.h" #include "src/compiler/simplified-operator.h" +#include "src/compiler/use-info.h" namespace v8 { namespace internal { @@ -17,305 +18,6 @@ namespace compiler { class SimplifiedLoweringVerifier; class TypeCache; -enum IdentifyZeros : uint8_t { kIdentifyZeros, kDistinguishZeros }; - -class Truncation final { - public: - // Constructors. - static Truncation None() { - return Truncation(TruncationKind::kNone, kIdentifyZeros); - } - static Truncation Bool() { - return Truncation(TruncationKind::kBool, kIdentifyZeros); - } - static Truncation Word32() { - return Truncation(TruncationKind::kWord32, kIdentifyZeros); - } - static Truncation Word64() { - return Truncation(TruncationKind::kWord64, kIdentifyZeros); - } - static Truncation OddballAndBigIntToNumber( - IdentifyZeros identify_zeros = kDistinguishZeros) { - return Truncation(TruncationKind::kOddballAndBigIntToNumber, - identify_zeros); - } - static Truncation Any(IdentifyZeros identify_zeros = kDistinguishZeros) { - return Truncation(TruncationKind::kAny, identify_zeros); - } - - static Truncation Generalize(Truncation t1, Truncation t2) { - return Truncation( - Generalize(t1.kind(), t2.kind()), - GeneralizeIdentifyZeros(t1.identify_zeros(), t2.identify_zeros())); - } - - // Queries. - bool IsUnused() const { return kind_ == TruncationKind::kNone; } - bool IsUsedAsBool() const { - return LessGeneral(kind_, TruncationKind::kBool); - } - bool IsUsedAsWord32() const { - return LessGeneral(kind_, TruncationKind::kWord32); - } - bool IsUsedAsWord64() const { - return LessGeneral(kind_, TruncationKind::kWord64); - } - bool TruncatesOddballAndBigIntToNumber() const { - return LessGeneral(kind_, TruncationKind::kOddballAndBigIntToNumber); - } - bool IdentifiesUndefinedAndZero() { - return LessGeneral(kind_, TruncationKind::kWord32) || - LessGeneral(kind_, TruncationKind::kBool); - } - bool IdentifiesZeroAndMinusZero() const { - return identify_zeros() == kIdentifyZeros; - } - - // Operators. - bool operator==(Truncation other) const { - return kind() == other.kind() && identify_zeros() == other.identify_zeros(); - } - bool operator!=(Truncation other) const { return !(*this == other); } - - // Debug utilities. 
- const char* description() const; - bool IsLessGeneralThan(Truncation other) const { - return LessGeneral(kind(), other.kind()) && - LessGeneralIdentifyZeros(identify_zeros(), other.identify_zeros()); - } - - IdentifyZeros identify_zeros() const { return identify_zeros_; } - - private: - enum class TruncationKind : uint8_t { - kNone, - kBool, - kWord32, - kWord64, - kOddballAndBigIntToNumber, - kAny - }; - - explicit Truncation(TruncationKind kind, IdentifyZeros identify_zeros) - : kind_(kind), identify_zeros_(identify_zeros) {} - - TruncationKind kind() const { return kind_; } - - friend class SimplifiedLoweringVerifier; - TruncationKind kind_; - IdentifyZeros identify_zeros_; - - static TruncationKind Generalize(TruncationKind rep1, TruncationKind rep2); - static IdentifyZeros GeneralizeIdentifyZeros(IdentifyZeros i1, - IdentifyZeros i2); - static bool LessGeneral(TruncationKind rep1, TruncationKind rep2); - static bool LessGeneralIdentifyZeros(IdentifyZeros u1, IdentifyZeros u2); -}; - -enum class TypeCheckKind : uint8_t { - kNone, - kSignedSmall, - kSigned32, - kSigned64, - kNumber, - kNumberOrBoolean, - kNumberOrOddball, - kHeapObject, - kBigInt, - kArrayIndex -}; - -inline std::ostream& operator<<(std::ostream& os, TypeCheckKind type_check) { - switch (type_check) { - case TypeCheckKind::kNone: - return os << "None"; - case TypeCheckKind::kSignedSmall: - return os << "SignedSmall"; - case TypeCheckKind::kSigned32: - return os << "Signed32"; - case TypeCheckKind::kSigned64: - return os << "Signed64"; - case TypeCheckKind::kNumber: - return os << "Number"; - case TypeCheckKind::kNumberOrBoolean: - return os << "NumberOrBoolean"; - case TypeCheckKind::kNumberOrOddball: - return os << "NumberOrOddball"; - case TypeCheckKind::kHeapObject: - return os << "HeapObject"; - case TypeCheckKind::kBigInt: - return os << "BigInt"; - case TypeCheckKind::kArrayIndex: - return os << "ArrayIndex"; - } - UNREACHABLE(); -} - -// The {UseInfo} class is used to describe a use of an input of a node. -// -// This information is used in two different ways, based on the phase: -// -// 1. During propagation, the use info is used to inform the input node -// about what part of the input is used (we call this truncation) and what -// is the preferred representation. For conversions that will require -// checks, we also keep track of whether a minus zero check is needed. -// -// 2. During lowering, the use info is used to properly convert the input -// to the preferred representation. The preferred representation might be -// insufficient to do the conversion (e.g. word32->float64 conv), so we also -// need the signedness information to produce the correct value. -// Additionally, use info may contain {CheckParameters} which contains -// information for the deoptimizer such as a CallIC on which speculation -// should be disallowed if the check fails. 
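// ---------------------------------------------------------------------------
// Editor's note (hedged, not part of the patch): the Truncation and
// TypeCheckKind definitions removed above and the UseInfo class removed below
// are not dropped from V8. The hunk above adds
// #include "src/compiler/use-info.h" to this header, so the definitions
// appear to have been relocated there, where the new kBigInt64 check kind can
// live alongside the existing kinds. Typical construction, taken from the
// deleted factory methods:
//
//   UseInfo use = UseInfo::CheckedBigIntTruncatingWord64(feedback);
//   // => kWord64 representation, Word64 truncation, TypeCheckKind::kBigInt
// ---------------------------------------------------------------------------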
-class UseInfo { - public: - UseInfo(MachineRepresentation representation, Truncation truncation, - TypeCheckKind type_check = TypeCheckKind::kNone, - const FeedbackSource& feedback = FeedbackSource()) - : representation_(representation), - truncation_(truncation), - type_check_(type_check), - feedback_(feedback) {} - static UseInfo TruncatingWord32() { - return UseInfo(MachineRepresentation::kWord32, Truncation::Word32()); - } - static UseInfo CheckedBigIntTruncatingWord64(const FeedbackSource& feedback) { - // Note that Trunction::Word64() can safely use kIdentifyZero, because - // TypeCheckKind::kBigInt will make sure we deopt for anything other than - // type BigInt anyway. - return UseInfo(MachineRepresentation::kWord64, Truncation::Word64(), - TypeCheckKind::kBigInt, feedback); - } - static UseInfo Word64() { - return UseInfo(MachineRepresentation::kWord64, Truncation::Any()); - } - static UseInfo Word() { - return UseInfo(MachineType::PointerRepresentation(), Truncation::Any()); - } - static UseInfo Bool() { - return UseInfo(MachineRepresentation::kBit, Truncation::Bool()); - } - static UseInfo Float32() { - return UseInfo(MachineRepresentation::kFloat32, Truncation::Any()); - } - static UseInfo Float64() { - return UseInfo(MachineRepresentation::kFloat64, Truncation::Any()); - } - static UseInfo TruncatingFloat64( - IdentifyZeros identify_zeros = kDistinguishZeros) { - return UseInfo(MachineRepresentation::kFloat64, - Truncation::OddballAndBigIntToNumber(identify_zeros)); - } - static UseInfo AnyTagged() { - return UseInfo(MachineRepresentation::kTagged, Truncation::Any()); - } - static UseInfo TaggedSigned() { - return UseInfo(MachineRepresentation::kTaggedSigned, Truncation::Any()); - } - static UseInfo TaggedPointer() { - return UseInfo(MachineRepresentation::kTaggedPointer, Truncation::Any()); - } - - // Possibly deoptimizing conversions. 
- static UseInfo CheckedTaggedAsArrayIndex(const FeedbackSource& feedback) { - return UseInfo(MachineType::PointerRepresentation(), - Truncation::Any(kIdentifyZeros), TypeCheckKind::kArrayIndex, - feedback); - } - static UseInfo CheckedHeapObjectAsTaggedPointer( - const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kTaggedPointer, Truncation::Any(), - TypeCheckKind::kHeapObject, feedback); - } - - static UseInfo CheckedBigIntAsTaggedPointer(const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kTaggedPointer, Truncation::Any(), - TypeCheckKind::kBigInt, feedback); - } - - static UseInfo CheckedSignedSmallAsTaggedSigned( - const FeedbackSource& feedback, - IdentifyZeros identify_zeros = kDistinguishZeros) { - return UseInfo(MachineRepresentation::kTaggedSigned, - Truncation::Any(identify_zeros), TypeCheckKind::kSignedSmall, - feedback); - } - static UseInfo CheckedSignedSmallAsWord32(IdentifyZeros identify_zeros, - const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kWord32, - Truncation::Any(identify_zeros), TypeCheckKind::kSignedSmall, - feedback); - } - static UseInfo CheckedSigned32AsWord32(IdentifyZeros identify_zeros, - const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kWord32, - Truncation::Any(identify_zeros), TypeCheckKind::kSigned32, - feedback); - } - static UseInfo CheckedSigned64AsWord64(IdentifyZeros identify_zeros, - const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kWord64, - Truncation::Any(identify_zeros), TypeCheckKind::kSigned64, - feedback); - } - static UseInfo CheckedNumberAsFloat64(IdentifyZeros identify_zeros, - const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kFloat64, - Truncation::Any(identify_zeros), TypeCheckKind::kNumber, - feedback); - } - static UseInfo CheckedNumberAsWord32(const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kWord32, Truncation::Word32(), - TypeCheckKind::kNumber, feedback); - } - static UseInfo CheckedNumberOrBooleanAsFloat64( - IdentifyZeros identify_zeros, const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kFloat64, - Truncation::Any(identify_zeros), - TypeCheckKind::kNumberOrBoolean, feedback); - } - static UseInfo CheckedNumberOrOddballAsFloat64( - IdentifyZeros identify_zeros, const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kFloat64, - Truncation::Any(identify_zeros), - TypeCheckKind::kNumberOrOddball, feedback); - } - static UseInfo CheckedNumberOrOddballAsWord32( - const FeedbackSource& feedback) { - return UseInfo(MachineRepresentation::kWord32, Truncation::Word32(), - TypeCheckKind::kNumberOrOddball, feedback); - } - - // Undetermined representation. - static UseInfo Any() { - return UseInfo(MachineRepresentation::kNone, Truncation::Any()); - } - static UseInfo AnyTruncatingToBool() { - return UseInfo(MachineRepresentation::kNone, Truncation::Bool()); - } - - // Value not used. - static UseInfo None() { - return UseInfo(MachineRepresentation::kNone, Truncation::None()); - } - - MachineRepresentation representation() const { return representation_; } - Truncation truncation() const { return truncation_; } - TypeCheckKind type_check() const { return type_check_; } - CheckForMinusZeroMode minus_zero_check() const { - return truncation().IdentifiesZeroAndMinusZero() - ? 
CheckForMinusZeroMode::kDontCheckForMinusZero - : CheckForMinusZeroMode::kCheckForMinusZero; - } - const FeedbackSource& feedback() const { return feedback_; } - - private: - MachineRepresentation representation_; - Truncation truncation_; - TypeCheckKind type_check_; - FeedbackSource feedback_; -}; - // Contains logic related to changing the representation of values for constants // and other nodes, as well as lowering Simplified->Machine operators. // Eagerly folds any representation changes for constants. diff --git a/deps/v8/src/compiler/schedule.cc b/deps/v8/src/compiler/schedule.cc index 5aeeff91238bc8..c608dd63ad1287 100644 --- a/deps/v8/src/compiler/schedule.cc +++ b/deps/v8/src/compiler/schedule.cc @@ -198,7 +198,7 @@ BasicBlock* Schedule::NewBasicBlock() { } void Schedule::PlanNode(BasicBlock* block, Node* node) { - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { StdoutStream{} << "Planning #" << node->id() << ":" << node->op()->mnemonic() << " for future add to id:" << block->id() << "\n"; @@ -208,7 +208,7 @@ void Schedule::PlanNode(BasicBlock* block, Node* node) { } void Schedule::AddNode(BasicBlock* block, Node* node) { - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { StdoutStream{} << "Adding #" << node->id() << ":" << node->op()->mnemonic() << " to id:" << block->id() << "\n"; } diff --git a/deps/v8/src/compiler/scheduler.cc b/deps/v8/src/compiler/scheduler.cc index e28c848e5f495d..4da855cf6e54f5 100644 --- a/deps/v8/src/compiler/scheduler.cc +++ b/deps/v8/src/compiler/scheduler.cc @@ -22,9 +22,9 @@ namespace v8 { namespace internal { namespace compiler { -#define TRACE(...) \ - do { \ - if (FLAG_trace_turbo_scheduler) PrintF(__VA_ARGS__); \ +#define TRACE(...) \ + do { \ + if (v8_flags.trace_turbo_scheduler) PrintF(__VA_ARGS__); \ } while (false) Scheduler::Scheduler(Zone* zone, Graph* graph, Schedule* schedule, Flags flags, @@ -195,7 +195,7 @@ void Scheduler::IncrementUnscheduledUseCount(Node* node, Node* from) { } ++(GetData(node)->unscheduled_count_); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { TRACE(" Use count of #%d:%s (used by #%d:%s)++ = %d\n", node->id(), node->op()->mnemonic(), from->id(), from->op()->mnemonic(), GetData(node)->unscheduled_count_); @@ -215,7 +215,7 @@ void Scheduler::DecrementUnscheduledUseCount(Node* node, Node* from) { DCHECK_LT(0, GetData(node)->unscheduled_count_); --(GetData(node)->unscheduled_count_); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { TRACE(" Use count of #%d:%s (used by #%d:%s)-- = %d\n", node->id(), node->op()->mnemonic(), from->id(), from->op()->mnemonic(), GetData(node)->unscheduled_count_); @@ -495,7 +495,7 @@ class CFGBuilder : public ZoneObject { break; } - if (FLAG_warn_about_builtin_profile_data && + if (v8_flags.warn_about_builtin_profile_data && hint_from_profile != BranchHint::kNone && BranchHintOf(branch->op()) != BranchHint::kNone && hint_from_profile != BranchHintOf(branch->op())) { @@ -704,7 +704,7 @@ class SpecialRPONumberer : public ZoneObject { // Print and verify the special reverse-post-order. 
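// (Editor's note, not part of the upstream patch: the FLAG_trace_turbo_scheduler reads in these
// scheduler hunks, like all other FLAG_* reads touched by this commit, are mechanically renamed to
// members of the v8_flags struct; tracing behavior itself is unchanged.)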
void PrintAndVerifySpecialRPO() { #if DEBUG - if (FLAG_trace_turbo_scheduler) PrintRPO(); + if (v8_flags.trace_turbo_scheduler) PrintRPO(); VerifySpecialRPO(); #endif } @@ -1481,7 +1481,7 @@ void Scheduler::ScheduleEarly() { } TRACE("--- SCHEDULE EARLY -----------------------------------------\n"); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { TRACE("roots: "); for (Node* node : schedule_root_nodes_) { TRACE("#%d:%s ", node->id(), node->op()->mnemonic()); @@ -1847,7 +1847,7 @@ class ScheduleLateNodeVisitor { void Scheduler::ScheduleLate() { TRACE("--- SCHEDULE LATE ------------------------------------------\n"); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { TRACE("roots: "); for (Node* node : schedule_root_nodes_) { TRACE("#%d:%s ", node->id(), node->op()->mnemonic()); @@ -1891,7 +1891,7 @@ void Scheduler::SealFinalSchedule() { void Scheduler::FuseFloatingControl(BasicBlock* block, Node* node) { TRACE("--- FUSE FLOATING CONTROL ----------------------------------\n"); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { StdoutStream{} << "Schedule before control flow fusion:\n" << *schedule_; } @@ -1919,7 +1919,7 @@ void Scheduler::FuseFloatingControl(BasicBlock* block, Node* node) { } } } - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { TRACE("propagation roots: "); for (Node* r : propagation_roots) { TRACE("#%d:%s ", r->id(), r->op()->mnemonic()); @@ -1934,7 +1934,7 @@ void Scheduler::FuseFloatingControl(BasicBlock* block, Node* node) { scheduled_nodes_.resize(schedule_->BasicBlockCount()); MovePlannedNodes(block, schedule_->block(node)); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { StdoutStream{} << "Schedule after control flow fusion:\n" << *schedule_; } } diff --git a/deps/v8/src/compiler/simplified-lowering-verifier.cc b/deps/v8/src/compiler/simplified-lowering-verifier.cc index 5a376eed5ce68c..d39cd97accdc74 100644 --- a/deps/v8/src/compiler/simplified-lowering-verifier.cc +++ b/deps/v8/src/compiler/simplified-lowering-verifier.cc @@ -26,6 +26,10 @@ bool IsNonTruncatingMachineTypeFor(const MachineType& mt, const Type& type) { if (type.IsNone()) return true; // TODO(nicohartmann@): Add more cases here. 
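// (Editor's note, not part of the upstream patch: the case added below encodes that a BigInt may
// be carried untruncated in a Word64 register only when its static type already fits in 64 bits,
// e.g. Type::SignedBigInt64() or Type::UnsignedBigInt64(); any wider BigInt must remain in a
// tagged representation.)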
if (type.Is(Type::BigInt())) { + if (mt.representation() == MachineRepresentation::kWord64) { + return type.Is(Type::SignedBigInt64()) || + type.Is(Type::UnsignedBigInt64()); + } return mt.representation() == MachineRepresentation::kTaggedPointer || mt.representation() == MachineRepresentation::kTagged; } @@ -279,6 +283,13 @@ void SimplifiedLoweringVerifier::VisitNode(Node* node, CheckAndSet(node, input_type, InputTruncation(node, 0)); break; } + case IrOpcode::kCheckBigInt64: { + Type input_type = InputType(node, 0); + input_type = + Type::Intersect(input_type, Type::SignedBigInt64(), graph_zone()); + CheckAndSet(node, input_type, InputTruncation(node, 0)); + break; + } case IrOpcode::kReturn: { const int return_value_count = ValueInputCountOfReturn(node->op()); for (int i = 0; i < return_value_count; ++i) { @@ -412,6 +423,7 @@ void SimplifiedLoweringVerifier::VisitNode(Node* node, CASE(CheckedUint32Div) CASE(CheckedUint32Mod) CASE(CheckedInt32Mul) + CASE(CheckedBigInt64Add) CASE(CheckedInt32ToTaggedSigned) CASE(CheckedInt64ToInt32) CASE(CheckedInt64ToTaggedSigned) @@ -420,6 +432,7 @@ void SimplifiedLoweringVerifier::VisitNode(Node* node, CASE(CheckedUint32ToTaggedSigned) CASE(CheckedUint64Bounds) CASE(CheckedUint64ToInt32) + CASE(CheckedUint64ToInt64) CASE(CheckedUint64ToTaggedSigned) CASE(CheckedFloat64ToInt64) CASE(CheckedTaggedSignedToInt32) @@ -475,6 +488,9 @@ void SimplifiedLoweringVerifier::VisitNode(Node* node, CASE(Int64Sub) CASE(Int64SubWithOverflow) CASE(Int64Mul) + CASE(Int64MulHigh) + CASE(Uint64MulHigh) + CASE(Int64MulWithOverflow) CASE(Int64Div) CASE(Int64Mod) CASE(Uint64Div) diff --git a/deps/v8/src/compiler/simplified-lowering.cc b/deps/v8/src/compiler/simplified-lowering.cc index a0e4583f2ea2fb..b734532777aa5c 100644 --- a/deps/v8/src/compiler/simplified-lowering.cc +++ b/deps/v8/src/compiler/simplified-lowering.cc @@ -39,9 +39,9 @@ namespace internal { namespace compiler { // Macro for outputting trace information from representation inference. -#define TRACE(...) \ - do { \ - if (FLAG_trace_representation) PrintF(__VA_ARGS__); \ +#define TRACE(...) \ + do { \ + if (v8_flags.trace_representation) PrintF(__VA_ARGS__); \ } while (false) const char* kSimplifiedLoweringReducerName = "SimplifiedLowering"; @@ -223,6 +223,11 @@ bool IsSomePositiveOrderedNumber(Type type) { return type.Is(Type::OrderedNumber()) && (type.IsNone() || type.Min() > 0); } +inline bool IsLargeBigInt(Type type) { + return type.Is(Type::BigInt()) && !type.Is(Type::SignedBigInt64()) && + !type.Is(Type::UnsignedBigInt64()); +} + class JSONGraphWriterWithVerifierTypes : public JSONGraphWriter { public: JSONGraphWriterWithVerifierTypes(std::ostream& os, const Graph* graph, @@ -515,7 +520,7 @@ class RepresentationSelector { if (!type.IsInvalid() && new_type.Is(type)) return false; GetInfo(node)->set_feedback_type(new_type); - if (FLAG_trace_representation) { + if (v8_flags.trace_representation) { PrintNodeFeedbackType(node); } return true; @@ -974,6 +979,13 @@ class RepresentationSelector { use.truncation().description()); if (input_type.IsInvalid()) { input_type = TypeOf(input); + } else { + // This case is reached when ConvertInput is called for TypeGuard nodes + // which explicitly set the {input_type} for their input. In order to + // correctly verify the resulting graph, we have to preserve this + // forced type for the verifier. 
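// (Editor's note, not part of the upstream patch: TypeGuard is the only operator that calls
// ConvertInput with an explicit {input_type}, hence the DCHECK below; InsertTypeOverrideForVerifier
// adds an annotation that is consumed only by the SimplifiedLoweringVerifier.)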
+ DCHECK_EQ(node->opcode(), IrOpcode::kTypeGuard); + input = InsertTypeOverrideForVerifier(input_type, input); } Node* n = changer_->GetRepresentationFor(input, input_rep, input_type, node, use); @@ -1307,6 +1319,14 @@ class RepresentationSelector { return MachineType::AnyTagged(); } if (rep == MachineRepresentation::kWord64) { + if (type.Is(Type::SignedBigInt64())) { + return MachineType::SignedBigInt64(); + } + + if (type.Is(Type::UnsignedBigInt64())) { + return MachineType::UnsignedBigInt64(); + } + if (type.Is(Type::BigInt())) { return MachineType::AnyTagged(); } @@ -1328,13 +1348,11 @@ class RepresentationSelector { void VisitStateValues(Node* node) { if (propagate<T>()) { for (int i = 0; i < node->InputCount(); i++) { - // When lowering 64 bit BigInts to Word64 representation, we have to - // make sure they are rematerialized before deoptimization. By - // propagating a AnyTagged use, the RepresentationChanger is going to - // insert the necessary conversions. - // TODO(nicohartmann): Remove, once the deoptimizer can rematerialize - // truncated BigInts. - if (TypeOf(node->InputAt(i)).Is(Type::BigInt())) { + // BigInt64s are rematerialized in deoptimization. The other BigInts + // must be rematerialized before deoptimization. By propagating an + // AnyTagged use, the RepresentationChanger is going to insert the + // necessary conversions. + if (IsLargeBigInt(TypeOf(node->InputAt(i)))) { EnqueueInput<T>(node, i, UseInfo::AnyTagged()); } else { EnqueueInput<T>(node, i, UseInfo::Any()); @@ -1346,9 +1364,7 @@ class RepresentationSelector { zone->New<ZoneVector<MachineType>>(node->InputCount(), zone); for (int i = 0; i < node->InputCount(); i++) { Node* input = node->InputAt(i); - // TODO(nicohartmann): Remove, once the deoptimizer can rematerialize - // truncated BigInts. - if (TypeOf(input).Is(Type::BigInt())) { + if (IsLargeBigInt(TypeOf(input))) { ConvertInput(node, i, UseInfo::AnyTagged()); } @@ -1377,9 +1393,7 @@ class RepresentationSelector { // state-values node). Node* accumulator = node.stack(); if (propagate<T>()) { - // TODO(nicohartmann): Remove, once the deoptimizer can rematerialize - // truncated BigInts. - if (TypeOf(accumulator).Is(Type::BigInt())) { + if (IsLargeBigInt(TypeOf(accumulator))) { EnqueueInput<T>(node, FrameState::kFrameStateStackInput, UseInfo::AnyTagged()); } else { @@ -1387,9 +1401,7 @@ class RepresentationSelector { UseInfo::Any()); } } else if (lower<T>()) { - // TODO(nicohartmann): Remove, once the deoptimizer can rematerialize - // truncated BigInts. - if (TypeOf(accumulator).Is(Type::BigInt())) { + if (IsLargeBigInt(TypeOf(accumulator))) { ConvertInput(node, FrameState::kFrameStateStackInput, UseInfo::AnyTagged()); } @@ -1424,9 +1436,7 @@ class RepresentationSelector { void VisitObjectState(Node* node) { if (propagate<T>()) { for (int i = 0; i < node->InputCount(); i++) { - // TODO(nicohartmann): Remove, once the deoptimizer can rematerialize - // truncated BigInts. - if (TypeOf(node->InputAt(i)).Is(Type::BigInt())) { + if (IsLargeBigInt(TypeOf(node->InputAt(i)))) { EnqueueInput<T>(node, i, UseInfo::AnyTagged()); } else { EnqueueInput<T>(node, i, UseInfo::Any()); @@ -1440,9 +1450,7 @@ class RepresentationSelector { Node* input = node->InputAt(i); (*types)[i] = DeoptMachineTypeOf(GetInfo(input)->representation(), TypeOf(input)); - // TODO(nicohartmann): Remove, once the deoptimizer can rematerialize - // truncated BigInts. 
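// (Editor's note, not part of the upstream patch: IsLargeBigInt(t), defined earlier in this file's
// hunks, holds for BigInts not statically known to fit in 64 bits. Only those must be kept tagged
// across deopt points; BigInt64 values may stay in Word64 registers because the deoptimizer can
// rematerialize them.)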
- if (TypeOf(node->InputAt(i)).Is(Type::BigInt())) { + if (IsLargeBigInt(TypeOf(input))) { ConvertInput(node, i, UseInfo::AnyTagged()); } } @@ -1891,9 +1899,9 @@ class RepresentationSelector { FeedbackSource const& feedback) { switch (type.GetSequenceType()) { case CTypeInfo::SequenceType::kScalar: { - // TODO(mslekova): Add clamp. - if (uint8_t(type.GetFlags()) & - uint8_t(CTypeInfo::Flags::kEnforceRangeBit)) { + uint8_t flags = uint8_t(type.GetFlags()); + if (flags & uint8_t(CTypeInfo::Flags::kEnforceRangeBit) || + flags & uint8_t(CTypeInfo::Flags::kClampBit)) { return UseInfo::CheckedNumberAsFloat64(kIdentifyZeros, feedback); } switch (type.GetType()) { @@ -1981,7 +1989,7 @@ case wasm::kI32: return MachineType::Int32(); case wasm::kI64: - return MachineType::Int64(); + return MachineType::SignedBigInt64(); case wasm::kF32: return MachineType::Float32(); case wasm::kF64: @@ -2964,6 +2972,15 @@ } return; } + case IrOpcode::kCheckBigInt64: { + if (InputIs(node, Type::BigInt())) { + VisitNoop<T>(node, truncation); + } else { + VisitUnop<T>(node, UseInfo::AnyTagged(), + MachineRepresentation::kTaggedPointer); + } + return; + } case IrOpcode::kSpeculativeBigIntAsIntN: case IrOpcode::kSpeculativeBigIntAsUintN: { const bool is_asuintn = @@ -2979,11 +2996,14 @@ is_asuintn ? Type::UnsignedBigInt64() : Type::SignedBigInt64()); if (lower<T>()) { if (p.bits() == 0) { - DeferReplacement( - node, InsertTypeOverrideForVerifier(Type::UnsignedBigInt63(), - jsgraph_->ZeroConstant())); + DeferReplacement(node, InsertTypeOverrideForVerifier( + Type::UnsignedBigInt63(), + jsgraph_->Int64Constant(0))); } else if (p.bits() == 64) { - DeferReplacement(node, node->InputAt(0)); + DeferReplacement(node, InsertTypeOverrideForVerifier( + is_asuintn ? Type::UnsignedBigInt64() + : Type::SignedBigInt64(), + node->InputAt(0))); } else { if (is_asuintn) { const uint64_t mask = (1ULL << p.bits()) - 1ULL; @@ -3178,11 +3198,14 @@ return; } case IrOpcode::kSpeculativeBigIntAdd: { - // TODO(nicohartmann@, chromium:1073440): There should be special - // handling for truncation.IsUnused() that correctly propagates deadness, - // but preserves type checking which may throw exceptions. Until this - // is fully supported, we lower to int64 operations but keep pushing - // type constraints.
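// (Editor's note, not part of the upstream patch: the new code below only drops a dead
// SpeculativeBigIntAdd when both inputs are statically BigInt; otherwise the node still acts as an
// implicit type check that may throw and must be kept, which is exactly the concern the deleted
// TODO describes.)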
+ if (truncation.IsUnused()) { + Type left_type = GetUpperBound(node->InputAt(0)); + Type right_type = GetUpperBound(node->InputAt(1)); + if (left_type.Is(Type::BigInt()) && right_type.Is(Type::BigInt())) { + VisitUnused<T>(node); + return; + } + } if (truncation.IsUsedAsWord64()) { VisitBinop<T>( node, UseInfo::CheckedBigIntTruncatingWord64(FeedbackSource{}), @@ -3190,17 +3213,42 @@ class RepresentationSelector { if (lower<T>()) { ChangeToPureOp(node, lowering->machine()->Int64Add()); } - } else { - VisitBinop<T>(node, - UseInfo::CheckedBigIntAsTaggedPointer(FeedbackSource{}), - MachineRepresentation::kTaggedPointer); - if (lower<T>()) { - ChangeOp(node, lowering->simplified()->BigIntAdd()); + return; + } + BigIntOperationHint hint = BigIntOperationHintOf(node->op()); + switch (hint) { + case BigIntOperationHint::kBigInt64: { + VisitBinop<T>( + node, UseInfo::CheckedBigInt64AsWord64(FeedbackSource{}), + MachineRepresentation::kWord64, Type::SignedBigInt64()); + if (lower<T>()) { + ChangeOp(node, lowering->simplified()->CheckedBigInt64Add()); + } + break; + } + case BigIntOperationHint::kBigInt: { + VisitBinop<T>( + node, UseInfo::CheckedBigIntAsTaggedPointer(FeedbackSource{}), + MachineRepresentation::kTaggedPointer); + if (lower<T>()) { + ChangeOp(node, lowering->simplified()->BigIntAdd()); + } + break; } + default: + UNREACHABLE(); } return; } case IrOpcode::kSpeculativeBigIntSubtract: { + if (truncation.IsUnused()) { + Type left_type = GetUpperBound(node->InputAt(0)); + Type right_type = GetUpperBound(node->InputAt(1)); + if (left_type.Is(Type::BigInt()) && right_type.Is(Type::BigInt())) { + VisitUnused<T>(node); + return; + } + } if (truncation.IsUsedAsWord64()) { VisitBinop<T>( node, UseInfo::CheckedBigIntTruncatingWord64(FeedbackSource{}), @@ -3219,6 +3267,14 @@ class RepresentationSelector { return; } case IrOpcode::kSpeculativeBigIntMultiply: { + if (truncation.IsUnused()) { + Type left_type = GetUpperBound(node->InputAt(0)); + Type right_type = GetUpperBound(node->InputAt(1)); + if (left_type.Is(Type::BigInt()) && right_type.Is(Type::BigInt())) { + VisitUnused<T>(node); + return; + } + } if (truncation.IsUsedAsWord64()) { VisitBinop<T>( node, UseInfo::CheckedBigIntTruncatingWord64(FeedbackSource{}), @@ -3236,7 +3292,32 @@ class RepresentationSelector { } return; } + case IrOpcode::kSpeculativeBigIntDivide: { + if (truncation.IsUnused()) { + Type left_type = GetUpperBound(node->InputAt(0)); + Type right_type = GetUpperBound(node->InputAt(1)); + if (left_type.Is(Type::BigInt()) && right_type.Is(Type::BigInt())) { + VisitUnused<T>(node); + return; + } + } + VisitBinop<T>(node, + UseInfo::CheckedBigIntAsTaggedPointer(FeedbackSource{}), + MachineRepresentation::kTaggedPointer); + if (lower<T>()) { + ChangeOp(node, lowering->simplified()->BigIntDivide()); + } + return; + } case IrOpcode::kSpeculativeBigIntBitwiseAnd: { + if (truncation.IsUnused()) { + Type left_type = GetUpperBound(node->InputAt(0)); + Type right_type = GetUpperBound(node->InputAt(1)); + if (left_type.Is(Type::BigInt()) && right_type.Is(Type::BigInt())) { + VisitUnused<T>(node); + return; + } + } if (truncation.IsUsedAsWord64()) { VisitBinop<T>( node, UseInfo::CheckedBigIntTruncatingWord64(FeedbackSource{}), @@ -3254,17 +3335,10 @@ class RepresentationSelector { } return; } - case IrOpcode::kSpeculativeBigIntDivide: { - VisitBinop<T>(node, - UseInfo::CheckedBigIntAsTaggedPointer(FeedbackSource{}), - MachineRepresentation::kTaggedPointer); - if (lower<T>()) { - ChangeOp(node, lowering->simplified()->BigIntDivide()); - } 
- return; - } case IrOpcode::kSpeculativeBigIntNegate: { - if (truncation.IsUsedAsWord64()) { + if (truncation.IsUnused()) { + VisitUnused<T>(node); + } else if (truncation.IsUsedAsWord64()) { VisitUnop<T>(node, UseInfo::CheckedBigIntTruncatingWord64(FeedbackSource{}), MachineRepresentation::kWord64); @@ -4142,7 +4216,7 @@ class RepresentationSelector { if (inputType.CanBeAsserted()) { ChangeOp(node, simplified()->AssertType(inputType)); } else { - if (!FLAG_fuzzing) { + if (!v8_flags.fuzzing) { #ifdef DEBUG inputType.Print(); #endif @@ -4440,7 +4514,7 @@ SimplifiedLowering::SimplifiedLowering( void SimplifiedLowering::LowerAllNodes() { SimplifiedLoweringVerifier* verifier = nullptr; - if (FLAG_verify_simplified_lowering) { + if (v8_flags.verify_simplified_lowering) { verifier = zone_->New<SimplifiedLoweringVerifier>(zone_, graph()); } RepresentationChanger changer(jsgraph(), broker_, verifier); diff --git a/deps/v8/src/compiler/simplified-operator.cc b/deps/v8/src/compiler/simplified-operator.cc index 85e9bc470e70ab..5cc9a7e152fabc 100644 --- a/deps/v8/src/compiler/simplified-operator.cc +++ b/deps/v8/src/compiler/simplified-operator.cc @@ -235,20 +235,6 @@ CheckForMinusZeroMode CheckMinusZeroModeOf(const Operator* op) { return OpParameter<CheckForMinusZeroMode>(op); } -size_t hash_value(CheckForMinusZeroMode mode) { - return static_cast<size_t>(mode); -} - -std::ostream& operator<<(std::ostream& os, CheckForMinusZeroMode mode) { - switch (mode) { - case CheckForMinusZeroMode::kCheckForMinusZero: - return os << "check-for-minus-zero"; - case CheckForMinusZeroMode::kDontCheckForMinusZero: - return os << "dont-check-for-minus-zero"; - } - UNREACHABLE(); -} - std::ostream& operator<<(std::ostream& os, CheckMapsFlags flags) { if (flags & CheckMapsFlag::kTryMigrateInstance) { return os << "TryMigrateInstance"; @@ -505,6 +491,8 @@ std::ostream& operator<<(std::ostream& os, BigIntOperationHint hint) { switch (hint) { case BigIntOperationHint::kBigInt: return os << "BigInt"; + case BigIntOperationHint::kBigInt64: + return os << "BigInt64"; } UNREACHABLE(); } @@ -554,6 +542,12 @@ NumberOperationHint NumberOperationHintOf(const Operator* op) { return OpParameter<NumberOperationHint>(op); } +BigIntOperationHint BigIntOperationHintOf(const Operator* op) { + // TODO(panq): Expand the DCHECK when more BigInt operations are supported. 
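// (Editor's sketch, not part of the upstream patch: callers branch on the returned hint, e.g.
//   switch (BigIntOperationHintOf(node->op())) {
//     case BigIntOperationHint::kBigInt64:  /* checked Word64 lowering */
//     case BigIntOperationHint::kBigInt:    /* generic tagged lowering */
//   }
// as the kSpeculativeBigIntAdd handling in simplified-lowering.cc above does.)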
+ DCHECK(op->opcode() == IrOpcode::kSpeculativeBigIntAdd); + return OpParameter<BigIntOperationHint>(op); +} + bool operator==(NumberOperationParameters const& lhs, NumberOperationParameters const& rhs) { return lhs.hint() == rhs.hint() && lhs.feedback() == rhs.feedback(); @@ -837,23 +831,26 @@ bool operator==(CheckMinusZeroParameters const& lhs, V(CheckedInt32Mod, 2, 1) \ V(CheckedInt32Sub, 2, 1) \ V(CheckedUint32Div, 2, 1) \ - V(CheckedUint32Mod, 2, 1) - -#define CHECKED_WITH_FEEDBACK_OP_LIST(V) \ - V(CheckNumber, 1, 1) \ - V(CheckSmi, 1, 1) \ - V(CheckString, 1, 1) \ - V(CheckBigInt, 1, 1) \ - V(CheckedInt32ToTaggedSigned, 1, 1) \ - V(CheckedInt64ToInt32, 1, 1) \ - V(CheckedInt64ToTaggedSigned, 1, 1) \ - V(CheckedTaggedToArrayIndex, 1, 1) \ - V(CheckedTaggedSignedToInt32, 1, 1) \ - V(CheckedTaggedToTaggedPointer, 1, 1) \ - V(CheckedTaggedToTaggedSigned, 1, 1) \ - V(CheckedUint32ToInt32, 1, 1) \ - V(CheckedUint32ToTaggedSigned, 1, 1) \ - V(CheckedUint64ToInt32, 1, 1) \ + V(CheckedUint32Mod, 2, 1) \ + V(CheckedBigInt64Add, 2, 1) + +#define CHECKED_WITH_FEEDBACK_OP_LIST(V) \ + V(CheckNumber, 1, 1) \ + V(CheckSmi, 1, 1) \ + V(CheckString, 1, 1) \ + V(CheckBigInt, 1, 1) \ + V(CheckBigInt64, 1, 1) \ + V(CheckedInt32ToTaggedSigned, 1, 1) \ + V(CheckedInt64ToInt32, 1, 1) \ + V(CheckedInt64ToTaggedSigned, 1, 1) \ + V(CheckedTaggedToArrayIndex, 1, 1) \ + V(CheckedTaggedSignedToInt32, 1, 1) \ + V(CheckedTaggedToTaggedPointer, 1, 1) \ + V(CheckedTaggedToTaggedSigned, 1, 1) \ + V(CheckedUint32ToInt32, 1, 1) \ + V(CheckedUint32ToTaggedSigned, 1, 1) \ + V(CheckedUint64ToInt32, 1, 1) \ + V(CheckedUint64ToInt64, 1, 1) \ V(CheckedUint64ToTaggedSigned, 1, 1) #define CHECKED_BOUNDS_OP_LIST(V) \ diff --git a/deps/v8/src/compiler/simplified-operator.h b/deps/v8/src/compiler/simplified-operator.h index 6f20e466ea1aa1..8d082f01b03435 100644 --- a/deps/v8/src/compiler/simplified-operator.h +++ b/deps/v8/src/compiler/simplified-operator.h @@ -13,6 +13,7 @@ #include "src/common/globals.h" #include "src/compiler/common-operator.h" #include "src/compiler/feedback-source.h" +#include "src/compiler/globals.h" #include "src/compiler/node-properties.h" #include "src/compiler/operator.h" #include "src/compiler/types.h" @@ -87,6 +88,12 @@ struct FieldAccess { // initializing a newly // allocated object or part // of a map transition. + bool is_bounded_size_access = false; // Whether this field is stored as a + // bounded size field. In that case, + // the size is shifted to the left to + // guarantee that the value is at most + // kMaxSafeBufferSizeForSandbox after + // decoding. 
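// (Editor's illustration, not part of the upstream patch, assuming the sandbox's bounded-size
// helpers and a shift constant here called kBoundedSizeShift: a write stores
// size << kBoundedSizeShift and a read computes raw >> kBoundedSizeShift, so every decoded value
// is at most kMaxSafeBufferSizeForSandbox.)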
FieldAccess() : base_is_tagged(kTaggedBase), @@ -365,16 +372,6 @@ size_t hash_value(const CheckTaggedInputParameters& params); bool operator==(CheckTaggedInputParameters const&, CheckTaggedInputParameters const&); -enum class CheckForMinusZeroMode : uint8_t { - kCheckForMinusZero, - kDontCheckForMinusZero, -}; - -size_t hash_value(CheckForMinusZeroMode); - -V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, - CheckForMinusZeroMode); - CheckForMinusZeroMode CheckMinusZeroModeOf(const Operator*) V8_WARN_UNUSED_RESULT; @@ -533,6 +530,7 @@ enum class NumberOperationHint : uint8_t { enum class BigIntOperationHint : uint8_t { kBigInt, + kBigInt64, }; size_t hash_value(NumberOperationHint); @@ -542,6 +540,8 @@ V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, NumberOperationHint); V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, BigIntOperationHint); V8_EXPORT_PRIVATE NumberOperationHint NumberOperationHintOf(const Operator* op) V8_WARN_UNUSED_RESULT; +V8_EXPORT_PRIVATE BigIntOperationHint BigIntOperationHintOf(const Operator* op) + V8_WARN_UNUSED_RESULT; class NumberOperationParameters { public: @@ -917,6 +917,7 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final const Operator* CheckedInt32Mod(); const Operator* CheckedInt32Mul(CheckForMinusZeroMode); const Operator* CheckedInt32Sub(); + const Operator* CheckedBigInt64Add(); const Operator* CheckedInt32ToTaggedSigned(const FeedbackSource& feedback); const Operator* CheckedInt64ToInt32(const FeedbackSource& feedback); const Operator* CheckedInt64ToTaggedSigned(const FeedbackSource& feedback); @@ -931,6 +932,7 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final const Operator* CheckedTaggedToTaggedPointer(const FeedbackSource& feedback); const Operator* CheckedTaggedToTaggedSigned(const FeedbackSource& feedback); const Operator* CheckBigInt(const FeedbackSource& feedback); + const Operator* CheckBigInt64(const FeedbackSource& feedback); const Operator* CheckedTruncateTaggedToWord32(CheckTaggedInputMode, const FeedbackSource& feedback); const Operator* CheckedUint32Div(); @@ -938,6 +940,7 @@ class V8_EXPORT_PRIVATE SimplifiedOperatorBuilder final const Operator* CheckedUint32ToInt32(const FeedbackSource& feedback); const Operator* CheckedUint32ToTaggedSigned(const FeedbackSource& feedback); const Operator* CheckedUint64ToInt32(const FeedbackSource& feedback); + const Operator* CheckedUint64ToInt64(const FeedbackSource& feedback); const Operator* CheckedUint64ToTaggedSigned(const FeedbackSource& feedback); const Operator* ConvertReceiver(ConvertReceiverMode); diff --git a/deps/v8/src/compiler/store-store-elimination.cc b/deps/v8/src/compiler/store-store-elimination.cc index 75a5ea421e9b67..f6ef3d52429500 100644 --- a/deps/v8/src/compiler/store-store-elimination.cc +++ b/deps/v8/src/compiler/store-store-elimination.cc @@ -18,7 +18,7 @@ namespace compiler { #define TRACE(fmt, ...) 
\ do { \ - if (FLAG_trace_store_elimination) { \ + if (v8_flags.trace_store_elimination) { \ PrintF("RedundantStoreFinder: " fmt "\n", ##__VA_ARGS__); \ } \ } while (false) @@ -553,7 +553,7 @@ void StoreStoreElimination::Run(JSGraph* js_graph, TickCounter* tick_counter, // Remove superfluous nodes for (Node* node : finder.to_remove_const()) { - if (FLAG_trace_store_elimination) { + if (v8_flags.trace_store_elimination) { PrintF("StoreStoreElimination::Run: Eliminating node #%d:%s\n", node->id(), node->op()->mnemonic()); } diff --git a/deps/v8/src/compiler/turboshaft/assembler.h b/deps/v8/src/compiler/turboshaft/assembler.h index f7ed2415ee2188..f862d16c634597 100644 --- a/deps/v8/src/compiler/turboshaft/assembler.h +++ b/deps/v8/src/compiler/turboshaft/assembler.h @@ -11,16 +11,14 @@ #include <memory> #include <type_traits> -#include "src/base/iterator.h" #include "src/base/logging.h" #include "src/base/macros.h" #include "src/base/small-vector.h" #include "src/base/template-utils.h" -#include "src/codegen/machine-type.h" -#include "src/codegen/source-position.h" +#include "src/codegen/reloc-info.h" #include "src/compiler/turboshaft/graph.h" +#include "src/compiler/turboshaft/operation-matching.h" #include "src/compiler/turboshaft/operations.h" -#include "src/zone/zone-containers.h" namespace v8::internal::compiler::turboshaft { @@ -32,232 +30,376 @@ class AssemblerInterface : public Superclass { using Superclass::Superclass; using Base = Superclass; -#define DECL_MULTI_REP_BINOP(name, operation, kind) \ - OpIndex name(OpIndex left, OpIndex right, MachineRepresentation rep) { \ +#define DECL_MULTI_REP_BINOP(name, operation, rep_type, kind) \ + OpIndex name(OpIndex left, OpIndex right, rep_type rep) { \ return subclass().operation(left, right, operation##Op::Kind::k##kind, \ rep); \ } #define DECL_SINGLE_REP_BINOP(name, operation, kind, rep) \ OpIndex name(OpIndex left, OpIndex right) { \ return subclass().operation(left, right, operation##Op::Kind::k##kind, \ - MachineRepresentation::k##rep); \ - } -#define DECL_SINGLE_REP_BINOP_NO_KIND(name, operation, rep) \ - OpIndex name(OpIndex left, OpIndex right) { \ - return subclass().operation(left, right, MachineRepresentation::k##rep); \ - } - DECL_MULTI_REP_BINOP(WordAdd, WordBinop, Add) - DECL_SINGLE_REP_BINOP(Word32Add, WordBinop, Add, Word32) - DECL_SINGLE_REP_BINOP(Word64Add, WordBinop, Add, Word64) - - DECL_MULTI_REP_BINOP(WordMul, WordBinop, Mul) - DECL_SINGLE_REP_BINOP(Word32Mul, WordBinop, Mul, Word32) - DECL_SINGLE_REP_BINOP(Word64Mul, WordBinop, Mul, Word64) - - DECL_MULTI_REP_BINOP(WordBitwiseAnd, WordBinop, BitwiseAnd) - DECL_SINGLE_REP_BINOP(Word32BitwiseAnd, WordBinop, BitwiseAnd, Word32) - DECL_SINGLE_REP_BINOP(Word64BitwiseAnd, WordBinop, BitwiseAnd, Word64) - - DECL_MULTI_REP_BINOP(WordBitwiseOr, WordBinop, BitwiseOr) - DECL_SINGLE_REP_BINOP(Word32BitwiseOr, WordBinop, BitwiseOr, Word32) - DECL_SINGLE_REP_BINOP(Word64BitwiseOr, WordBinop, BitwiseOr, Word64) - - DECL_MULTI_REP_BINOP(WordBitwiseXor, WordBinop, BitwiseXor) - DECL_SINGLE_REP_BINOP(Word32BitwiseXor, WordBinop, BitwiseXor, Word32) - DECL_SINGLE_REP_BINOP(Word64BitwiseXor, WordBinop, BitwiseXor, Word64) - - DECL_MULTI_REP_BINOP(WordSub, WordBinop, Sub) - DECL_SINGLE_REP_BINOP(Word32Sub, WordBinop, Sub, Word32) - DECL_SINGLE_REP_BINOP(Word64Sub, WordBinop, Sub, Word64) - - DECL_MULTI_REP_BINOP(IntDiv, WordBinop, SignedDiv) - DECL_SINGLE_REP_BINOP(Int32Div, WordBinop, SignedDiv, Word32) - DECL_SINGLE_REP_BINOP(Int64Div, WordBinop, SignedDiv, Word64) - 
DECL_MULTI_REP_BINOP(UintDiv, WordBinop, UnsignedDiv) - DECL_SINGLE_REP_BINOP(Uint32Div, WordBinop, UnsignedDiv, Word32) - DECL_SINGLE_REP_BINOP(Uint64Div, WordBinop, UnsignedDiv, Word64) - DECL_MULTI_REP_BINOP(IntMod, WordBinop, SignedMod) - DECL_SINGLE_REP_BINOP(Int32Mod, WordBinop, SignedMod, Word32) - DECL_SINGLE_REP_BINOP(Int64Mod, WordBinop, SignedMod, Word64) - DECL_MULTI_REP_BINOP(UintMod, WordBinop, UnsignedMod) - DECL_SINGLE_REP_BINOP(Uint32Mod, WordBinop, UnsignedMod, Word32) - DECL_SINGLE_REP_BINOP(Uint64Mod, WordBinop, UnsignedMod, Word64) + rep); \ + } +#define DECL_SINGLE_REP_BINOP_NO_KIND(name, operation, rep) \ + OpIndex name(OpIndex left, OpIndex right) { \ + return subclass().operation(left, right, rep); \ + } + DECL_MULTI_REP_BINOP(WordAdd, WordBinop, WordRepresentation, Add) + DECL_SINGLE_REP_BINOP(Word32Add, WordBinop, Add, WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64Add, WordBinop, Add, WordRepresentation::Word64()) + + DECL_MULTI_REP_BINOP(WordMul, WordBinop, WordRepresentation, Mul) + DECL_SINGLE_REP_BINOP(Word32Mul, WordBinop, Mul, WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64Mul, WordBinop, Mul, WordRepresentation::Word64()) + + DECL_MULTI_REP_BINOP(WordBitwiseAnd, WordBinop, WordRepresentation, + BitwiseAnd) + DECL_SINGLE_REP_BINOP(Word32BitwiseAnd, WordBinop, BitwiseAnd, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64BitwiseAnd, WordBinop, BitwiseAnd, + WordRepresentation::Word64()) + + DECL_MULTI_REP_BINOP(WordBitwiseOr, WordBinop, WordRepresentation, BitwiseOr) + DECL_SINGLE_REP_BINOP(Word32BitwiseOr, WordBinop, BitwiseOr, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64BitwiseOr, WordBinop, BitwiseOr, + WordRepresentation::Word64()) + + DECL_MULTI_REP_BINOP(WordBitwiseXor, WordBinop, WordRepresentation, + BitwiseXor) + DECL_SINGLE_REP_BINOP(Word32BitwiseXor, WordBinop, BitwiseXor, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64BitwiseXor, WordBinop, BitwiseXor, + WordRepresentation::Word64()) + + DECL_MULTI_REP_BINOP(WordSub, WordBinop, WordRepresentation, Sub) + DECL_SINGLE_REP_BINOP(Word32Sub, WordBinop, Sub, WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64Sub, WordBinop, Sub, WordRepresentation::Word64()) + + DECL_MULTI_REP_BINOP(IntDiv, WordBinop, WordRepresentation, SignedDiv) + DECL_SINGLE_REP_BINOP(Int32Div, WordBinop, SignedDiv, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Int64Div, WordBinop, SignedDiv, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(UintDiv, WordBinop, WordRepresentation, UnsignedDiv) + DECL_SINGLE_REP_BINOP(Uint32Div, WordBinop, UnsignedDiv, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Uint64Div, WordBinop, UnsignedDiv, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(IntMod, WordBinop, WordRepresentation, SignedMod) + DECL_SINGLE_REP_BINOP(Int32Mod, WordBinop, SignedMod, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Int64Mod, WordBinop, SignedMod, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(UintMod, WordBinop, WordRepresentation, UnsignedMod) + DECL_SINGLE_REP_BINOP(Uint32Mod, WordBinop, UnsignedMod, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Uint64Mod, WordBinop, UnsignedMod, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(IntMulOverflownBits, WordBinop, WordRepresentation, + SignedMulOverflownBits) DECL_SINGLE_REP_BINOP(Int32MulOverflownBits, WordBinop, - SignedMulOverflownBits, Word32) + SignedMulOverflownBits, WordRepresentation::Word32()) + 
DECL_SINGLE_REP_BINOP(Int64MulOverflownBits, WordBinop, + SignedMulOverflownBits, WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(UintMulOverflownBits, WordBinop, WordRepresentation, + UnsignedMulOverflownBits) DECL_SINGLE_REP_BINOP(Uint32MulOverflownBits, WordBinop, - UnsignedMulOverflownBits, Word32) + UnsignedMulOverflownBits, WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Uint64MulOverflownBits, WordBinop, + UnsignedMulOverflownBits, WordRepresentation::Word64()) - DECL_MULTI_REP_BINOP(IntAddCheckOverflow, OverflowCheckedBinop, SignedAdd) + DECL_MULTI_REP_BINOP(IntAddCheckOverflow, OverflowCheckedBinop, + WordRepresentation, SignedAdd) DECL_SINGLE_REP_BINOP(Int32AddCheckOverflow, OverflowCheckedBinop, SignedAdd, - Word32) + WordRepresentation::Word32()) DECL_SINGLE_REP_BINOP(Int64AddCheckOverflow, OverflowCheckedBinop, SignedAdd, - Word64) - DECL_MULTI_REP_BINOP(IntSubCheckOverflow, OverflowCheckedBinop, SignedSub) + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(IntSubCheckOverflow, OverflowCheckedBinop, + WordRepresentation, SignedSub) DECL_SINGLE_REP_BINOP(Int32SubCheckOverflow, OverflowCheckedBinop, SignedSub, - Word32) + WordRepresentation::Word32()) DECL_SINGLE_REP_BINOP(Int64SubCheckOverflow, OverflowCheckedBinop, SignedSub, - Word64) - DECL_MULTI_REP_BINOP(IntMulCheckOverflow, OverflowCheckedBinop, SignedMul) + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(IntMulCheckOverflow, OverflowCheckedBinop, + WordRepresentation, SignedMul) DECL_SINGLE_REP_BINOP(Int32MulCheckOverflow, OverflowCheckedBinop, SignedMul, - Word32) + WordRepresentation::Word32()) DECL_SINGLE_REP_BINOP(Int64MulCheckOverflow, OverflowCheckedBinop, SignedMul, - Word64) - - DECL_MULTI_REP_BINOP(FloatAdd, FloatBinop, Add) - DECL_SINGLE_REP_BINOP(Float32Add, FloatBinop, Add, Float32) - DECL_SINGLE_REP_BINOP(Float64Add, FloatBinop, Add, Float64) - DECL_MULTI_REP_BINOP(FloatMul, FloatBinop, Mul) - DECL_SINGLE_REP_BINOP(Float32Mul, FloatBinop, Mul, Float32) - DECL_SINGLE_REP_BINOP(Float64Mul, FloatBinop, Mul, Float64) - DECL_MULTI_REP_BINOP(FloatSub, FloatBinop, Sub) - DECL_SINGLE_REP_BINOP(Float32Sub, FloatBinop, Sub, Float32) - DECL_SINGLE_REP_BINOP(Float64Sub, FloatBinop, Sub, Float64) - DECL_MULTI_REP_BINOP(FloatDiv, FloatBinop, Div) - DECL_SINGLE_REP_BINOP(Float32Div, FloatBinop, Div, Float32) - DECL_SINGLE_REP_BINOP(Float64Div, FloatBinop, Div, Float64) - DECL_MULTI_REP_BINOP(FloatMin, FloatBinop, Min) - DECL_SINGLE_REP_BINOP(Float32Min, FloatBinop, Min, Float32) - DECL_SINGLE_REP_BINOP(Float64Min, FloatBinop, Min, Float64) - DECL_MULTI_REP_BINOP(FloatMax, FloatBinop, Max) - DECL_SINGLE_REP_BINOP(Float32Max, FloatBinop, Max, Float32) - DECL_SINGLE_REP_BINOP(Float64Max, FloatBinop, Max, Float64) - DECL_SINGLE_REP_BINOP(Float64Mod, FloatBinop, Mod, Float64) - DECL_SINGLE_REP_BINOP(Float64Power, FloatBinop, Power, Float64) - DECL_SINGLE_REP_BINOP(Float64Atan2, FloatBinop, Atan2, Float64) + WordRepresentation::Word64()) + + DECL_MULTI_REP_BINOP(FloatAdd, FloatBinop, FloatRepresentation, Add) + DECL_SINGLE_REP_BINOP(Float32Add, FloatBinop, Add, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_BINOP(Float64Add, FloatBinop, Add, + FloatRepresentation::Float64()) + DECL_MULTI_REP_BINOP(FloatMul, FloatBinop, FloatRepresentation, Mul) + DECL_SINGLE_REP_BINOP(Float32Mul, FloatBinop, Mul, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_BINOP(Float64Mul, FloatBinop, Mul, + FloatRepresentation::Float64()) + DECL_MULTI_REP_BINOP(FloatSub, FloatBinop, FloatRepresentation, Sub) + 
DECL_SINGLE_REP_BINOP(Float32Sub, FloatBinop, Sub, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_BINOP(Float64Sub, FloatBinop, Sub, + FloatRepresentation::Float64()) + DECL_MULTI_REP_BINOP(FloatDiv, FloatBinop, FloatRepresentation, Div) + DECL_SINGLE_REP_BINOP(Float32Div, FloatBinop, Div, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_BINOP(Float64Div, FloatBinop, Div, + FloatRepresentation::Float64()) + DECL_MULTI_REP_BINOP(FloatMin, FloatBinop, FloatRepresentation, Min) + DECL_SINGLE_REP_BINOP(Float32Min, FloatBinop, Min, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_BINOP(Float64Min, FloatBinop, Min, + FloatRepresentation::Float64()) + DECL_MULTI_REP_BINOP(FloatMax, FloatBinop, FloatRepresentation, Max) + DECL_SINGLE_REP_BINOP(Float32Max, FloatBinop, Max, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_BINOP(Float64Max, FloatBinop, Max, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_BINOP(Float64Mod, FloatBinop, Mod, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_BINOP(Float64Power, FloatBinop, Power, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_BINOP(Float64Atan2, FloatBinop, Atan2, + FloatRepresentation::Float64()) DECL_MULTI_REP_BINOP(ShiftRightArithmeticShiftOutZeros, Shift, - ShiftRightArithmeticShiftOutZeros) + WordRepresentation, ShiftRightArithmeticShiftOutZeros) DECL_SINGLE_REP_BINOP(Word32ShiftRightArithmeticShiftOutZeros, Shift, - ShiftRightArithmeticShiftOutZeros, Word32) + ShiftRightArithmeticShiftOutZeros, + WordRepresentation::Word32()) DECL_SINGLE_REP_BINOP(Word64ShiftRightArithmeticShiftOutZeros, Shift, - ShiftRightArithmeticShiftOutZeros, Word64) - DECL_MULTI_REP_BINOP(ShiftRightArithmetic, Shift, ShiftRightArithmetic) + ShiftRightArithmeticShiftOutZeros, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(ShiftRightArithmetic, Shift, WordRepresentation, + ShiftRightArithmetic) DECL_SINGLE_REP_BINOP(Word32ShiftRightArithmetic, Shift, ShiftRightArithmetic, - Word32) + WordRepresentation::Word32()) DECL_SINGLE_REP_BINOP(Word64ShiftRightArithmetic, Shift, ShiftRightArithmetic, - Word64) - DECL_MULTI_REP_BINOP(ShiftRightLogical, Shift, ShiftRightLogical) + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(ShiftRightLogical, Shift, WordRepresentation, + ShiftRightLogical) DECL_SINGLE_REP_BINOP(Word32ShiftRightLogical, Shift, ShiftRightLogical, - Word32) + WordRepresentation::Word32()) DECL_SINGLE_REP_BINOP(Word64ShiftRightLogical, Shift, ShiftRightLogical, - Word64) - DECL_MULTI_REP_BINOP(ShiftLeft, Shift, ShiftLeft) - DECL_SINGLE_REP_BINOP(Word32ShiftLeft, Shift, ShiftLeft, Word32) - DECL_SINGLE_REP_BINOP(Word64ShiftLeft, Shift, ShiftLeft, Word64) - DECL_MULTI_REP_BINOP(RotateRight, Shift, RotateRight) - DECL_SINGLE_REP_BINOP(Word32RotateRight, Shift, RotateRight, Word32) - DECL_SINGLE_REP_BINOP(Word64RotateRight, Shift, RotateRight, Word64) - DECL_MULTI_REP_BINOP(RotateLeft, Shift, RotateLeft) - DECL_SINGLE_REP_BINOP(Word32RotateLeft, Shift, RotateLeft, Word32) - DECL_SINGLE_REP_BINOP(Word64RotateLeft, Shift, RotateLeft, Word64) + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(ShiftLeft, Shift, WordRepresentation, ShiftLeft) + DECL_SINGLE_REP_BINOP(Word32ShiftLeft, Shift, ShiftLeft, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64ShiftLeft, Shift, ShiftLeft, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(RotateRight, Shift, WordRepresentation, RotateRight) + DECL_SINGLE_REP_BINOP(Word32RotateRight, Shift, RotateRight, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64RotateRight, Shift, 
RotateRight, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(RotateLeft, Shift, WordRepresentation, RotateLeft) + DECL_SINGLE_REP_BINOP(Word32RotateLeft, Shift, RotateLeft, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Word64RotateLeft, Shift, RotateLeft, + WordRepresentation::Word64()) OpIndex ShiftRightLogical(OpIndex left, uint32_t right, - MachineRepresentation rep) { + WordRepresentation rep) { DCHECK_GE(right, 0); - DCHECK_LT(right, ElementSizeInBits(rep)); - return ShiftRightLogical(left, Word32Constant(right), rep); + DCHECK_LT(right, rep.bit_width()); + return ShiftRightLogical(left, this->Word32Constant(right), rep); } OpIndex ShiftRightArithmetic(OpIndex left, uint32_t right, - MachineRepresentation rep) { + WordRepresentation rep) { DCHECK_GE(right, 0); - DCHECK_LT(right, ElementSizeInBits(rep)); - return ShiftRightArithmetic(left, Word32Constant(right), rep); - } - - DECL_SINGLE_REP_BINOP_NO_KIND(Word32Equal, Equal, Word32) - DECL_SINGLE_REP_BINOP_NO_KIND(Word64Equal, Equal, Word64) - DECL_SINGLE_REP_BINOP_NO_KIND(Float32Equal, Equal, Float32) - DECL_SINGLE_REP_BINOP_NO_KIND(Float64Equal, Equal, Float64) - - DECL_MULTI_REP_BINOP(IntLessThan, Comparison, SignedLessThan) - DECL_SINGLE_REP_BINOP(Int32LessThan, Comparison, SignedLessThan, Word32) - DECL_SINGLE_REP_BINOP(Int64LessThan, Comparison, SignedLessThan, Word64) - DECL_MULTI_REP_BINOP(UintLessThan, Comparison, UnsignedLessThan) - DECL_SINGLE_REP_BINOP(Uint32LessThan, Comparison, UnsignedLessThan, Word32) - DECL_SINGLE_REP_BINOP(Uint64LessThan, Comparison, UnsignedLessThan, Word64) - DECL_MULTI_REP_BINOP(FloatLessThan, Comparison, SignedLessThan) - DECL_SINGLE_REP_BINOP(Float32LessThan, Comparison, SignedLessThan, Float32) - DECL_SINGLE_REP_BINOP(Float64LessThan, Comparison, SignedLessThan, Float64) - - DECL_MULTI_REP_BINOP(IntLessThanOrEqual, Comparison, SignedLessThanOrEqual) + DCHECK_LT(right, rep.bit_width()); + return ShiftRightArithmetic(left, this->Word32Constant(right), rep); + } + + DECL_SINGLE_REP_BINOP_NO_KIND(Word32Equal, Equal, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP_NO_KIND(Word64Equal, Equal, + WordRepresentation::Word64()) + DECL_SINGLE_REP_BINOP_NO_KIND(Float32Equal, Equal, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_BINOP_NO_KIND(Float64Equal, Equal, + FloatRepresentation::Float64()) + + DECL_MULTI_REP_BINOP(IntLessThan, Comparison, RegisterRepresentation, + SignedLessThan) + DECL_SINGLE_REP_BINOP(Int32LessThan, Comparison, SignedLessThan, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Int64LessThan, Comparison, SignedLessThan, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(UintLessThan, Comparison, RegisterRepresentation, + UnsignedLessThan) + DECL_SINGLE_REP_BINOP(Uint32LessThan, Comparison, UnsignedLessThan, + WordRepresentation::Word32()) + DECL_SINGLE_REP_BINOP(Uint64LessThan, Comparison, UnsignedLessThan, + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(FloatLessThan, Comparison, RegisterRepresentation, + SignedLessThan) + DECL_SINGLE_REP_BINOP(Float32LessThan, Comparison, SignedLessThan, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_BINOP(Float64LessThan, Comparison, SignedLessThan, + FloatRepresentation::Float64()) + + DECL_MULTI_REP_BINOP(IntLessThanOrEqual, Comparison, RegisterRepresentation, + SignedLessThanOrEqual) DECL_SINGLE_REP_BINOP(Int32LessThanOrEqual, Comparison, SignedLessThanOrEqual, - Word32) + WordRepresentation::Word32()) DECL_SINGLE_REP_BINOP(Int64LessThanOrEqual, Comparison, SignedLessThanOrEqual, - Word64) - 
DECL_MULTI_REP_BINOP(UintLessThanOrEqual, Comparison, UnsignedLessThanOrEqual) + WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(UintLessThanOrEqual, Comparison, RegisterRepresentation, + UnsignedLessThanOrEqual) DECL_SINGLE_REP_BINOP(Uint32LessThanOrEqual, Comparison, - UnsignedLessThanOrEqual, Word32) + UnsignedLessThanOrEqual, WordRepresentation::Word32()) DECL_SINGLE_REP_BINOP(Uint64LessThanOrEqual, Comparison, - UnsignedLessThanOrEqual, Word64) - DECL_MULTI_REP_BINOP(FloatLessThanOrEqual, Comparison, SignedLessThanOrEqual) + UnsignedLessThanOrEqual, WordRepresentation::Word64()) + DECL_MULTI_REP_BINOP(FloatLessThanOrEqual, Comparison, RegisterRepresentation, + SignedLessThanOrEqual) DECL_SINGLE_REP_BINOP(Float32LessThanOrEqual, Comparison, - SignedLessThanOrEqual, Float32) + SignedLessThanOrEqual, FloatRepresentation::Float32()) DECL_SINGLE_REP_BINOP(Float64LessThanOrEqual, Comparison, - SignedLessThanOrEqual, Float64) + SignedLessThanOrEqual, FloatRepresentation::Float64()) #undef DECL_SINGLE_REP_BINOP #undef DECL_MULTI_REP_BINOP #undef DECL_SINGLE_REP_BINOP_NO_KIND -#define DECL_MULTI_REP_UNARY(name, operation, kind) \ - OpIndex name(OpIndex input, MachineRepresentation rep) { \ +#define DECL_MULTI_REP_UNARY(name, operation, rep_type, kind) \ + OpIndex name(OpIndex input, rep_type rep) { \ return subclass().operation(input, operation##Op::Kind::k##kind, rep); \ } -#define DECL_SINGLE_REP_UNARY(name, operation, kind, rep) \ - OpIndex name(OpIndex input) { \ - return subclass().operation(input, operation##Op::Kind::k##kind, \ - MachineRepresentation::k##rep); \ - } - - DECL_MULTI_REP_UNARY(FloatAbs, FloatUnary, Abs) - DECL_SINGLE_REP_UNARY(Float32Abs, FloatUnary, Abs, Float32) - DECL_SINGLE_REP_UNARY(Float64Abs, FloatUnary, Abs, Float64) - DECL_MULTI_REP_UNARY(FloatNegate, FloatUnary, Negate) - DECL_SINGLE_REP_UNARY(Float32Negate, FloatUnary, Negate, Float32) - DECL_SINGLE_REP_UNARY(Float64Negate, FloatUnary, Negate, Float64) - DECL_SINGLE_REP_UNARY(Float64SilenceNaN, FloatUnary, SilenceNaN, Float64) - DECL_MULTI_REP_UNARY(FloatRoundDown, FloatUnary, RoundDown) - DECL_SINGLE_REP_UNARY(Float32RoundDown, FloatUnary, RoundDown, Float32) - DECL_SINGLE_REP_UNARY(Float64RoundDown, FloatUnary, RoundDown, Float64) - DECL_MULTI_REP_UNARY(FloatRoundUp, FloatUnary, RoundUp) - DECL_SINGLE_REP_UNARY(Float32RoundUp, FloatUnary, RoundUp, Float32) - DECL_SINGLE_REP_UNARY(Float64RoundUp, FloatUnary, RoundUp, Float64) - DECL_MULTI_REP_UNARY(FloatRoundToZero, FloatUnary, RoundToZero) - DECL_SINGLE_REP_UNARY(Float32RoundToZero, FloatUnary, RoundToZero, Float32) - DECL_SINGLE_REP_UNARY(Float64RoundToZero, FloatUnary, RoundToZero, Float64) - DECL_MULTI_REP_UNARY(FloatRoundTiesEven, FloatUnary, RoundTiesEven) +#define DECL_SINGLE_REP_UNARY(name, operation, kind, rep) \ + OpIndex name(OpIndex input) { \ + return subclass().operation(input, operation##Op::Kind::k##kind, rep); \ + } + + DECL_MULTI_REP_UNARY(FloatAbs, FloatUnary, FloatRepresentation, Abs) + DECL_SINGLE_REP_UNARY(Float32Abs, FloatUnary, Abs, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_UNARY(Float64Abs, FloatUnary, Abs, + FloatRepresentation::Float64()) + DECL_MULTI_REP_UNARY(FloatNegate, FloatUnary, FloatRepresentation, Negate) + DECL_SINGLE_REP_UNARY(Float32Negate, FloatUnary, Negate, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_UNARY(Float64Negate, FloatUnary, Negate, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64SilenceNaN, FloatUnary, SilenceNaN, + FloatRepresentation::Float64()) + 
DECL_MULTI_REP_UNARY(FloatRoundDown, FloatUnary, FloatRepresentation, + RoundDown) + DECL_SINGLE_REP_UNARY(Float32RoundDown, FloatUnary, RoundDown, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_UNARY(Float64RoundDown, FloatUnary, RoundDown, + FloatRepresentation::Float64()) + DECL_MULTI_REP_UNARY(FloatRoundUp, FloatUnary, FloatRepresentation, RoundUp) + DECL_SINGLE_REP_UNARY(Float32RoundUp, FloatUnary, RoundUp, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_UNARY(Float64RoundUp, FloatUnary, RoundUp, + FloatRepresentation::Float64()) + DECL_MULTI_REP_UNARY(FloatRoundToZero, FloatUnary, FloatRepresentation, + RoundToZero) + DECL_SINGLE_REP_UNARY(Float32RoundToZero, FloatUnary, RoundToZero, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_UNARY(Float64RoundToZero, FloatUnary, RoundToZero, + FloatRepresentation::Float64()) + DECL_MULTI_REP_UNARY(FloatRoundTiesEven, FloatUnary, FloatRepresentation, + RoundTiesEven) DECL_SINGLE_REP_UNARY(Float32RoundTiesEven, FloatUnary, RoundTiesEven, - Float32) + FloatRepresentation::Float32()) DECL_SINGLE_REP_UNARY(Float64RoundTiesEven, FloatUnary, RoundTiesEven, - Float64) - DECL_SINGLE_REP_UNARY(Float64Log, FloatUnary, Log, Float64) - DECL_MULTI_REP_UNARY(FloatSqrt, FloatUnary, Sqrt) - DECL_SINGLE_REP_UNARY(Float32Sqrt, FloatUnary, Sqrt, Float32) - DECL_SINGLE_REP_UNARY(Float64Sqrt, FloatUnary, Sqrt, Float64) - DECL_SINGLE_REP_UNARY(Float64Exp, FloatUnary, Exp, Float64) - DECL_SINGLE_REP_UNARY(Float64Expm1, FloatUnary, Expm1, Float64) - DECL_SINGLE_REP_UNARY(Float64Sin, FloatUnary, Sin, Float64) - DECL_SINGLE_REP_UNARY(Float64Cos, FloatUnary, Cos, Float64) - DECL_SINGLE_REP_UNARY(Float64Sinh, FloatUnary, Sinh, Float64) - DECL_SINGLE_REP_UNARY(Float64Cosh, FloatUnary, Cosh, Float64) - DECL_SINGLE_REP_UNARY(Float64Asin, FloatUnary, Asin, Float64) - DECL_SINGLE_REP_UNARY(Float64Acos, FloatUnary, Acos, Float64) - DECL_SINGLE_REP_UNARY(Float64Asinh, FloatUnary, Asinh, Float64) - DECL_SINGLE_REP_UNARY(Float64Acosh, FloatUnary, Acosh, Float64) - DECL_SINGLE_REP_UNARY(Float64Tan, FloatUnary, Tan, Float64) - DECL_SINGLE_REP_UNARY(Float64Tanh, FloatUnary, Tanh, Float64) - - DECL_MULTI_REP_UNARY(WordReverseBytes, WordUnary, ReverseBytes) - DECL_SINGLE_REP_UNARY(Word32ReverseBytes, WordUnary, ReverseBytes, Word32) - DECL_SINGLE_REP_UNARY(Word64ReverseBytes, WordUnary, ReverseBytes, Word64) - DECL_MULTI_REP_UNARY(WordCountLeadingZeros, WordUnary, CountLeadingZeros) + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Log, FloatUnary, Log, + FloatRepresentation::Float64()) + DECL_MULTI_REP_UNARY(FloatSqrt, FloatUnary, FloatRepresentation, Sqrt) + DECL_SINGLE_REP_UNARY(Float32Sqrt, FloatUnary, Sqrt, + FloatRepresentation::Float32()) + DECL_SINGLE_REP_UNARY(Float64Sqrt, FloatUnary, Sqrt, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Exp, FloatUnary, Exp, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Expm1, FloatUnary, Expm1, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Sin, FloatUnary, Sin, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Cos, FloatUnary, Cos, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Sinh, FloatUnary, Sinh, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Cosh, FloatUnary, Cosh, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Asin, FloatUnary, Asin, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Acos, FloatUnary, Acos, + FloatRepresentation::Float64()) + 
DECL_SINGLE_REP_UNARY(Float64Asinh, FloatUnary, Asinh, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Acosh, FloatUnary, Acosh, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Tan, FloatUnary, Tan, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Tanh, FloatUnary, Tanh, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Log2, FloatUnary, Log2, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Log10, FloatUnary, Log10, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Log1p, FloatUnary, Log1p, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Atan, FloatUnary, Atan, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Atanh, FloatUnary, Atanh, + FloatRepresentation::Float64()) + DECL_SINGLE_REP_UNARY(Float64Cbrt, FloatUnary, Cbrt, + FloatRepresentation::Float64()) + + DECL_MULTI_REP_UNARY(WordReverseBytes, WordUnary, WordRepresentation, + ReverseBytes) + DECL_SINGLE_REP_UNARY(Word32ReverseBytes, WordUnary, ReverseBytes, + WordRepresentation::Word32()) + DECL_SINGLE_REP_UNARY(Word64ReverseBytes, WordUnary, ReverseBytes, + WordRepresentation::Word64()) + DECL_MULTI_REP_UNARY(WordCountLeadingZeros, WordUnary, WordRepresentation, + CountLeadingZeros) DECL_SINGLE_REP_UNARY(Word32CountLeadingZeros, WordUnary, CountLeadingZeros, - Word32) + WordRepresentation::Word32()) DECL_SINGLE_REP_UNARY(Word64CountLeadingZeros, WordUnary, CountLeadingZeros, - Word64) + WordRepresentation::Word64()) + DECL_MULTI_REP_UNARY(WordCountTrailingZeros, WordUnary, WordRepresentation, + CountTrailingZeros) + DECL_SINGLE_REP_UNARY(Word32CountTrailingZeros, WordUnary, CountTrailingZeros, + WordRepresentation::Word32()) + DECL_SINGLE_REP_UNARY(Word64CountTrailingZeros, WordUnary, CountTrailingZeros, + WordRepresentation::Word64()) + DECL_MULTI_REP_UNARY(WordPopCount, WordUnary, WordRepresentation, PopCount) + DECL_SINGLE_REP_UNARY(Word32PopCount, WordUnary, PopCount, + WordRepresentation::Word32()) + DECL_SINGLE_REP_UNARY(Word64PopCount, WordUnary, PopCount, + WordRepresentation::Word64()) + DECL_MULTI_REP_UNARY(WordSignExtend8, WordUnary, WordRepresentation, + SignExtend8) + DECL_SINGLE_REP_UNARY(Word32SignExtend8, WordUnary, SignExtend8, + WordRepresentation::Word32()) + DECL_SINGLE_REP_UNARY(Word64SignExtend8, WordUnary, SignExtend8, + WordRepresentation::Word64()) + DECL_MULTI_REP_UNARY(WordSignExtend16, WordUnary, WordRepresentation, + SignExtend16) + DECL_SINGLE_REP_UNARY(Word32SignExtend16, WordUnary, SignExtend16, + WordRepresentation::Word32()) + DECL_SINGLE_REP_UNARY(Word64SignExtend16, WordUnary, SignExtend16, + WordRepresentation::Word64()) #undef DECL_SINGLE_REP_UNARY #undef DECL_MULTI_REP_UNARY + OpIndex Word32Select(OpIndex condition, OpIndex left, OpIndex right) { + return subclass().Select(condition, left, right, + WordRepresentation::Word32()); + } + OpIndex Word64Select(OpIndex condition, OpIndex left, OpIndex right) { + return subclass().Select(condition, left, right, + WordRepresentation::Word64()); + } + OpIndex Word32Constant(uint32_t value) { return subclass().Constant(ConstantOp::Kind::kWord32, uint64_t{value}); } @@ -270,15 +412,12 @@ class AssemblerInterface : public Superclass { OpIndex Word64Constant(int64_t value) { return Word64Constant(static_cast<uint64_t>(value)); } - OpIndex WordConstant(uint64_t value, MachineRepresentation rep) { - switch (rep) { - case MachineRepresentation::kWord32: - DCHECK(value <= MaxUnsignedValue(MachineRepresentation::kWord32)); + OpIndex 
WordConstant(uint64_t value, WordRepresentation rep) { + switch (rep.value()) { + case WordRepresentation::Word32(): return Word32Constant(static_cast<uint32_t>(value)); - case MachineRepresentation::kWord64: + case WordRepresentation::Word64(): return Word64Constant(value); - default: - UNREACHABLE(); } } OpIndex Float32Constant(float value) { @@ -287,14 +426,12 @@ class AssemblerInterface : public Superclass { OpIndex Float64Constant(double value) { return subclass().Constant(ConstantOp::Kind::kFloat64, value); } - OpIndex FloatConstant(double value, MachineRepresentation rep) { - switch (rep) { - case MachineRepresentation::kFloat32: + OpIndex FloatConstant(double value, FloatRepresentation rep) { + switch (rep.value()) { + case FloatRepresentation::Float32(): return Float32Constant(static_cast<float>(value)); - case MachineRepresentation::kFloat64: + case FloatRepresentation::Float64(): return Float64Constant(value); - default: - UNREACHABLE(); } } OpIndex NumberConstant(double value) { @@ -313,38 +450,109 @@ class AssemblerInterface : public Superclass { OpIndex ExternalConstant(ExternalReference value) { return subclass().Constant(ConstantOp::Kind::kExternal, value); } + OpIndex RelocatableConstant(int64_t value, RelocInfo::Mode mode) { + DCHECK_EQ(mode, any_of(RelocInfo::WASM_CALL, RelocInfo::WASM_STUB_CALL)); + return subclass().Constant(mode == RelocInfo::WASM_CALL + ? ConstantOp::Kind::kRelocatableWasmCall + : ConstantOp::Kind::kRelocatableWasmStubCall, + static_cast<uint64_t>(value)); + } + +#define DECL_CHANGE(name, kind, assumption, from, to) \ + OpIndex name(OpIndex input) { \ + return subclass().Change( \ + input, ChangeOp::Kind::kind, ChangeOp::Assumption::assumption, \ + RegisterRepresentation::from(), RegisterRepresentation::to()); \ + } +#define DECL_TRY_CHANGE(name, kind, from, to) \ + OpIndex name(OpIndex input) { \ + return subclass().TryChange(input, TryChangeOp::Kind::kind, \ + FloatRepresentation::from(), \ + WordRepresentation::to()); \ + } + + DECL_CHANGE(BitcastWord32ToWord64, kBitcast, kNoAssumption, Word32, Word64) + DECL_CHANGE(BitcastFloat32ToWord32, kBitcast, kNoAssumption, Float32, Word32) + DECL_CHANGE(BitcastWord32ToFloat32, kBitcast, kNoAssumption, Word32, Float32) + DECL_CHANGE(BitcastFloat64ToWord64, kBitcast, kNoAssumption, Float64, Word64) + DECL_CHANGE(BitcastWord64ToFloat64, kBitcast, kNoAssumption, Word64, Float64) + DECL_CHANGE(ChangeUint32ToUint64, kZeroExtend, kNoAssumption, Word32, Word64) + DECL_CHANGE(ChangeInt32ToInt64, kSignExtend, kNoAssumption, Word32, Word64) + DECL_CHANGE(ChangeInt32ToFloat64, kSignedToFloat, kNoAssumption, Word32, + Float64) + DECL_CHANGE(ChangeInt64ToFloat64, kSignedToFloat, kNoAssumption, Word64, + Float64) + DECL_CHANGE(ChangeInt32ToFloat32, kSignedToFloat, kNoAssumption, Word32, + Float32) + DECL_CHANGE(ChangeInt64ToFloat32, kSignedToFloat, kNoAssumption, Word64, + Float32) + DECL_CHANGE(ChangeUint32ToFloat32, kUnsignedToFloat, kNoAssumption, Word32, + Float32) + DECL_CHANGE(ChangeUint64ToFloat32, kUnsignedToFloat, kNoAssumption, Word64, + Float32) + DECL_CHANGE(ReversibleInt64ToFloat64, kSignedToFloat, kReversible, Word64, + Float64) + DECL_CHANGE(ChangeUint64ToFloat64, kUnsignedToFloat, kNoAssumption, Word64, + Float64) + DECL_CHANGE(ReversibleUint64ToFloat64, kUnsignedToFloat, kReversible, Word64, + Float64) + DECL_CHANGE(ChangeUint32ToFloat64, kUnsignedToFloat, kNoAssumption, Word32, + Float64) + DECL_CHANGE(ChangeFloat64ToFloat32, kFloatConversion, kNoAssumption, Float64, + Float32) + 
DECL_CHANGE(ChangeFloat32ToFloat64, kFloatConversion, kNoAssumption, Float32, + Float64) + DECL_CHANGE(JSTruncateFloat64ToWord32, kJSFloatTruncate, kNoAssumption, + Float64, Word32) -#define DECL_CHANGE(name, kind, from, to) \ - OpIndex name(OpIndex input) { \ - return subclass().Change(input, ChangeOp::Kind::k##kind, \ - MachineRepresentation::k##from, \ - MachineRepresentation::k##to); \ - } - - DECL_CHANGE(BitcastWord32ToWord64, Bitcast, Word32, Word64) - DECL_CHANGE(BitcastFloat32ToWord32, Bitcast, Float32, Word32) - DECL_CHANGE(BitcastWord32ToFloat32, Bitcast, Word32, Float32) - DECL_CHANGE(BitcastFloat64ToWord64, Bitcast, Float64, Word64) - DECL_CHANGE(BitcastWord6464ToFloat64, Bitcast, Word64, Float64) - DECL_CHANGE(ChangeUint32ToUint64, ZeroExtend, Word32, Word64) - DECL_CHANGE(ChangeInt32ToInt64, SignExtend, Word32, Word64) - DECL_CHANGE(ChangeInt32ToFloat64, SignedToFloat, Word32, Float64) - DECL_CHANGE(ChangeInt64ToFloat64, SignedToFloat, Word64, Float64) - DECL_CHANGE(ChangeUint32ToFloat64, UnsignedToFloat, Word32, Float64) - DECL_CHANGE(ChangeFloat64ToFloat32, FloatConversion, Float64, Float32) - DECL_CHANGE(ChangeFloat32ToFloat64, FloatConversion, Float32, Float64) - DECL_CHANGE(JSTruncateFloat64ToWord32, JSFloatTruncate, Float64, Word32) - DECL_CHANGE(TruncateFloat64ToInt32OverflowUndefined, SignedFloatTruncate, +#define DECL_SIGNED_FLOAT_TRUNCATE(FloatBits, ResultBits) \ + DECL_CHANGE(TruncateFloat##FloatBits##ToInt##ResultBits##OverflowUndefined, \ + kSignedFloatTruncateOverflowToMin, kNoOverflow, \ + Float##FloatBits, Word##ResultBits) \ + DECL_CHANGE(TruncateFloat##FloatBits##ToInt##ResultBits##OverflowToMin, \ + kSignedFloatTruncateOverflowToMin, kNoAssumption, \ + Float##FloatBits, Word##ResultBits) \ + DECL_TRY_CHANGE(TryTruncateFloat##FloatBits##ToInt##ResultBits, \ + kSignedFloatTruncateOverflowUndefined, Float##FloatBits, \ + Word##ResultBits) + + DECL_SIGNED_FLOAT_TRUNCATE(64, 64) + DECL_SIGNED_FLOAT_TRUNCATE(64, 32) + DECL_SIGNED_FLOAT_TRUNCATE(32, 64) + DECL_SIGNED_FLOAT_TRUNCATE(32, 32) +#undef DECL_SIGNED_FLOAT_TRUNCATE + +#define DECL_UNSIGNED_FLOAT_TRUNCATE(FloatBits, ResultBits) \ + DECL_CHANGE(TruncateFloat##FloatBits##ToUint##ResultBits##OverflowUndefined, \ + kUnsignedFloatTruncateOverflowToMin, kNoOverflow, \ + Float##FloatBits, Word##ResultBits) \ + DECL_CHANGE(TruncateFloat##FloatBits##ToUint##ResultBits##OverflowToMin, \ + kUnsignedFloatTruncateOverflowToMin, kNoAssumption, \ + Float##FloatBits, Word##ResultBits) \ + DECL_TRY_CHANGE(TryTruncateFloat##FloatBits##ToUint##ResultBits, \ + kUnsignedFloatTruncateOverflowUndefined, Float##FloatBits, \ + Word##ResultBits) + + DECL_UNSIGNED_FLOAT_TRUNCATE(64, 64) + DECL_UNSIGNED_FLOAT_TRUNCATE(64, 32) + DECL_UNSIGNED_FLOAT_TRUNCATE(32, 64) + DECL_UNSIGNED_FLOAT_TRUNCATE(32, 32) +#undef DECL_UNSIGNED_FLOAT_TRUNCATE + + DECL_CHANGE(ReversibleFloat64ToInt32, kSignedFloatTruncateOverflowToMin, + kReversible, Float64, Word32) + DECL_CHANGE(ReversibleFloat64ToUint32, kUnsignedFloatTruncateOverflowToMin, + kReversible, Float64, Word32) + DECL_CHANGE(ReversibleFloat64ToInt64, kSignedFloatTruncateOverflowToMin, + kReversible, Float64, Word64) + DECL_CHANGE(ReversibleFloat64ToUint64, kUnsignedFloatTruncateOverflowToMin, + kReversible, Float64, Word64) + DECL_CHANGE(Float64ExtractLowWord32, kExtractLowHalf, kNoAssumption, Float64, + Word32) + DECL_CHANGE(Float64ExtractHighWord32, kExtractHighHalf, kNoAssumption, Float64, Word32) - DECL_CHANGE(TruncateFloat64ToInt32OverflowToMin, - SignedFloatTruncateOverflowToMin, 
Float64, Word32) - DECL_CHANGE(NarrowFloat64ToInt32, SignedNarrowing, Float64, Word32) - DECL_CHANGE(NarrowFloat64ToUint32, UnsignedNarrowing, Float64, Word32) - DECL_CHANGE(NarrowFloat64ToInt64, SignedNarrowing, Float64, Word64) - DECL_CHANGE(NarrowFloat64ToUint64, UnsignedNarrowing, Float64, Word64) - DECL_CHANGE(Float64ExtractLowWord32, ExtractLowHalf, Float64, Word32) - DECL_CHANGE(Float64ExtractHighWord32, ExtractHighHalf, Float64, Word32) #undef DECL_CHANGE +#undef DECL_TRY_CHANGE using Base::Tuple; OpIndex Tuple(OpIndex a, OpIndex b) { @@ -370,7 +578,8 @@ class AssemblerBase { }; class Assembler - : public AssemblerInterface<Assembler, AssemblerBase<Assembler>> { + : public AssemblerInterface<Assembler, AssemblerBase<Assembler>>, + public OperationMatching<Assembler> { public: Block* NewBlock(Block::Kind kind) { return graph_.NewBlock(kind); } @@ -388,7 +597,7 @@ class Assembler current_operation_origin_ = operation_origin; } - OpIndex Phi(base::Vector<const OpIndex> inputs, MachineRepresentation rep) { + OpIndex Phi(base::Vector<const OpIndex> inputs, RegisterRepresentation rep) { DCHECK(current_block()->IsMerge() && inputs.size() == current_block()->Predecessors().size()); return Base::Phi(inputs, rep); diff --git a/deps/v8/src/compiler/turboshaft/decompression-optimization.cc b/deps/v8/src/compiler/turboshaft/decompression-optimization.cc index 5b5ad9c6d8031f..68356b083a63c1 100644 --- a/deps/v8/src/compiler/turboshaft/decompression-optimization.cc +++ b/deps/v8/src/compiler/turboshaft/decompression-optimization.cc @@ -77,21 +77,22 @@ void DecompressionAnalyzer::ProcessOperation(const Operation& op) { case Opcode::kStore: { auto& store = op.Cast<StoreOp>(); MarkAsNeedsDecompression(store.base()); - if (!IsAnyTagged(store.stored_rep)) + if (!store.stored_rep.IsTagged()) { MarkAsNeedsDecompression(store.value()); + } break; } case Opcode::kIndexedStore: { auto& store = op.Cast<IndexedStoreOp>(); MarkAsNeedsDecompression(store.base()); MarkAsNeedsDecompression(store.index()); - if (!IsAnyTagged(store.stored_rep)) + if (!store.stored_rep.IsTagged()) { MarkAsNeedsDecompression(store.value()); + } break; } case Opcode::kFrameState: - // The deopt code knows how to handle Compressed inputs, both - // MachineRepresentation kCompressed values and CompressedHeapConstants. + // The deopt code knows how to handle compressed inputs. break; case Opcode::kPhi: { // Replicate the phi's state for its inputs. 
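+ // If the phi itself has been marked as needing decompression, that + // requirement is replicated to each of its inputs; otherwise its inputs + // may stay compressed.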
@@ -107,7 +108,7 @@ void DecompressionAnalyzer::ProcessOperation(const Operation& op) { } case Opcode::kEqual: { auto& equal = op.Cast<EqualOp>(); - if (equal.rep == MachineRepresentation::kWord64) { + if (equal.rep == WordRepresentation::Word64()) { MarkAsNeedsDecompression(equal.left()); MarkAsNeedsDecompression(equal.right()); } @@ -115,7 +116,7 @@ void DecompressionAnalyzer::ProcessOperation(const Operation& op) { } case Opcode::kComparison: { auto& comp = op.Cast<ComparisonOp>(); - if (comp.rep == MachineRepresentation::kWord64) { + if (comp.rep == WordRepresentation::Word64()) { MarkAsNeedsDecompression(comp.left()); MarkAsNeedsDecompression(comp.right()); } @@ -123,7 +124,7 @@ void DecompressionAnalyzer::ProcessOperation(const Operation& op) { } case Opcode::kWordBinop: { auto& binary_op = op.Cast<WordBinopOp>(); - if (binary_op.rep == MachineRepresentation::kWord64) { + if (binary_op.rep == WordRepresentation::Word64()) { MarkAsNeedsDecompression(binary_op.left()); MarkAsNeedsDecompression(binary_op.right()); } @@ -131,15 +132,14 @@ void DecompressionAnalyzer::ProcessOperation(const Operation& op) { } case Opcode::kShift: { auto& shift_op = op.Cast<ShiftOp>(); - if (shift_op.rep == MachineRepresentation::kWord64) { + if (shift_op.rep == WordRepresentation::Word64()) { MarkAsNeedsDecompression(shift_op.left()); } break; } case Opcode::kChange: { auto& change = op.Cast<ChangeOp>(); - if (change.to == MachineRepresentation::kWord64 && - NeedsDecompression(op)) { + if (change.to == WordRepresentation::Word64() && NeedsDecompression(op)) { MarkAsNeedsDecompression(change.input()); } break; @@ -187,28 +187,28 @@ void RunDecompressionOptimization(Graph& graph, Zone* phase_zone) { } case Opcode::kPhi: { auto& phi = op.Cast<PhiOp>(); - if (phi.rep == MachineRepresentation::kTagged) { - phi.rep = MachineRepresentation::kCompressed; - } else if (phi.rep == MachineRepresentation::kTaggedPointer) { - phi.rep = MachineRepresentation::kCompressedPointer; + if (phi.rep == RegisterRepresentation::Tagged()) { + phi.rep = RegisterRepresentation::Compressed(); } break; } case Opcode::kLoad: { auto& load = op.Cast<LoadOp>(); - if (load.loaded_rep == MachineType::AnyTagged()) { - load.loaded_rep = MachineType::AnyCompressed(); - } else if (load.loaded_rep == MachineType::TaggedPointer()) { - load.loaded_rep = MachineType::CompressedPointer(); + if (load.loaded_rep.IsTagged()) { + DCHECK_EQ(load.result_rep, + any_of(RegisterRepresentation::Tagged(), + RegisterRepresentation::Compressed())); + load.result_rep = RegisterRepresentation::Compressed(); } break; } case Opcode::kIndexedLoad: { auto& load = op.Cast<IndexedLoadOp>(); - if (load.loaded_rep == MachineType::AnyTagged()) { - load.loaded_rep = MachineType::AnyCompressed(); - } else if (load.loaded_rep == MachineType::TaggedPointer()) { - load.loaded_rep = MachineType::CompressedPointer(); + if (load.loaded_rep.IsTagged()) { + DCHECK_EQ(load.result_rep, + any_of(RegisterRepresentation::Tagged(), + RegisterRepresentation::Compressed())); + load.result_rep = RegisterRepresentation::Compressed(); } break; } diff --git a/deps/v8/src/compiler/turboshaft/deopt-data.h b/deps/v8/src/compiler/turboshaft/deopt-data.h index 216dbf83bbbcf5..def0bee47e1614 100644 --- a/deps/v8/src/compiler/turboshaft/deopt-data.h +++ b/deps/v8/src/compiler/turboshaft/deopt-data.h @@ -5,6 +5,7 @@ #ifndef V8_COMPILER_TURBOSHAFT_DEOPT_DATA_H_ #define V8_COMPILER_TURBOSHAFT_DEOPT_DATA_H_ +#include "src/base/small-vector.h" #include "src/common/globals.h" #include
"src/compiler/turboshaft/operations.h" diff --git a/deps/v8/src/compiler/turboshaft/graph-builder.cc b/deps/v8/src/compiler/turboshaft/graph-builder.cc index 3ff92abdcd750a..7c5d2ba5d0c835 100644 --- a/deps/v8/src/compiler/turboshaft/graph-builder.cc +++ b/deps/v8/src/compiler/turboshaft/graph-builder.cc @@ -19,7 +19,6 @@ #include "src/compiler/machine-operator.h" #include "src/compiler/node-aux-data.h" #include "src/compiler/node-origin-table.h" -#include "src/compiler/node-properties.h" #include "src/compiler/opcodes.h" #include "src/compiler/operator.h" #include "src/compiler/schedule.h" @@ -202,6 +201,7 @@ base::Optional<BailoutReason> GraphBuilder::Run() { case BasicBlock::kReturn: case BasicBlock::kDeoptimize: case BasicBlock::kThrow: + case BasicBlock::kTailCall: break; case BasicBlock::kCall: { Node* call = block->control_input(); @@ -216,8 +216,6 @@ base::Optional<BailoutReason> GraphBuilder::Run() { op_mapping.Set(if_exception_node, catch_exception); break; } - case BasicBlock::kTailCall: - UNIMPLEMENTED(); case BasicBlock::kNone: UNREACHABLE(); } @@ -287,7 +285,9 @@ OpIndex GraphBuilder::Process( case IrOpcode::kPhi: { int input_count = op->ValueInputCount(); - MachineRepresentation rep = PhiRepresentationOf(op); + RegisterRepresentation rep = + RegisterRepresentation::FromMachineRepresentation( + PhiRepresentationOf(op)); if (assembler.current_block()->IsLoop()) { DCHECK_EQ(input_count, 2); return assembler.PendingLoopPhi(Map(node->InputAt(0)), rep, @@ -321,7 +321,10 @@ OpIndex GraphBuilder::Process( return assembler.CompressedHeapConstant(HeapConstantOf(op)); case IrOpcode::kExternalConstant: return assembler.ExternalConstant(OpParameter<ExternalReference>(op)); - + case IrOpcode::kRelocatableInt64Constant: + return assembler.RelocatableConstant( + OpParameter<RelocatablePtrConstantInfo>(op).value(), + OpParameter<RelocatablePtrConstantInfo>(op).rmode()); #define BINOP_CASE(opcode, assembler_op) \ case IrOpcode::k##opcode: \ return assembler.assembler_op(Map(node->InputAt(0)), Map(node->InputAt(1))); @@ -347,7 +350,9 @@ OpIndex GraphBuilder::Process( BINOP_CASE(Int64Mod, Int64Mod) BINOP_CASE(Uint64Mod, Uint64Mod) BINOP_CASE(Int32MulHigh, Int32MulOverflownBits) + BINOP_CASE(Int64MulHigh, Int64MulOverflownBits) BINOP_CASE(Uint32MulHigh, Uint32MulOverflownBits) + BINOP_CASE(Uint64MulHigh, Uint64MulOverflownBits) BINOP_CASE(Float32Add, Float32Add) BINOP_CASE(Float64Add, Float64Add) @@ -405,9 +410,9 @@ OpIndex GraphBuilder::Process( case IrOpcode::kWord64Sar: case IrOpcode::kWord32Sar: { - MachineRepresentation rep = opcode == IrOpcode::kWord64Sar - ? MachineRepresentation::kWord64 - : MachineRepresentation::kWord32; + WordRepresentation rep = opcode == IrOpcode::kWord64Sar + ? 
WordRepresentation::Word64() + : WordRepresentation::Word32(); ShiftOp::Kind kind; switch (ShiftKindOf(op)) { case ShiftKind::kShiftOutZeros: @@ -429,6 +434,14 @@ OpIndex GraphBuilder::Process( UNARY_CASE(Word64ReverseBytes, Word64ReverseBytes) UNARY_CASE(Word32Clz, Word32CountLeadingZeros) UNARY_CASE(Word64Clz, Word64CountLeadingZeros) + UNARY_CASE(Word32Ctz, Word32CountTrailingZeros) + UNARY_CASE(Word64Ctz, Word64CountTrailingZeros) + UNARY_CASE(Word32Popcnt, Word32PopCount) + UNARY_CASE(Word64Popcnt, Word64PopCount) + UNARY_CASE(SignExtendWord8ToInt32, Word32SignExtend8) + UNARY_CASE(SignExtendWord16ToInt32, Word32SignExtend16) + UNARY_CASE(SignExtendWord8ToInt64, Word64SignExtend8) + UNARY_CASE(SignExtendWord16ToInt64, Word64SignExtend16) UNARY_CASE(Float32Abs, Float32Abs) UNARY_CASE(Float64Abs, Float64Abs) @@ -458,53 +471,84 @@ OpIndex GraphBuilder::Process( UNARY_CASE(Float64Acosh, Float64Acosh) UNARY_CASE(Float64Tan, Float64Tan) UNARY_CASE(Float64Tanh, Float64Tanh) + UNARY_CASE(Float64Log2, Float64Log2) + UNARY_CASE(Float64Log10, Float64Log10) + UNARY_CASE(Float64Log1p, Float64Log1p) + UNARY_CASE(Float64Atan, Float64Atan) + UNARY_CASE(Float64Atanh, Float64Atanh) + UNARY_CASE(Float64Cbrt, Float64Cbrt) + + UNARY_CASE(BitcastWord32ToWord64, BitcastWord32ToWord64) + UNARY_CASE(BitcastFloat32ToInt32, BitcastFloat32ToWord32) + UNARY_CASE(BitcastInt32ToFloat32, BitcastWord32ToFloat32) + UNARY_CASE(BitcastFloat64ToInt64, BitcastFloat64ToWord64) + UNARY_CASE(BitcastInt64ToFloat64, BitcastWord64ToFloat64) + UNARY_CASE(ChangeUint32ToUint64, ChangeUint32ToUint64) + UNARY_CASE(ChangeInt32ToInt64, ChangeInt32ToInt64) + UNARY_CASE(SignExtendWord32ToInt64, ChangeInt32ToInt64) + + UNARY_CASE(ChangeFloat32ToFloat64, ChangeFloat32ToFloat64) + + UNARY_CASE(ChangeFloat64ToInt32, ReversibleFloat64ToInt32) + UNARY_CASE(ChangeFloat64ToInt64, ReversibleFloat64ToInt64) + UNARY_CASE(ChangeFloat64ToUint32, ReversibleFloat64ToUint32) + UNARY_CASE(ChangeFloat64ToUint64, ReversibleFloat64ToUint64) + + UNARY_CASE(ChangeInt32ToFloat64, ChangeInt32ToFloat64) + UNARY_CASE(ChangeInt64ToFloat64, ReversibleInt64ToFloat64) + UNARY_CASE(ChangeUint32ToFloat64, ChangeUint32ToFloat64) + + UNARY_CASE(RoundFloat64ToInt32, TruncateFloat64ToInt32OverflowUndefined) + UNARY_CASE(RoundInt32ToFloat32, ChangeInt32ToFloat32) + UNARY_CASE(RoundInt64ToFloat32, ChangeInt64ToFloat32) + UNARY_CASE(RoundInt64ToFloat64, ChangeInt64ToFloat64) + UNARY_CASE(RoundUint32ToFloat32, ChangeUint32ToFloat32) + UNARY_CASE(RoundUint64ToFloat32, ChangeUint64ToFloat32) + UNARY_CASE(RoundUint64ToFloat64, ChangeUint64ToFloat64) + UNARY_CASE(TruncateFloat64ToFloat32, ChangeFloat64ToFloat32) + UNARY_CASE(TruncateFloat64ToUint32, + TruncateFloat64ToUint32OverflowUndefined) + UNARY_CASE(TruncateFloat64ToWord32, JSTruncateFloat64ToWord32) + UNARY_CASE(TryTruncateFloat32ToInt64, TryTruncateFloat32ToInt64) + UNARY_CASE(TryTruncateFloat32ToUint64, TryTruncateFloat32ToUint64) + UNARY_CASE(TryTruncateFloat64ToInt32, TryTruncateFloat64ToInt32) + UNARY_CASE(TryTruncateFloat64ToInt64, TryTruncateFloat64ToInt64) + UNARY_CASE(TryTruncateFloat64ToUint32, TryTruncateFloat64ToUint32) + UNARY_CASE(TryTruncateFloat64ToUint64, TryTruncateFloat64ToUint64) + + UNARY_CASE(Float64ExtractLowWord32, Float64ExtractLowWord32) + UNARY_CASE(Float64ExtractHighWord32, Float64ExtractHighWord32) #undef UNARY_CASE - -#define CHANGE_CASE(opcode, kind, from, to) \ - case IrOpcode::k##opcode: \ - return assembler.Change(Map(node->InputAt(0)), ChangeOp::Kind::k##kind, \ - 
MachineRepresentation::k##from, \ - MachineRepresentation::k##to); - - CHANGE_CASE(BitcastWord32ToWord64, Bitcast, Word32, Word64) - CHANGE_CASE(BitcastFloat32ToInt32, Bitcast, Float32, Word32) - CHANGE_CASE(BitcastInt32ToFloat32, Bitcast, Word32, Float32) - CHANGE_CASE(BitcastFloat64ToInt64, Bitcast, Float64, Word64) - CHANGE_CASE(BitcastInt64ToFloat64, Bitcast, Word64, Float64) - CHANGE_CASE(ChangeUint32ToUint64, ZeroExtend, Word32, Word64) - CHANGE_CASE(ChangeInt32ToInt64, SignExtend, Word32, Word64) - CHANGE_CASE(ChangeInt32ToFloat64, SignedToFloat, Word32, Float64) - CHANGE_CASE(ChangeInt64ToFloat64, SignedToFloat, Word64, Float64) - CHANGE_CASE(ChangeUint32ToFloat64, UnsignedToFloat, Word32, Float64) - CHANGE_CASE(TruncateFloat64ToWord32, JSFloatTruncate, Float64, Word32) - CHANGE_CASE(TruncateFloat64ToFloat32, FloatConversion, Float64, Float32) - CHANGE_CASE(ChangeFloat32ToFloat64, FloatConversion, Float32, Float64) - CHANGE_CASE(RoundFloat64ToInt32, SignedFloatTruncate, Float64, Word32) - CHANGE_CASE(ChangeFloat64ToInt32, SignedNarrowing, Float64, Word32) - CHANGE_CASE(ChangeFloat64ToUint32, UnsignedNarrowing, Float64, Word32) - CHANGE_CASE(ChangeFloat64ToInt64, SignedNarrowing, Float64, Word64) - CHANGE_CASE(ChangeFloat64ToUint64, UnsignedNarrowing, Float64, Word64) - CHANGE_CASE(Float64ExtractLowWord32, ExtractLowHalf, Float64, Word32) - CHANGE_CASE(Float64ExtractHighWord32, ExtractHighHalf, Float64, Word32) -#undef CHANGE_CASE case IrOpcode::kTruncateInt64ToInt32: // 64- to 32-bit truncation is implicit in Turboshaft. return Map(node->InputAt(0)); - case IrOpcode::kTruncateFloat64ToInt64: { - ChangeOp::Kind kind; - switch (OpParameter<TruncateKind>(op)) { + case IrOpcode::kTruncateFloat32ToInt32: + switch (OpParameter<TruncateKind>(node->op())) { case TruncateKind::kArchitectureDefault: - kind = ChangeOp::Kind::kSignedFloatTruncate; - break; + return assembler.TruncateFloat32ToInt32OverflowUndefined( + Map(node->InputAt(0))); case TruncateKind::kSetOverflowToMin: - kind = ChangeOp::Kind::kSignedFloatTruncateOverflowToMin; - break; + return assembler.TruncateFloat32ToInt32OverflowToMin( + Map(node->InputAt(0))); + } + case IrOpcode::kTruncateFloat32ToUint32: + switch (OpParameter<TruncateKind>(node->op())) { + case TruncateKind::kArchitectureDefault: + return assembler.TruncateFloat32ToUint32OverflowUndefined( + Map(node->InputAt(0))); + case TruncateKind::kSetOverflowToMin: + return assembler.TruncateFloat32ToUint32OverflowToMin( + Map(node->InputAt(0))); + } + case IrOpcode::kTruncateFloat64ToInt64: + switch (OpParameter<TruncateKind>(node->op())) { + case TruncateKind::kArchitectureDefault: + return assembler.TruncateFloat64ToInt64OverflowUndefined( + Map(node->InputAt(0))); + case TruncateKind::kSetOverflowToMin: + return assembler.TruncateFloat64ToInt64OverflowToMin( + Map(node->InputAt(0))); } - return assembler.Change(Map(node->InputAt(0)), kind, - MachineRepresentation::kFloat64, - MachineRepresentation::kWord64); - } - case IrOpcode::kFloat64InsertLowWord32: return assembler.Float64InsertWord32( Map(node->InputAt(0)), Map(node->InputAt(1)), @@ -516,70 +560,101 @@ OpIndex GraphBuilder::Process( case IrOpcode::kBitcastTaggedToWord: return assembler.TaggedBitcast(Map(node->InputAt(0)), - MachineRepresentation::kTagged, - MachineType::PointerRepresentation()); + RegisterRepresentation::Tagged(), + RegisterRepresentation::PointerSized()); case IrOpcode::kBitcastWordToTagged: return assembler.TaggedBitcast(Map(node->InputAt(0)), - MachineType::PointerRepresentation(), - 
MachineRepresentation::kTagged); + RegisterRepresentation::PointerSized(), + RegisterRepresentation::Tagged()); + + case IrOpcode::kWord32Select: + return assembler.Word32Select( + Map(node->InputAt(0)), Map(node->InputAt(1)), Map(node->InputAt(2))); + case IrOpcode::kWord64Select: + return assembler.Word64Select( + Map(node->InputAt(0)), Map(node->InputAt(1)), Map(node->InputAt(2))); case IrOpcode::kLoad: + case IrOpcode::kLoadImmutable: case IrOpcode::kUnalignedLoad: { - MachineType loaded_rep = LoadRepresentationOf(op); + MemoryRepresentation loaded_rep = + MemoryRepresentation::FromMachineType(LoadRepresentationOf(op)); + RegisterRepresentation result_rep = loaded_rep.ToRegisterRepresentation(); Node* base = node->InputAt(0); Node* index = node->InputAt(1); - LoadOp::Kind kind = opcode == IrOpcode::kLoad - ? LoadOp::Kind::kRawAligned - : LoadOp::Kind::kRawUnaligned; + // It's ok to merge LoadImmutable into Load after scheduling. + LoadOp::Kind kind = opcode == IrOpcode::kUnalignedLoad + ? LoadOp::Kind::kRawUnaligned + : LoadOp::Kind::kRawAligned; if (index->opcode() == IrOpcode::kInt32Constant) { int32_t offset = OpParameter<int32_t>(index->op()); - return assembler.Load(Map(base), kind, loaded_rep, offset); + return assembler.Load(Map(base), kind, loaded_rep, result_rep, offset); } if (index->opcode() == IrOpcode::kInt64Constant) { int64_t offset = OpParameter<int64_t>(index->op()); if (base::IsValueInRangeForNumericType<int32_t>(offset)) { - return assembler.Load(Map(base), kind, loaded_rep, + return assembler.Load(Map(base), kind, loaded_rep, result_rep, static_cast<int32_t>(offset)); } } int32_t offset = 0; uint8_t element_size_log2 = 0; return assembler.IndexedLoad(Map(base), Map(index), kind, loaded_rep, - offset, element_size_log2); + result_rep, offset, element_size_log2); + } + case IrOpcode::kProtectedLoad: { + MemoryRepresentation loaded_rep = + MemoryRepresentation::FromMachineType(LoadRepresentationOf(op)); + RegisterRepresentation result_rep = loaded_rep.ToRegisterRepresentation(); + return assembler.ProtectedLoad( + Map(node->InputAt(0)), Map(node->InputAt(1)), loaded_rep, result_rep); } case IrOpcode::kStore: case IrOpcode::kUnalignedStore: { - bool aligned = opcode == IrOpcode::kStore; + bool aligned = opcode != IrOpcode::kUnalignedStore; StoreRepresentation store_rep = aligned ? StoreRepresentationOf(op) : StoreRepresentation(UnalignedStoreRepresentationOf(op), WriteBarrierKind::kNoWriteBarrier); - StoreOp::Kind kind = - aligned ? StoreOp::Kind::kRawAligned : StoreOp::Kind::kRawUnaligned; + StoreOp::Kind kind = opcode == IrOpcode::kStore + ? 
StoreOp::Kind::kRawAligned + : StoreOp::Kind::kRawUnaligned; + Node* base = node->InputAt(0); Node* index = node->InputAt(1); Node* value = node->InputAt(2); if (index->opcode() == IrOpcode::kInt32Constant) { int32_t offset = OpParameter<int32_t>(index->op()); return assembler.Store(Map(base), Map(value), kind, - store_rep.representation(), + MemoryRepresentation::FromMachineRepresentation( + store_rep.representation()), store_rep.write_barrier_kind(), offset); } if (index->opcode() == IrOpcode::kInt64Constant) { int64_t offset = OpParameter<int64_t>(index->op()); if (base::IsValueInRangeForNumericType<int32_t>(offset)) { return assembler.Store( - Map(base), Map(value), kind, store_rep.representation(), + Map(base), Map(value), kind, + MemoryRepresentation::FromMachineRepresentation( + store_rep.representation()), store_rep.write_barrier_kind(), static_cast<int32_t>(offset)); } } int32_t offset = 0; uint8_t element_size_log2 = 0; return assembler.IndexedStore( - Map(base), Map(index), Map(value), kind, store_rep.representation(), + Map(base), Map(index), Map(value), kind, + MemoryRepresentation::FromMachineRepresentation( + store_rep.representation()), store_rep.write_barrier_kind(), offset, element_size_log2); } + case IrOpcode::kProtectedStore: { + return assembler.ProtectedStore( + Map(node->InputAt(0)), Map(node->InputAt(1)), Map(node->InputAt(2)), + MemoryRepresentation::FromMachineRepresentation( + OpParameter<MachineRepresentation>(node->op()))); + } case IrOpcode::kRetain: return assembler.Retain(Map(node->InputAt(0))); @@ -639,6 +714,21 @@ OpIndex GraphBuilder::Process( return call; } + case IrOpcode::kTailCall: { + auto call_descriptor = CallDescriptorOf(op); + base::SmallVector<OpIndex, 16> arguments; + // The input `0` is the callee, the following value inputs are the + // arguments. `CallDescriptor::InputCount()` counts the callee and + // arguments. + OpIndex callee = Map(node->InputAt(0)); + for (int i = 1; i < static_cast<int>(call_descriptor->InputCount()); + ++i) { + arguments.emplace_back(Map(node->InputAt(i))); + } + return assembler.TailCall(callee, base::VectorOf(arguments), + call_descriptor); + } + case IrOpcode::kFrameState: { FrameState frame_state{node}; FrameStateData::Builder builder; @@ -658,6 +748,13 @@ OpIndex GraphBuilder::Process( &DeoptimizeParametersOf(op)); } + case IrOpcode::kTrapIf: + case IrOpcode::kTrapUnless: { + OpIndex condition = Map(node->InputAt(0)); + bool negated = op->opcode() == IrOpcode::kTrapUnless; + return assembler.TrapIf(condition, negated, TrapIdOf(op)); + } + case IrOpcode::kDeoptimize: { OpIndex frame_state = Map(node->InputAt(0)); return assembler.Deoptimize(frame_state, &DeoptimizeParametersOf(op)); @@ -687,8 +784,8 @@ OpIndex GraphBuilder::Process( } default: - std::cout << "unsupported node type: " << *node->op() << "\n"; - node->Print(); + std::cerr << "unsupported node type: " << *node->op() << "\n"; + node->Print(std::cerr); UNIMPLEMENTED(); } } diff --git a/deps/v8/src/compiler/turboshaft/graph.cc b/deps/v8/src/compiler/turboshaft/graph.cc index bf03a6ba2453e4..c1744dd68f09af 100644 --- a/deps/v8/src/compiler/turboshaft/graph.cc +++ b/deps/v8/src/compiler/turboshaft/graph.cc @@ -4,102 +4,12 @@ #include "src/compiler/turboshaft/graph.h" +#include <algorithm> #include <iomanip> -namespace v8::internal::compiler::turboshaft { - -void Graph::GenerateDominatorTree() { - for (Block* block : bound_blocks_) { - if (block->index() == StartBlock().index()) { - // Start block has no dominators. 
We create a jmp_ edge to itself, so that - // the SetDominator algorithm does not need a special case for when the - // start block is reached. - block->jmp_ = block; - block->nxt_ = nullptr; - block->len_ = 0; - block->jmp_len_ = 0; - continue; - } - if (block->kind_ == Block::Kind::kBranchTarget) { - // kBranchTarget blocks always have a single predecessor, which dominates - // them. - DCHECK_EQ(block->PredecessorCount(), 1); - block->SetDominator(block->LastPredecessor()); - } else if (block->kind_ == Block::Kind::kLoopHeader) { - // kLoopHeader blocks have 2 predecessors, but their dominator is - // always their first predecessor (the 2nd one is the loop's backedge). - DCHECK_EQ(block->PredecessorCount(), 2); - block->SetDominator(block->LastPredecessor()->NeighboringPredecessor()); - } else { - // kMerge has (more or less) an arbitrary number of predecessors. We need - // to find the lowest common ancestor (LCA) of all of the predecessors. - DCHECK_EQ(block->kind_, Block::Kind::kMerge); - Block* dominator = block->LastPredecessor(); - for (Block* pred = dominator->NeighboringPredecessor(); pred != nullptr; - pred = pred->NeighboringPredecessor()) { - dominator = dominator->GetCommonDominator(pred); - } - block->SetDominator(dominator); - } - } -} - -template <class Derived> -void RandomAccessStackDominatorNode<Derived>::SetDominator(Derived* dominator) { - DCHECK_NOT_NULL(dominator); - // Determining the jmp pointer - Derived* t = dominator->jmp_; - if (dominator->len_ - t->len_ == t->len_ - t->jmp_len_) { - t = t->jmp_; - } else { - t = dominator; - } - // Initializing fields - nxt_ = dominator; - jmp_ = t; - len_ = dominator->len_ + 1; - jmp_len_ = jmp_->len_; - dominator->AddChild(static_cast<Derived*>(this)); -} +#include "src/base/logging.h" -template <class Derived> -Derived* RandomAccessStackDominatorNode<Derived>::GetCommonDominator( - RandomAccessStackDominatorNode<Derived>* b) { - RandomAccessStackDominatorNode* a = this; - if (b->len_ > a->len_) { - // Swapping |a| and |b| so that |a| always has a greater length. - std::swap(a, b); - } - DCHECK_GE(a->len_, 0); - DCHECK_GE(b->len_, 0); - - // Going up the dominators of |a| in order to reach the level of |b|. - while (a->len_ != b->len_) { - DCHECK_GE(a->len_, 0); - if (a->jmp_len_ >= b->len_) { - a = a->jmp_; - } else { - a = a->nxt_; - } - } - - // Going up the dominators of |a| and |b| simultaneously until |a| == |b| - while (a != b) { - DCHECK_EQ(a->len_, b->len_); - DCHECK_GE(a->len_, 0); - if (a->jmp_ == b->jmp_) { - // We found a common dominator, but we actually want to find the smallest - // one, so we go down in the current subtree. 
- a = a->nxt_; - b = b->nxt_; - } else { - a = a->jmp_; - b = b->jmp_; - } - } - - return static_cast<Derived*>(a); -} +namespace v8::internal::compiler::turboshaft { // PrintDominatorTree prints the dominator tree in a format that looks like: // diff --git a/deps/v8/src/compiler/turboshaft/graph.h b/deps/v8/src/compiler/turboshaft/graph.h index ee2c30df9bf977..0685a4a0a61432 100644 --- a/deps/v8/src/compiler/turboshaft/graph.h +++ b/deps/v8/src/compiler/turboshaft/graph.h @@ -183,6 +183,11 @@ class OperationBuffer { uint16_t* operation_sizes_; }; +template <class Derived> +class DominatorForwardTreeNode; +template <class Derived> +class RandomAccessStackDominatorNode; + template <class Derived> class DominatorForwardTreeNode { // A class storing a forward representation of the dominator tree, since the @@ -210,8 +215,9 @@ class DominatorForwardTreeNode { } private: - friend class Block; - +#ifdef DEBUG + friend class RandomAccessStackDominatorNode<Derived>; +#endif Derived* neighboring_child_ = nullptr; Derived* last_child_ = nullptr; }; @@ -226,16 +232,26 @@ class RandomAccessStackDominatorNode // the height of the dominator tree. public: void SetDominator(Derived* dominator); + void SetAsDominatorRoot(); Derived* GetDominator() { return nxt_; } // Returns the lowest common dominator of {this} and {other}. - Derived* GetCommonDominator(RandomAccessStackDominatorNode<Derived>* other); + Derived* GetCommonDominator( + RandomAccessStackDominatorNode<Derived>* other) const; + + bool IsDominatedBy(const Derived* other) const { + // TODO(dmercadier): we don't have to call GetCommonDominator and could + // determine quicker that {this} isn't dominated by {other}. + return GetCommonDominator(other) == other; + } int Depth() const { return len_; } private: - friend class Graph; friend class DominatorForwardTreeNode<Derived>; +#ifdef DEBUG + friend class Block; +#endif int len_ = 0; Derived* nxt_ = nullptr; @@ -254,9 +270,11 @@ class Block : public RandomAccessStackDominatorNode<Block> { bool IsLoopOrMerge() const { return IsLoop() || IsMerge(); } bool IsLoop() const { return kind_ == Kind::kLoopHeader; } bool IsMerge() const { return kind_ == Kind::kMerge; } + bool IsBranchTarget() const { return kind_ == Kind::kBranchTarget; } bool IsHandler() const { return false; } bool IsSwitchCase() const { return false; } Kind kind() const { return kind_; } + void SetKind(Kind kind) { kind_ = kind; } BlockIndex index() const { return index_; } @@ -287,8 +305,7 @@ class Block : public RandomAccessStackDominatorNode<Block> { return result; } -#ifdef DEBUG - int PredecessorCount() { + int PredecessorCount() const { int count = 0; for (Block* pred = last_predecessor_; pred != nullptr; pred = pred->neighboring_predecessor_) { @@ -296,7 +313,6 @@ class Block : public RandomAccessStackDominatorNode<Block> { } return count; } -#endif Block* LastPredecessor() const { return last_predecessor_; } Block* NeighboringPredecessor() const { return neighboring_predecessor_; } @@ -320,6 +336,10 @@ class Block : public RandomAccessStackDominatorNode<Block> { return end_; } + // Computes the dominators of this block, assuming that the dominators of + // its predecessors are already computed.
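+ // Called each time a block is bound to the graph, so that the dominator + // tree is maintained on the fly while the graph is being built.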
+ void ComputeDominator(); + void PrintDominatorTree( std::vector<const char*> tree_symbols = std::vector<const char*>(), bool has_next = false) const; @@ -363,8 +383,6 @@ class Graph { next_block_ = 0; } - void GenerateDominatorTree(); - const Operation& Get(OpIndex i) const { // `Operation` contains const fields and can be overwritten with placement // new. Therefore, std::launder is necessary to avoid undefined behavior. @@ -475,6 +493,7 @@ class Graph { DCHECK_EQ(block->index_, BlockIndex::Invalid()); block->index_ = BlockIndex(static_cast<uint32_t>(bound_blocks_.size())); bound_blocks_.push_back(block); + block->ComputeDominator(); return true; } @@ -483,6 +502,19 @@ class Graph { block->end_ = next_operation_index(); } + void TurnLoopIntoMerge(Block* loop) { + DCHECK(loop->IsLoop()); + DCHECK_EQ(loop->PredecessorCount(), 1); + loop->kind_ = Block::Kind::kMerge; + for (Operation& op : operations(*loop)) { + if (auto* pending_phi = op.TryCast<PendingLoopPhiOp>()) { + Replace<PhiOp>(Index(*pending_phi), + base::VectorOf({pending_phi->first()}), + pending_phi->rep); + } + } + } + OpIndex next_operation_index() const { return operations_.EndIndex(); } Zone* graph_zone() const { return graph_zone_; } @@ -717,6 +749,106 @@ std::ostream& operator<<(std::ostream& os, PrintAsBlockHeader block); std::ostream& operator<<(std::ostream& os, const Graph& graph); std::ostream& operator<<(std::ostream& os, const Block::Kind& kind); +inline void Block::ComputeDominator() { + if (V8_UNLIKELY(LastPredecessor() == nullptr)) { + // If the block has no predecessors, then it's the start block. We create a + // jmp_ edge to itself, so that the SetDominator algorithm does not need a + // special case for when the start block is reached. + SetAsDominatorRoot(); + } else { + // If the block has one or more predecessors, the dominator is the lowest + // common ancestor (LCA) of all of the predecessors. + + // Note that for BranchTarget, there is a single predecessor. This doesn't + // change the logic: the loop won't be entered, and the first (and only) + // predecessor is set as the dominator. + // Similarly, since we compute dominators on the fly, when we reach a + // kLoopHeader, we haven't visited its body yet, and it should only have one + // predecessor (the backedge is not here yet), which is its dominator. 
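+ // Example: in a diamond control flow A -> {B, C} -> D, the merge block D + // has predecessors B and C, and GetCommonDominator(B, C) returns A, which + // therefore becomes the immediate dominator of D.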
+ DCHECK_IMPLIES(kind_ == Block::Kind::kLoopHeader, PredecessorCount() == 1); + + Block* dominator = LastPredecessor(); + for (Block* pred = dominator->NeighboringPredecessor(); pred != nullptr; + pred = pred->NeighboringPredecessor()) { + dominator = dominator->GetCommonDominator(pred); + } + SetDominator(dominator); + } + DCHECK_NE(jmp_, nullptr); + DCHECK_IMPLIES(nxt_ == nullptr, LastPredecessor() == nullptr); + DCHECK_IMPLIES(len_ == 0, LastPredecessor() == nullptr); +} + +template <class Derived> +inline void RandomAccessStackDominatorNode<Derived>::SetAsDominatorRoot() { + jmp_ = static_cast<Derived*>(this); + nxt_ = nullptr; + len_ = 0; + jmp_len_ = 0; +} + +template <class Derived> +inline void RandomAccessStackDominatorNode<Derived>::SetDominator( + Derived* dominator) { + DCHECK_NOT_NULL(dominator); + DCHECK_NULL(static_cast<Block*>(this)->neighboring_child_); + DCHECK_NULL(static_cast<Block*>(this)->last_child_); + // Determining the jmp pointer + Derived* t = dominator->jmp_; + if (dominator->len_ - t->len_ == t->len_ - t->jmp_len_) { + t = t->jmp_; + } else { + t = dominator; + } + // Initializing fields + nxt_ = dominator; + jmp_ = t; + len_ = dominator->len_ + 1; + jmp_len_ = jmp_->len_; + dominator->AddChild(static_cast<Derived*>(this)); +} + +template <class Derived> +inline Derived* RandomAccessStackDominatorNode<Derived>::GetCommonDominator( + RandomAccessStackDominatorNode<Derived>* other) const { + const RandomAccessStackDominatorNode* a = this; + const RandomAccessStackDominatorNode* b = other; + if (b->len_ > a->len_) { + // Swapping |a| and |b| so that |a| always has a greater length. + std::swap(a, b); + } + DCHECK_GE(a->len_, 0); + DCHECK_GE(b->len_, 0); + + // Going up the dominators of |a| in order to reach the level of |b|. + while (a->len_ != b->len_) { + DCHECK_GE(a->len_, 0); + if (a->jmp_len_ >= b->len_) { + a = a->jmp_; + } else { + a = a->nxt_; + } + } + + // Going up the dominators of |a| and |b| simultaneously until |a| == |b| + while (a != b) { + DCHECK_EQ(a->len_, b->len_); + DCHECK_GE(a->len_, 0); + if (a->jmp_ == b->jmp_) { + // We found a common dominator, but we actually want to find the smallest + // one, so we go down in the current subtree. + a = a->nxt_; + b = b->nxt_; + } else { + a = a->jmp_; + b = b->jmp_; + } + } + + return static_cast<Derived*>( + const_cast<RandomAccessStackDominatorNode<Derived>*>(a)); +} + } // namespace v8::internal::compiler::turboshaft #endif // V8_COMPILER_TURBOSHAFT_GRAPH_H_ diff --git a/deps/v8/src/compiler/turboshaft/machine-optimization-assembler.h b/deps/v8/src/compiler/turboshaft/machine-optimization-assembler.h new file mode 100644 index 00000000000000..b18325b1f35349 --- /dev/null +++ b/deps/v8/src/compiler/turboshaft/machine-optimization-assembler.h @@ -0,0 +1,1960 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef V8_COMPILER_TURBOSHAFT_MACHINE_OPTIMIZATION_ASSEMBLER_H_ +#define V8_COMPILER_TURBOSHAFT_MACHINE_OPTIMIZATION_ASSEMBLER_H_ + +#include <algorithm> +#include <cmath> +#include <cstring> +#include <limits> +#include <type_traits> + +#include "src/base/bits.h" +#include "src/base/division-by-constant.h" +#include "src/base/functional.h" +#include "src/base/ieee754.h" +#include "src/base/logging.h" +#include "src/base/macros.h" +#include "src/base/overflowing-math.h" +#include "src/base/template-utils.h" +#include "src/base/vector.h" +#include "src/codegen/machine-type.h" +#include "src/compiler/backend/instruction.h" +#include "src/compiler/turboshaft/assembler.h" +#include "src/compiler/turboshaft/operations.h" +#include "src/numbers/conversions.h" + +namespace v8 ::internal::compiler::turboshaft { + +// The MachineOptimizationAssembler performs basic optimizations on low-level +// operations that can be performed on-the-fly, without requiring type analysis +// or analyzing uses. It largely corresponds to MachineOperatorReducer in +// sea-of-nodes Turbofan. +template <class Base, bool signalling_nan_possible> +class MachineOptimizationAssembler + : public AssemblerInterface< + MachineOptimizationAssembler<Base, signalling_nan_possible>, Base> { + public: + template <class T> + bool Is(OpIndex op) { + return Base::template Is<T>(op); + } + template <class T> + const T& Cast(OpIndex op) { + return Base::template Cast<T>(op); + } + template <class T> + const T* TryCast(OpIndex op) { + return Base::template TryCast<T>(op); + } + using Base::Get; + using Base::graph; + + MachineOptimizationAssembler(Graph* graph, Zone* phase_zone) + : AssemblerInterface<MachineOptimizationAssembler, Base>(graph, + phase_zone) {} + + OpIndex Change(OpIndex input, ChangeOp::Kind kind, + ChangeOp::Assumption assumption, RegisterRepresentation from, + RegisterRepresentation to) { + if (ShouldSkipOptimizationStep()) { + return Base::Change(input, kind, assumption, from, to); + } + using Kind = ChangeOp::Kind; + if (from == WordRepresentation::Word32()) { + input = TryRemoveWord32ToWord64Conversion(input); + } + if (uint64_t value; + from.IsWord() && + this->MatchWordConstant(input, WordRepresentation(from), &value)) { + if (kind == Kind::kSignExtend && from == WordRepresentation::Word32() && + to == WordRepresentation::Word64()) { + return this->Word64Constant(int64_t{static_cast<int32_t>(value)}); + } + if (kind == any_of(Kind::kZeroExtend, Kind::kBitcast) && + from == WordRepresentation::Word32() && + to == WordRepresentation::Word64()) { + return this->Word64Constant(uint64_t{static_cast<uint32_t>(value)}); + } + if (kind == Kind::kBitcast && from == WordRepresentation::Word32() && + to == RegisterRepresentation::Float32()) { + return this->Float32Constant( + base::bit_cast<float>(static_cast<uint32_t>(value))); + } + if (kind == Kind::kBitcast && from == WordRepresentation::Word64() && + to == RegisterRepresentation::Float64()) { + return this->Float64Constant(base::bit_cast<double>(value)); + } + if (kind == Kind::kSignedToFloat && + from == WordRepresentation::Word32() && + to == RegisterRepresentation::Float64()) { + return this->Float64Constant( + static_cast<double>(static_cast<int32_t>(value))); + } + if (kind == Kind::kSignedToFloat && + from == WordRepresentation::Word64() && + to == RegisterRepresentation::Float64()) { + return this->Float64Constant( + static_cast<double>(static_cast<int64_t>(value))); + } + if (kind == Kind::kUnsignedToFloat && + from == WordRepresentation::Word32() && + to == 
RegisterRepresentation::Float64()) { + return this->Float64Constant( + static_cast<double>(static_cast<uint32_t>(value))); + } + } + if (float value; from == RegisterRepresentation::Float32() && + this->MatchFloat32Constant(input, &value)) { + if (kind == Kind::kFloatConversion && + to == RegisterRepresentation::Float64()) { + return this->Float64Constant(value); + } + if (kind == Kind::kBitcast && to == WordRepresentation::Word32()) { + return this->Word32Constant(base::bit_cast<uint32_t>(value)); + } + } + if (double value; from == RegisterRepresentation::Float64() && + this->MatchFloat64Constant(input, &value)) { + if (kind == Kind::kFloatConversion && + to == RegisterRepresentation::Float32()) { + return this->Float32Constant(DoubleToFloat32_NoInline(value)); + } + if (kind == Kind::kBitcast && to == WordRepresentation::Word64()) { + return this->Word64Constant(base::bit_cast<uint64_t>(value)); + } + if (kind == Kind::kSignedFloatTruncateOverflowToMin) { + double truncated = std::trunc(value); + if (to == WordRepresentation::Word64()) { + int64_t result = std::numeric_limits<int64_t>::min(); + if (truncated >= std::numeric_limits<int64_t>::min() && + truncated <= kMaxDoubleRepresentableInt64) { + result = static_cast<int64_t>(truncated); + } + return this->Word64Constant(result); + } + if (to == WordRepresentation::Word32()) { + int32_t result = std::numeric_limits<int32_t>::min(); + if (truncated >= std::numeric_limits<int32_t>::min() && + truncated <= std::numeric_limits<int32_t>::max()) { + result = static_cast<int32_t>(truncated); + } + return this->Word32Constant(result); + } + } + if (kind == Kind::kJSFloatTruncate && + to == WordRepresentation::Word32()) { + return this->Word32Constant(DoubleToInt32_NoInline(value)); + } + } + if (float value; from == RegisterRepresentation::Float32() && + this->MatchFloat32Constant(input, &value)) { + if (kind == Kind::kFloatConversion && + to == RegisterRepresentation::Float64()) { + return this->Float64Constant(value); + } + } + + const Operation& input_op = Get(input); + if (const ChangeOp* change_op = input_op.TryCast<ChangeOp>()) { + if (change_op->from == to && change_op->to == from && + change_op->IsReversibleBy(kind, signalling_nan_possible)) { + return change_op->input(); + } + } + return Base::Change(input, kind, assumption, from, to); + } + + OpIndex Float64InsertWord32(OpIndex float64, OpIndex word32, + Float64InsertWord32Op::Kind kind) { + if (ShouldSkipOptimizationStep()) { + return Base::Float64InsertWord32(float64, word32, kind); + } + double f; + uint32_t w; + if (this->MatchFloat64Constant(float64, &f) && + this->MatchWord32Constant(word32, &w)) { + uint64_t float_as_word = base::bit_cast<uint64_t>(f); + switch (kind) { + case Float64InsertWord32Op::Kind::kLowHalf: + return this->Float64Constant(base::bit_cast<double>( + (float_as_word & uint64_t{0xFFFFFFFF00000000}) | w)); + case Float64InsertWord32Op::Kind::kHighHalf: + return this->Float64Constant(base::bit_cast<double>( + (float_as_word & uint64_t{0xFFFFFFFF}) | (uint64_t{w} << 32))); + } + } + return Base::Float64InsertWord32(float64, word32, kind); + } + + OpIndex TaggedBitcast(OpIndex input, RegisterRepresentation from, + RegisterRepresentation to) { + if (ShouldSkipOptimizationStep()) { + return Base::TaggedBitcast(input, from, to); + } + // A Tagged -> Untagged -> Tagged sequence can be short-cut. + // An Untagged -> Tagged -> Untagged sequence however cannot be removed, + // because the GC might have modified the pointer. 
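+ // Concretely: the outer bitcast must be PointerSized -> Tagged and its + // input a Tagged -> PointerSized bitcast for the pair below to cancel out.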
+ if (auto* input_bitcast = TryCast<TaggedBitcastOp>(input)) { + if (all_of(input_bitcast->to, from) == + RegisterRepresentation::PointerSized() && + all_of(input_bitcast->from, to) == RegisterRepresentation::Tagged()) { + return input_bitcast->input(); + } + } + return Base::TaggedBitcast(input, from, to); + } + + OpIndex FloatUnary(OpIndex input, FloatUnaryOp::Kind kind, + FloatRepresentation rep) { + if (ShouldSkipOptimizationStep()) { + return Base::FloatUnary(input, kind, rep); + } + if (float k; rep == FloatRepresentation::Float32() && + this->MatchFloat32Constant(input, &k)) { + if (std::isnan(k)) { + return this->Float32Constant(std::numeric_limits<float>::quiet_NaN()); + } + switch (kind) { + case FloatUnaryOp::Kind::kAbs: + return this->Float32Constant(std::abs(k)); + case FloatUnaryOp::Kind::kNegate: + return this->Float32Constant(-k); + case FloatUnaryOp::Kind::kSilenceNaN: + DCHECK(!std::isnan(k)); + return this->Float32Constant(k); + case FloatUnaryOp::Kind::kRoundDown: + return this->Float32Constant(std::floor(k)); + case FloatUnaryOp::Kind::kRoundUp: + return this->Float32Constant(std::ceil(k)); + case FloatUnaryOp::Kind::kRoundToZero: + return this->Float32Constant(std::trunc(k)); + case FloatUnaryOp::Kind::kRoundTiesEven: + DCHECK_EQ(std::nearbyint(1.5), 2); + DCHECK_EQ(std::nearbyint(2.5), 2); + return this->Float32Constant(std::nearbyint(k)); + case FloatUnaryOp::Kind::kLog: + return this->Float32Constant(base::ieee754::log(k)); + case FloatUnaryOp::Kind::kSqrt: + return this->Float32Constant(std::sqrt(k)); + case FloatUnaryOp::Kind::kExp: + return this->Float32Constant(base::ieee754::exp(k)); + case FloatUnaryOp::Kind::kExpm1: + return this->Float32Constant(base::ieee754::expm1(k)); + case FloatUnaryOp::Kind::kSin: + return this->Float32Constant(base::ieee754::sin(k)); + case FloatUnaryOp::Kind::kCos: + return this->Float32Constant(base::ieee754::cos(k)); + case FloatUnaryOp::Kind::kSinh: + return this->Float32Constant(base::ieee754::sinh(k)); + case FloatUnaryOp::Kind::kCosh: + return this->Float32Constant(base::ieee754::cosh(k)); + case FloatUnaryOp::Kind::kAcos: + return this->Float32Constant(base::ieee754::acos(k)); + case FloatUnaryOp::Kind::kAsin: + return this->Float32Constant(base::ieee754::asin(k)); + case FloatUnaryOp::Kind::kAsinh: + return this->Float32Constant(base::ieee754::asinh(k)); + case FloatUnaryOp::Kind::kAcosh: + return this->Float32Constant(base::ieee754::acosh(k)); + case FloatUnaryOp::Kind::kTan: + return this->Float32Constant(base::ieee754::tan(k)); + case FloatUnaryOp::Kind::kTanh: + return this->Float32Constant(base::ieee754::tanh(k)); + case FloatUnaryOp::Kind::kLog2: + return this->Float32Constant(base::ieee754::log2(k)); + case FloatUnaryOp::Kind::kLog10: + return this->Float32Constant(base::ieee754::log10(k)); + case FloatUnaryOp::Kind::kLog1p: + return this->Float32Constant(base::ieee754::log1p(k)); + case FloatUnaryOp::Kind::kCbrt: + return this->Float32Constant(base::ieee754::cbrt(k)); + case FloatUnaryOp::Kind::kAtan: + return this->Float32Constant(base::ieee754::atan(k)); + case FloatUnaryOp::Kind::kAtanh: + return this->Float32Constant(base::ieee754::atanh(k)); + } + } else if (double k; rep == FloatRepresentation::Float64() && + this->MatchFloat64Constant(input, &k)) { + if (std::isnan(k)) { + return this->Float64Constant(std::numeric_limits<double>::quiet_NaN()); + } + switch (kind) { + case FloatUnaryOp::Kind::kAbs: + return this->Float64Constant(std::abs(k)); + case FloatUnaryOp::Kind::kNegate: + return 
this->Float64Constant(-k); + case FloatUnaryOp::Kind::kSilenceNaN: + DCHECK(!std::isnan(k)); + return this->Float64Constant(k); + case FloatUnaryOp::Kind::kRoundDown: + return this->Float64Constant(std::floor(k)); + case FloatUnaryOp::Kind::kRoundUp: + return this->Float64Constant(std::ceil(k)); + case FloatUnaryOp::Kind::kRoundToZero: + return this->Float64Constant(std::trunc(k)); + case FloatUnaryOp::Kind::kRoundTiesEven: + DCHECK_EQ(std::nearbyint(1.5), 2); + DCHECK_EQ(std::nearbyint(2.5), 2); + return this->Float64Constant(std::nearbyint(k)); + case FloatUnaryOp::Kind::kLog: + return this->Float64Constant(base::ieee754::log(k)); + case FloatUnaryOp::Kind::kSqrt: + return this->Float64Constant(std::sqrt(k)); + case FloatUnaryOp::Kind::kExp: + return this->Float64Constant(base::ieee754::exp(k)); + case FloatUnaryOp::Kind::kExpm1: + return this->Float64Constant(base::ieee754::expm1(k)); + case FloatUnaryOp::Kind::kSin: + return this->Float64Constant(base::ieee754::sin(k)); + case FloatUnaryOp::Kind::kCos: + return this->Float64Constant(base::ieee754::cos(k)); + case FloatUnaryOp::Kind::kSinh: + return this->Float64Constant(base::ieee754::sinh(k)); + case FloatUnaryOp::Kind::kCosh: + return this->Float64Constant(base::ieee754::cosh(k)); + case FloatUnaryOp::Kind::kAcos: + return this->Float64Constant(base::ieee754::acos(k)); + case FloatUnaryOp::Kind::kAsin: + return this->Float64Constant(base::ieee754::asin(k)); + case FloatUnaryOp::Kind::kAsinh: + return this->Float64Constant(base::ieee754::asinh(k)); + case FloatUnaryOp::Kind::kAcosh: + return this->Float64Constant(base::ieee754::acosh(k)); + case FloatUnaryOp::Kind::kTan: + return this->Float64Constant(base::ieee754::tan(k)); + case FloatUnaryOp::Kind::kTanh: + return this->Float64Constant(base::ieee754::tanh(k)); + case FloatUnaryOp::Kind::kLog2: + return this->Float64Constant(base::ieee754::log2(k)); + case FloatUnaryOp::Kind::kLog10: + return this->Float64Constant(base::ieee754::log10(k)); + case FloatUnaryOp::Kind::kLog1p: + return this->Float64Constant(base::ieee754::log1p(k)); + case FloatUnaryOp::Kind::kCbrt: + return this->Float64Constant(base::ieee754::cbrt(k)); + case FloatUnaryOp::Kind::kAtan: + return this->Float64Constant(base::ieee754::atan(k)); + case FloatUnaryOp::Kind::kAtanh: + return this->Float64Constant(base::ieee754::atanh(k)); + } + } + return Base::FloatUnary(input, kind, rep); + } + + OpIndex WordUnary(OpIndex input, WordUnaryOp::Kind kind, + WordRepresentation rep) { + if (ShouldSkipOptimizationStep()) { + return Base::WordUnary(input, kind, rep); + } + if (rep == WordRepresentation::Word32()) { + input = TryRemoveWord32ToWord64Conversion(input); + } + if (uint32_t k; rep == WordRepresentation::Word32() && + this->MatchWord32Constant(input, &k)) { + switch (kind) { + case WordUnaryOp::Kind::kReverseBytes: + return this->Word32Constant(base::bits::ReverseBytes(k)); + case WordUnaryOp::Kind::kCountLeadingZeros: + return this->Word32Constant(base::bits::CountLeadingZeros(k)); + case WordUnaryOp::Kind::kCountTrailingZeros: + return this->Word32Constant(base::bits::CountTrailingZeros(k)); + case WordUnaryOp::Kind::kPopCount: + return this->Word32Constant(base::bits::CountPopulation(k)); + case WordUnaryOp::Kind::kSignExtend8: + return this->Word32Constant(int32_t{static_cast<int8_t>(k)}); + case WordUnaryOp::Kind::kSignExtend16: + return this->Word32Constant(int32_t{static_cast<int16_t>(k)}); + } + } else if (uint64_t k; rep == WordRepresentation::Word64() && + this->MatchWord64Constant(input, &k)) { + switch 
(kind) { + case WordUnaryOp::Kind::kReverseBytes: + return this->Word64Constant(base::bits::ReverseBytes(k)); + case WordUnaryOp::Kind::kCountLeadingZeros: + return this->Word64Constant( + uint64_t{base::bits::CountLeadingZeros(k)}); + case WordUnaryOp::Kind::kCountTrailingZeros: + return this->Word64Constant( + uint64_t{base::bits::CountTrailingZeros(k)}); + case WordUnaryOp::Kind::kPopCount: + return this->Word64Constant(uint64_t{base::bits::CountPopulation(k)}); + case WordUnaryOp::Kind::kSignExtend8: + return this->Word64Constant(int64_t{static_cast<int8_t>(k)}); + case WordUnaryOp::Kind::kSignExtend16: + return this->Word64Constant(int64_t{static_cast<int16_t>(k)}); + } + } + return Base::WordUnary(input, kind, rep); + } + + OpIndex FloatBinop(OpIndex lhs, OpIndex rhs, FloatBinopOp::Kind kind, + FloatRepresentation rep) { + if (ShouldSkipOptimizationStep()) { + return Base::FloatBinop(lhs, rhs, kind, rep); + } + + using Kind = FloatBinopOp::Kind; + + // Place constant on the right for commutative operators. + if (FloatBinopOp::IsCommutative(kind) && Is<ConstantOp>(lhs) && + !Is<ConstantOp>(rhs)) { + return FloatBinop(rhs, lhs, kind, rep); + } + + // constant folding + if (float k1, k2; rep == FloatRepresentation::Float32() && + this->MatchFloat32Constant(lhs, &k1) && + this->MatchFloat32Constant(rhs, &k2)) { + switch (kind) { + case Kind::kAdd: + return this->Float32Constant(k1 + k2); + case Kind::kMul: + return this->Float32Constant(k1 * k2); + case Kind::kSub: + return this->Float32Constant(k1 - k2); + case Kind::kMin: + return this->Float32Constant(JSMin(k1, k2)); + case Kind::kMax: + return this->Float32Constant(JSMax(k1, k2)); + case Kind::kDiv: + return this->Float32Constant(k1 / k2); + case Kind::kPower: + return this->Float32Constant(base::ieee754::pow(k1, k2)); + case Kind::kAtan2: + return this->Float32Constant(base::ieee754::atan2(k1, k2)); + case Kind::kMod: + UNREACHABLE(); + } + } + if (double k1, k2; rep == FloatRepresentation::Float64() && + this->MatchFloat64Constant(lhs, &k1) && + this->MatchFloat64Constant(rhs, &k2)) { + switch (kind) { + case Kind::kAdd: + return this->Float64Constant(k1 + k2); + case Kind::kMul: + return this->Float64Constant(k1 * k2); + case Kind::kSub: + return this->Float64Constant(k1 - k2); + case Kind::kMin: + return this->Float64Constant(JSMin(k1, k2)); + case Kind::kMax: + return this->Float64Constant(JSMax(k1, k2)); + case Kind::kDiv: + return this->Float64Constant(k1 / k2); + case Kind::kMod: + return this->Float64Constant(Modulo(k1, k2)); + case Kind::kPower: + return this->Float64Constant(base::ieee754::pow(k1, k2)); + case Kind::kAtan2: + return this->Float64Constant(base::ieee754::atan2(k1, k2)); + } + } + + // lhs <op> NaN => NaN + if (this->MatchNaN(rhs) || (this->MatchNaN(lhs) && kind != Kind::kPower)) { + // Return a quiet NaN since Wasm operations could have signalling NaN as + // input but not as output. 
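+ // (A NaN lhs is not folded for kPower because e.g. pow(NaN, 0) == 1, so + // the result is not necessarily NaN.)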
+ return this->FloatConstant(std::numeric_limits<double>::quiet_NaN(), rep); + } + + if (Is<ConstantOp>(rhs)) { + if (kind == Kind::kMul) { + // lhs * 1 => lhs + if (!signalling_nan_possible && this->MatchFloat(rhs, 1.0)) { + return lhs; + } + // lhs * 2 => lhs + lhs + if (this->MatchFloat(rhs, 2.0)) { + return this->FloatAdd(lhs, lhs, rep); + } + // lhs * -1 => -lhs + if (this->MatchFloat(rhs, -1.0)) { + return this->FloatNegate(lhs, rep); + } + } + + if (kind == Kind::kDiv) { + // lhs / 1 => lhs + if (!signalling_nan_possible && this->MatchFloat(rhs, 1.0)) { + return lhs; + } + // lhs / -1 => -lhs + if (this->MatchFloat(rhs, -1.0)) { + return this->FloatNegate(lhs, rep); + } + // All reciprocals of non-denormal powers of two can be represented + // exactly, so division by power of two can be reduced to + // multiplication by reciprocal, with the same result. + // x / k => x * (1 / k) + if (rep == FloatRepresentation::Float32()) { + if (float k; + this->MatchFloat32Constant(rhs, &k) && std::isnormal(k) && + k != 0 && std::isfinite(k) && + base::bits::IsPowerOfTwo(base::Double(k).Significand())) { + return this->FloatMul(lhs, this->FloatConstant(1.0 / k, rep), rep); + } + } else { + DCHECK_EQ(rep, FloatRepresentation::Float64()); + if (double k; + this->MatchFloat64Constant(rhs, &k) && std::isnormal(k) && + k != 0 && std::isfinite(k) && + base::bits::IsPowerOfTwo(base::Double(k).Significand())) { + return this->FloatMul(lhs, this->FloatConstant(1.0 / k, rep), rep); + } + } + } + + if (kind == Kind::kMod) { + // x % 0 => NaN + if (this->MatchFloat(rhs, 0.0)) { + return this->FloatConstant(std::numeric_limits<double>::quiet_NaN(), + rep); + } + } + + if (kind == Kind::kSub) { + // lhs - +0.0 => lhs + if (!signalling_nan_possible && this->MatchFloat(rhs, +0.0)) { + return lhs; + } + } + + if (kind == Kind::kPower) { + if (this->MatchFloat(rhs, 0.0) || this->MatchFloat(rhs, -0.0)) { + return this->FloatConstant(1.0, rep); + } + if (this->MatchFloat(rhs, 2.0)) { + return this->FloatMul(lhs, lhs, rep); + } + if (this->MatchFloat(rhs, 0.5)) { + Block* if_neg_infinity = this->NewBlock(Block::Kind::kBranchTarget); + if_neg_infinity->SetDeferred(true); + Block* otherwise = this->NewBlock(Block::Kind::kBranchTarget); + Block* merge = this->NewBlock(Block::Kind::kMerge); + this->Branch(this->FloatLessThanOrEqual( + lhs, this->FloatConstant(-V8_INFINITY, rep), rep), + if_neg_infinity, otherwise); + + this->Bind(if_neg_infinity); + OpIndex infty = this->FloatConstant(V8_INFINITY, rep); + this->Goto(merge); + + this->Bind(otherwise); + OpIndex sqrt = this->FloatSqrt(lhs, rep); + this->Goto(merge); + + this->Bind(merge); + return this->Phi(base::VectorOf({infty, sqrt}), rep); + } + } + } + + if (!signalling_nan_possible && kind == Kind::kSub && + this->MatchFloat(lhs, -0.0)) { + // -0.0 - round_down(-0.0 - y) => round_up(y) + if (OpIndex a, b, c; + FloatUnaryOp::IsSupported(FloatUnaryOp::Kind::kRoundUp, rep) && + this->MatchFloatRoundDown(rhs, &a, rep) && + this->MatchFloatSub(a, &b, &c, rep) && this->MatchFloat(b, -0.0)) { + return this->FloatRoundUp(c, rep); + } + // -0.0 - rhs => -rhs + return this->FloatNegate(rhs, rep); + } + + return Base::FloatBinop(lhs, rhs, kind, rep); + } + + OpIndex WordBinop(OpIndex left, OpIndex right, WordBinopOp::Kind kind, + WordRepresentation rep) { + if (ShouldSkipOptimizationStep()) { + return Base::WordBinop(left, right, kind, rep); + } + + using Kind = WordBinopOp::Kind; + + DCHECK_EQ(rep, any_of(WordRepresentation::Word32(), + WordRepresentation::Word64())); + bool 
is_64 = rep == WordRepresentation::Word64(); + + if (!is_64) { + left = TryRemoveWord32ToWord64Conversion(left); + right = TryRemoveWord32ToWord64Conversion(right); + } + + // Place constant on the right for commutative operators. + if (WordBinopOp::IsCommutative(kind) && Is<ConstantOp>(left) && + !Is<ConstantOp>(right)) { + return WordBinop(right, left, kind, rep); + } + // constant folding + if (uint64_t k1, k2; this->MatchWordConstant(left, rep, &k1) && + this->MatchWordConstant(right, rep, &k2)) { + switch (kind) { + case Kind::kAdd: + return this->WordConstant(k1 + k2, rep); + case Kind::kMul: + return this->WordConstant(k1 * k2, rep); + case Kind::kBitwiseAnd: + return this->WordConstant(k1 & k2, rep); + case Kind::kBitwiseOr: + return this->WordConstant(k1 | k2, rep); + case Kind::kBitwiseXor: + return this->WordConstant(k1 ^ k2, rep); + case Kind::kSub: + return this->WordConstant(k1 - k2, rep); + case Kind::kSignedMulOverflownBits: + return this->WordConstant( + is_64 ? base::bits::SignedMulHigh64(static_cast<int64_t>(k1), + static_cast<int64_t>(k2)) + : base::bits::SignedMulHigh32(static_cast<int32_t>(k1), + static_cast<int32_t>(k2)), + rep); + case Kind::kUnsignedMulOverflownBits: + return this->WordConstant( + is_64 ? base::bits::UnsignedMulHigh64(k1, k2) + : base::bits::UnsignedMulHigh32(static_cast<uint32_t>(k1), + static_cast<uint32_t>(k2)), + rep); + case Kind::kSignedDiv: + return this->WordConstant( + is_64 ? base::bits::SignedDiv64(k1, k2) + : base::bits::SignedDiv32(static_cast<int32_t>(k1), + static_cast<int32_t>(k2)), + rep); + case Kind::kUnsignedDiv: + return this->WordConstant( + is_64 ? base::bits::UnsignedDiv64(k1, k2) + : base::bits::UnsignedDiv32(static_cast<uint32_t>(k1), + static_cast<uint32_t>(k2)), + rep); + case Kind::kSignedMod: + return this->WordConstant( + is_64 ? base::bits::SignedMod64(k1, k2) + : base::bits::SignedMod32(static_cast<int32_t>(k1), + static_cast<int32_t>(k2)), + rep); + case Kind::kUnsignedMod: + return this->WordConstant( + is_64 ? base::bits::UnsignedMod64(k1, k2) + : base::bits::UnsignedMod32(static_cast<uint32_t>(k1), + static_cast<uint32_t>(k2)), + rep); + } + } + + // TODO(tebbi): Detect and merge multiple bitfield checks for CSA/Torque + // code. + + if (uint64_t right_value; + this->MatchWordConstant(right, rep, &right_value)) { + int64_t right_value_signed = + is_64 ? 
static_cast<int64_t>(right_value) + : int64_t{static_cast<int32_t>(right_value)}; + // (a <op> k1) <op> k2 => a <op> (k1 <op> k2) + if (OpIndex a, k1; WordBinopOp::IsAssociative(kind) && + this->MatchWordBinop(left, &a, &k1, kind, rep) && + Is<ConstantOp>(k1)) { + OpIndex k2 = right; + return WordBinop(a, WordBinop(k1, k2, kind, rep), kind, rep); + } + switch (kind) { + case Kind::kSub: + // left - k => left + -k + return WordBinop(left, this->WordConstant(-right_value, rep), + Kind::kAdd, rep); + case Kind::kAdd: + // left + 0 => left + if (right_value == 0) { + return left; + } + break; + case Kind::kBitwiseXor: + // left ^ 0 => left + if (right_value == 0) { + return left; + } + // left ^ 1 => left == 0 if left is 0 or 1 + if (right_value == 1 && IsBit(left)) { + return this->Word32Equal(left, this->Word32Constant(0)); + } + // (x ^ -1) ^ -1 => x + { + OpIndex x, y; + int64_t k; + if (right_value_signed == -1 && + this->MatchWordBinop(left, &x, &y, Kind::kBitwiseXor, rep) && + this->MatchWordConstant(y, rep, &k) && k == -1) { + return x; + } + } + break; + case Kind::kBitwiseOr: + // left | 0 => left + if (right_value == 0) { + return left; + } + // left | -1 => -1 + if (right_value_signed == -1) { + return right; + } + // (x & K1) | K2 => x | K2 if K2 has ones for every zero bit in K1. + // This case can be constructed by UpdateWord and UpdateWord32 in CSA. + { + OpIndex x, y; + uint64_t k1; + uint64_t k2 = right_value; + if (this->MatchBitwiseAnd(left, &x, &y, rep) && + this->MatchWordConstant(y, rep, &k1) && + (k1 | k2) == rep.MaxUnsignedValue()) { + return this->WordBitwiseOr(x, right, rep); + } + } + break; + case Kind::kMul: + // left * 0 => 0 + if (right_value == 0) { + return this->WordConstant(0, rep); + } + // left * 1 => left + if (right_value == 1) { + return left; + } + // left * -1 => 0 - left + if (right_value_signed == -1) { + return this->WordSub(this->WordConstant(0, rep), left, rep); + } + // left * 2^k => left << k + if (base::bits::IsPowerOfTwo(right_value)) { + OpIndex shift_amount = this->WordConstant( + base::bits::WhichPowerOfTwo(right_value), rep); + return this->ShiftLeft(left, shift_amount, rep); + } + break; + case Kind::kBitwiseAnd: + // left & -1 => left + if (right_value_signed == -1) { + return left; + } + // x & 0 => 0 + if (right_value == 0) { + return right; + } + + if (right_value == 1) { + // (x + x) & 1 => 0 + OpIndex left_ignore_extensions = + IsWord32ConvertedToWord64(left) + ? UndoWord32ToWord64Conversion(left) + : left; + if (OpIndex a, b; + this->MatchWordAdd(left_ignore_extensions, &a, &b, + WordRepresentation::Word32()) && + a == b) { + return this->WordConstant(0, rep); + } + + // CMP & 1 => CMP + if (IsBit(left_ignore_extensions)) { + return left; + } + } + break; + case WordBinopOp::Kind::kSignedDiv: + return ReduceSignedDiv(left, right_value_signed, rep); + case WordBinopOp::Kind::kUnsignedDiv: + return ReduceUnsignedDiv(left, right_value, rep); + case WordBinopOp::Kind::kSignedMod: + // left % 0 => 0 + // left % 1 => 0 + // left % -1 => 0 + if (right_value_signed == any_of(0, 1, -1)) { + return this->WordConstant(0, rep); + } + if (right_value_signed != rep.MinSignedValue()) { + right_value_signed = Abs(right_value_signed); + } + // left % 2^n => ((left + m) & (2^n - 1)) - m + // where m = (left >> bits-1) >>> bits-n + // This is a branch-free version of the following: + // left >= 0 ?
left & (2^n - 1) + // : ((left + (2^n - 1)) & (2^n - 1)) - (2^n - 1) + // Adding and subtracting (2^n - 1) before and after the bitwise-and + // keeps the result congruent modulo 2^n, but shifts the resulting + // value range to become -(2^n - 1) ... 0. + if (base::bits::IsPowerOfTwo(right_value_signed)) { + uint32_t bits = rep.bit_width(); + uint32_t n = base::bits::WhichPowerOfTwo(right_value_signed); + OpIndex m = this->ShiftRightLogical( + this->ShiftRightArithmetic(left, bits - 1, rep), bits - n, rep); + return this->WordSub( + this->WordBitwiseAnd( + this->WordAdd(left, m, rep), + this->WordConstant(right_value_signed - 1, rep), rep), + m, rep); + } + // The `IntDiv` with a constant right-hand side will be turned into a + // multiplication, avoiding the expensive integer division. + return this->WordSub( + left, this->WordMul(this->IntDiv(left, right, rep), right, rep), + rep); + case WordBinopOp::Kind::kUnsignedMod: + // left % 0 => 0 + // left % 1 => 0 + if (right_value == 0 || right_value == 1) { + return this->WordConstant(0, rep); + } + // x % 2^n => x & (2^n - 1) + if (base::bits::IsPowerOfTwo(right_value)) { + return this->WordBitwiseAnd( + left, this->WordConstant(right_value - 1, rep), rep); + } + // The `UintDiv` with a constant right-hand side will be turned into a + // multiplication, avoiding the expensive integer division. + return this->WordSub( + left, this->WordMul(right, this->UintDiv(left, right, rep), rep), + rep); + case WordBinopOp::Kind::kSignedMulOverflownBits: + case WordBinopOp::Kind::kUnsignedMulOverflownBits: + break; + } + } + + if (kind == Kind::kAdd) { + OpIndex x, y, zero; + // (0 - x) + y => y - x + if (this->MatchWordSub(left, &zero, &x, rep) && this->MatchZero(zero)) { + y = right; + return this->WordSub(y, x, rep); + } + // x + (0 - y) => x - y + if (this->MatchWordSub(right, &zero, &y, rep) && this->MatchZero(zero)) { + x = left; + return this->WordSub(x, y, rep); + } + } + + // 0 / right => 0 + // 0 % right => 0 + if (this->MatchZero(left) && + kind == any_of(Kind::kSignedDiv, Kind::kUnsignedDiv, Kind::kUnsignedMod, + Kind::kSignedMod)) { + return this->WordConstant(0, rep); + } + + if (left == right) { + OpIndex x = left; + switch (kind) { + // x & x => x + // x | x => x + case WordBinopOp::Kind::kBitwiseAnd: + case WordBinopOp::Kind::kBitwiseOr: + return x; + // x ^ x => 0 + // x - x => 0 + // x % x => 0 + case WordBinopOp::Kind::kBitwiseXor: + case WordBinopOp::Kind::kSub: + case WordBinopOp::Kind::kSignedMod: + case WordBinopOp::Kind::kUnsignedMod: + return this->WordConstant(0, rep); + // x / x => x != 0 + case WordBinopOp::Kind::kSignedDiv: + case WordBinopOp::Kind::kUnsignedDiv: { + OpIndex zero = this->WordConstant(0, rep); + return this->Equal(this->Equal(left, zero, rep), zero, rep); + } + case WordBinopOp::Kind::kAdd: + case WordBinopOp::Kind::kMul: + case WordBinopOp::Kind::kSignedMulOverflownBits: + case WordBinopOp::Kind::kUnsignedMulOverflownBits: + break; + } + } + + if (base::Optional<OpIndex> ror = TryReduceToRor(left, right, kind, rep)) { + return *ror; + } + + return Base::WordBinop(left, right, kind, rep); + } + + base::Optional<OpIndex> TryReduceToRor(OpIndex left, OpIndex right, + WordBinopOp::Kind kind, + WordRepresentation rep) { + // Recognize rotation, we are matching and transforming as follows + // (assuming kWord32, kWord64 is handled correspondingly): + // x << y | x >>> (32 - y) => x ror (32 - y) + // x << (32 - y) | x >>> y => x ror y + // x << y ^ x >>> (32 - y) => x ror (32 - y) if 1 <= y < 32 + // x <<
(32 - y) ^ x >>> y => x ror y if 1 <= y < 32 + // (As well as the commuted forms.) + // Note the side condition for XOR: the optimization doesn't hold for + // an effective rotation amount of 0. + + if (!(kind == any_of(WordBinopOp::Kind::kBitwiseOr, + WordBinopOp::Kind::kBitwiseXor))) { + return {}; + } + + const ShiftOp* high = TryCast<ShiftOp>(left); + if (!high) return {}; + const ShiftOp* low = TryCast<ShiftOp>(right); + if (!low) return {}; + + if (low->kind == ShiftOp::Kind::kShiftLeft) { + std::swap(low, high); + } + if (high->kind != ShiftOp::Kind::kShiftLeft || + low->kind != ShiftOp::Kind::kShiftRightLogical) { + return {}; + } + OpIndex x = high->left(); + if (low->left() != x) return {}; + OpIndex amount; + uint64_t k; + if (OpIndex a, b; this->MatchWordSub(high->right(), &a, &b, rep) && + this->MatchWordConstant(a, rep, &k) && + b == low->right() && k == rep.bit_width()) { + amount = b; + } else if (OpIndex a, b; this->MatchWordSub(low->right(), &a, &b, rep) && + a == high->right() && + this->MatchWordConstant(b, rep, &k) && + k == rep.bit_width()) { + amount = low->right(); + } else if (uint64_t k1, k2; + this->MatchWordConstant(high->right(), rep, &k1) && + this->MatchWordConstant(low->right(), rep, &k2) && + k1 + k2 == rep.bit_width() && k1 >= 0 && k2 >= 0) { + if (k1 == 0 || k2 == 0) { + if (kind == WordBinopOp::Kind::kBitwiseXor) { + return this->WordConstant(0, rep); + } else { + DCHECK_EQ(kind, WordBinopOp::Kind::kBitwiseOr); + return x; + } + } + return this->RotateRight(x, low->right(), rep); + } else { + return {}; + } + if (kind == WordBinopOp::Kind::kBitwiseOr) { + return this->RotateRight(x, amount, rep); + } else { + DCHECK_EQ(kind, WordBinopOp::Kind::kBitwiseXor); + // Can't guarantee that rotation amount is not 0. + return {}; + } + } + + OpIndex Projection(OpIndex tuple, uint16_t index) { + if (auto* tuple_op = TryCast<TupleOp>(tuple)) { + return tuple_op->input(index); + } + return Base::Projection(tuple, index); + } + + OpIndex OverflowCheckedBinop(OpIndex left, OpIndex right, + OverflowCheckedBinopOp::Kind kind, + WordRepresentation rep) { + if (ShouldSkipOptimizationStep()) { + return Base::OverflowCheckedBinop(left, right, kind, rep); + } + using Kind = OverflowCheckedBinopOp::Kind; + if (OverflowCheckedBinopOp::IsCommutative(kind) && Is<ConstantOp>(left) && + !Is<ConstantOp>(right)) { + return OverflowCheckedBinop(right, left, kind, rep); + } + if (rep == WordRepresentation::Word32()) { + left = TryRemoveWord32ToWord64Conversion(left); + right = TryRemoveWord32ToWord64Conversion(right); + } + // constant folding + if (rep == WordRepresentation::Word32()) { + if (int32_t k1, k2; this->MatchWord32Constant(left, &k1) && + this->MatchWord32Constant(right, &k2)) { + bool overflow; + int32_t res; + switch (kind) { + case OverflowCheckedBinopOp::Kind::kSignedAdd: + overflow = base::bits::SignedAddOverflow32(k1, k2, &res); + break; + case OverflowCheckedBinopOp::Kind::kSignedMul: + overflow = base::bits::SignedMulOverflow32(k1, k2, &res); + break; + case OverflowCheckedBinopOp::Kind::kSignedSub: + overflow = base::bits::SignedSubOverflow32(k1, k2, &res); + break; + } + return this->Tuple(this->Word32Constant(res), + this->Word32Constant(overflow)); + } + } else { + DCHECK_EQ(rep, WordRepresentation::Word64()); + if (int64_t k1, k2; this->MatchWord64Constant(left, &k1) && + this->MatchWord64Constant(right, &k2)) { + bool overflow; + int64_t res; + switch (kind) { + case OverflowCheckedBinopOp::Kind::kSignedAdd: + overflow = base::bits::SignedAddOverflow64(k1,
k2, &res); + break; + case OverflowCheckedBinopOp::Kind::kSignedMul: + UNREACHABLE(); + case OverflowCheckedBinopOp::Kind::kSignedSub: + overflow = base::bits::SignedSubOverflow64(k1, k2, &res); + break; + } + return this->Tuple(this->Word64Constant(res), + this->Word32Constant(overflow)); + } + } + + // left + 0 => (left, false) + // left - 0 => (left, false) + if (kind == any_of(Kind::kSignedAdd, Kind::kSignedSub) && + this->MatchZero(right)) { + return this->Tuple(left, right); + } + + if (kind == Kind::kSignedMul) { + if (int64_t k; this->MatchWordConstant(right, rep, &k)) { + // left * 0 => (0, false) + if (k == 0) { + return this->Tuple(this->WordConstant(0, rep), + this->Word32Constant(false)); + } + // left * 1 => (left, false) + if (k == 1) { + return this->Tuple(left, this->Word32Constant(false)); + } + // left * -1 => 0 - left + if (k == -1) { + return this->IntSubCheckOverflow(this->WordConstant(0, rep), left, + rep); + } + // left * 2 => left + left + if (k == 2) { + return this->IntAddCheckOverflow(left, left, rep); + } + } + } + + return Base::OverflowCheckedBinop(left, right, kind, rep); + } + + OpIndex Equal(OpIndex left, OpIndex right, RegisterRepresentation rep) { + if (ShouldSkipOptimizationStep()) return Base::Equal(left, right, rep); + if (left == right && !rep.IsFloat()) { + return this->Word32Constant(1); + } + if (rep == WordRepresentation::Word32()) { + left = TryRemoveWord32ToWord64Conversion(left); + right = TryRemoveWord32ToWord64Conversion(right); + } + if (Is<ConstantOp>(left) && !Is<ConstantOp>(right)) { + return Equal(right, left, rep); + } + if (Is<ConstantOp>(right)) { + if (Is<ConstantOp>(left)) { + // k1 == k2 => k + switch (rep) { + case RegisterRepresentation::Word32(): + case RegisterRepresentation::Word64(): { + if (uint64_t k1, k2; + this->MatchWordConstant(left, WordRepresentation(rep), &k1) && + this->MatchWordConstant(right, WordRepresentation(rep), &k2)) { + return this->Word32Constant(k1 == k2); + } + break; + } + case RegisterRepresentation::Float32(): { + if (float k1, k2; this->MatchFloat32Constant(left, &k1) && + this->MatchFloat32Constant(right, &k2)) { + return this->Word32Constant(k1 == k2); + } + break; + } + case RegisterRepresentation::Float64(): { + if (double k1, k2; this->MatchFloat64Constant(left, &k1) && + this->MatchFloat64Constant(right, &k2)) { + return this->Word32Constant(k1 == k2); + } + break; + } + default: + UNREACHABLE(); + } + } + if (rep.IsWord()) { + WordRepresentation rep_w{rep}; + // x - y == 0 => x == y + if (OpIndex x, y; + this->MatchWordSub(left, &x, &y, rep_w) && this->MatchZero(right)) { + return Equal(x, y, rep); + } + { + // ((x >> shift_amount) & mask) == k + // => (x & (mask << shift_amount)) == (k << shift_amount) + OpIndex shift, x, mask_op; + int shift_amount; + uint64_t mask, k; + if (this->MatchBitwiseAnd(left, &shift, &mask_op, rep_w) && + this->MatchConstantRightShift(shift, &x, rep_w, &shift_amount) && + this->MatchWordConstant(mask_op, rep_w, &mask) && + this->MatchWordConstant(right, rep_w, &k) && + mask <= rep.MaxUnsignedValue() >> shift_amount && + k <= rep.MaxUnsignedValue() >> shift_amount) { + return Equal( + this->WordBitwiseAnd( + x, this->Word64Constant(mask << shift_amount), rep_w), + this->Word64Constant(k << shift_amount), rep_w); + } + } + { + // (x >> k1) == k2 => x == (k2 << k1) if shifts reversible + // Only perform the transformation if the shift is not used yet, to + // avoid keeping both the shift and x alive. 
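+        // Worked example (Word32): (x >> 4) == 7, where the shift is known to
+        // shift out only zeros, can hold only for x == (7 << 4) == 112, so the
+        // shift is folded into the right-hand constant.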
+ OpIndex x; + uint16_t k1; + int64_t k2; + if (this->MatchConstantShiftRightArithmeticShiftOutZeros( + left, &x, rep_w, &k1) && + this->MatchWordConstant(right, rep_w, &k2) && + CountLeadingSignBits(k2, rep_w) > k1 && + Get(left).saturated_use_count == 0) { + return this->Equal( + x, this->WordConstant(base::bits::Unsigned(k2) << k1, rep_w), + rep_w); + } + } + } + } + return Base::Equal(left, right, rep); + } + + OpIndex Comparison(OpIndex left, OpIndex right, ComparisonOp::Kind kind, + RegisterRepresentation rep) { + if (ShouldSkipOptimizationStep()) { + return Base::Comparison(left, right, kind, rep); + } + if (rep == WordRepresentation::Word32()) { + left = TryRemoveWord32ToWord64Conversion(left); + right = TryRemoveWord32ToWord64Conversion(right); + } + using Kind = ComparisonOp::Kind; + if (left == right && + !(rep == any_of(RegisterRepresentation::Float32(), + RegisterRepresentation::Float64())) && + kind == any_of(Kind::kSignedLessThanOrEqual, + Kind::kUnsignedLessThanOrEqual)) { + switch (kind) { + case Kind::kUnsignedLessThanOrEqual: + case Kind::kSignedLessThanOrEqual: + return this->Word32Constant(1); + case Kind::kUnsignedLessThan: + case Kind::kSignedLessThan: + return this->Word32Constant(0); + } + } + // constant folding + if (Is<ConstantOp>(right) && Is<ConstantOp>(left)) { + switch (rep) { + case RegisterRepresentation::Word32(): + case RegisterRepresentation::Word64(): { + if (kind == + any_of(Kind::kSignedLessThan, Kind::kSignedLessThanOrEqual)) { + if (int64_t k1, k2; + this->MatchWordConstant(left, WordRepresentation(rep), &k1) && + this->MatchWordConstant(right, WordRepresentation(rep), &k2)) { + switch (kind) { + case ComparisonOp::Kind::kSignedLessThan: + return this->Word32Constant(k1 < k2); + case ComparisonOp::Kind::kSignedLessThanOrEqual: + return this->Word32Constant(k1 <= k2); + case ComparisonOp::Kind::kUnsignedLessThan: + case ComparisonOp::Kind::kUnsignedLessThanOrEqual: + UNREACHABLE(); + } + } + } else { + if (uint64_t k1, k2; + this->MatchWordConstant(left, WordRepresentation(rep), &k1) && + this->MatchWordConstant(right, WordRepresentation(rep), &k2)) { + switch (kind) { + case ComparisonOp::Kind::kUnsignedLessThan: + return this->Word32Constant(k1 < k2); + case ComparisonOp::Kind::kUnsignedLessThanOrEqual: + return this->Word32Constant(k1 <= k2); + case ComparisonOp::Kind::kSignedLessThan: + case ComparisonOp::Kind::kSignedLessThanOrEqual: + UNREACHABLE(); + } + } + } + break; + } + case RegisterRepresentation::Float32(): { + if (float k1, k2; this->MatchFloat32Constant(left, &k1) && + this->MatchFloat32Constant(right, &k2)) { + switch (kind) { + case ComparisonOp::Kind::kSignedLessThan: + return this->Word32Constant(k1 < k2); + case ComparisonOp::Kind::kSignedLessThanOrEqual: + return this->Word32Constant(k1 <= k2); + case ComparisonOp::Kind::kUnsignedLessThan: + case ComparisonOp::Kind::kUnsignedLessThanOrEqual: + UNREACHABLE(); + } + } + break; + } + case RegisterRepresentation::Float64(): { + if (double k1, k2; this->MatchFloat64Constant(left, &k1) && + this->MatchFloat64Constant(right, &k2)) { + switch (kind) { + case ComparisonOp::Kind::kSignedLessThan: + return this->Word32Constant(k1 < k2); + case ComparisonOp::Kind::kSignedLessThanOrEqual: + return this->Word32Constant(k1 <= k2); + case ComparisonOp::Kind::kUnsignedLessThan: + case ComparisonOp::Kind::kUnsignedLessThanOrEqual: + UNREACHABLE(); + } + } + break; + } + default: + UNREACHABLE(); + } + } + if (rep == RegisterRepresentation::Float64() && + IsFloat32ConvertedToFloat64(left) && + 
IsFloat32ConvertedToFloat64(right)) { + return this->Comparison(UndoFloat32ToFloat64Conversion(left), + UndoFloat32ToFloat64Conversion(right), kind, + RegisterRepresentation::Float32()); + } + if (rep.IsWord()) { + WordRepresentation rep_w{rep}; + if (kind == Kind::kUnsignedLessThanOrEqual) { + // 0 <= x => true + if (uint64_t k; this->MatchWordConstant(left, rep_w, &k) && k == 0) { + return this->Word32Constant(1); + } + // x <= MaxUint => true + if (uint64_t k; this->MatchWordConstant(right, rep_w, &k) && + k == rep.MaxUnsignedValue()) { + return this->Word32Constant(1); + } + } + if (kind == Kind::kUnsignedLessThan) { + // x < 0 => false + if (uint64_t k; this->MatchWordConstant(right, rep_w, &k) && k == 0) { + return this->Word32Constant(0); + } + // MaxUint < x => false + if (uint64_t k; this->MatchWordConstant(left, rep_w, &k) && + k == rep.MaxUnsignedValue()) { + return this->Word32Constant(0); + } + } + { + // (x >> k) </<= (y >> k) => x </<= y if shifts reversible + OpIndex x, y; + uint16_t k1, k2; + if (this->MatchConstantShiftRightArithmeticShiftOutZeros(left, &x, + rep_w, &k1) && + this->MatchConstantShiftRightArithmeticShiftOutZeros(right, &y, + rep_w, &k2) && + k1 == k2) { + return this->Comparison(x, y, kind, rep_w); + } + } + { + // (x >> k1) </<= k2 => x </<= (k2 << k1) if shifts reversible + // Only perform the transformation if the shift is not used yet, to + // avoid keeping both the shift and x alive. + OpIndex x; + uint16_t k1; + int64_t k2; + if (this->MatchConstantShiftRightArithmeticShiftOutZeros(left, &x, + rep_w, &k1) && + this->MatchWordConstant(right, rep_w, &k2) && + CountLeadingSignBits(k2, rep_w) > k1 && + Get(left).saturated_use_count == 0) { + return this->Comparison( + x, this->WordConstant(base::bits::Unsigned(k2) << k1, rep_w), + kind, rep_w); + } + // k2 </<= (x >> k1) => (k2 << k1) </<= x if shifts reversible + // Only perform the transformation if the shift is not used yet, to + // avoid keeping both the shift and x alive. + if (this->MatchConstantShiftRightArithmeticShiftOutZeros(right, &x, + rep_w, &k1) && + this->MatchWordConstant(left, rep_w, &k2) && + CountLeadingSignBits(k2, rep_w) > k1 && + Get(right).saturated_use_count == 0) { + return this->Comparison( + this->WordConstant(base::bits::Unsigned(k2) << k1, rep_w), x, + kind, rep_w); + } + } + // Map 64bit to 32bit comparisons. + if (rep_w == WordRepresentation::Word64()) { + base::Optional<bool> left_sign_extended; + base::Optional<bool> right_sign_extended; + if (IsWord32ConvertedToWord64(left, &left_sign_extended) && + IsWord32ConvertedToWord64(right, &right_sign_extended)) { + if (left_sign_extended != true && right_sign_extended != true) { + // Both sides were zero-extended, so the resulting comparison always + // behaves unsigned even if it was a signed 64bit comparison. + return this->Comparison(UndoWord32ToWord64Conversion(left), + UndoWord32ToWord64Conversion(right), + ComparisonOp::SetSigned(kind, false), + WordRepresentation::Word32()); + } else if (left_sign_extended != false && + right_sign_extended != false) { + // Both sides were sign-extended, which preserves both signed and + // unsigned comparisons.
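+            // Example: for two sign-extended Word32 values, the Word64 images
+            // keep the same relative order under both signed and unsigned
+            // interpretation, so the comparison kind can stay as it is.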
+ return this->Comparison(UndoWord32ToWord64Conversion(left), + UndoWord32ToWord64Conversion(right), kind, + WordRepresentation::Word32()); + } + } + } + } + return Base::Comparison(left, right, kind, rep); + } + + OpIndex Shift(OpIndex left, OpIndex right, ShiftOp::Kind kind, + WordRepresentation rep) { + if (ShouldSkipOptimizationStep()) { + return Base::Shift(left, right, kind, rep); + } + using Kind = ShiftOp::Kind; + uint64_t c_unsigned; + int64_t c_signed; + if (this->MatchWordConstant(left, rep, &c_unsigned, &c_signed)) { + if (uint32_t amount; this->MatchWord32Constant(right, &amount)) { + amount = amount & (rep.bit_width() - 1); + switch (kind) { + case Kind::kShiftRightArithmeticShiftOutZeros: + if (base::bits::CountTrailingZeros(c_signed) < amount) { + // This assumes that we never hoist operations to before their + // original place in the control flow. + return this->Unreachable(); + } + [[fallthrough]]; + case Kind::kShiftRightArithmetic: + return this->WordConstant(c_signed >> amount, rep); + case Kind::kShiftRightLogical: + return this->WordConstant(c_unsigned >> amount, rep); + case Kind::kShiftLeft: + return this->WordConstant(c_unsigned << amount, rep); + case Kind::kRotateRight: + if (rep == WordRepresentation::Word32()) { + return this->WordConstant( + base::bits::RotateRight32(static_cast<uint32_t>(c_unsigned), + amount), + rep); + } else { + return this->WordConstant( + base::bits::RotateRight64(c_unsigned, amount), rep); + } + case Kind::kRotateLeft: + if (rep == WordRepresentation::Word32()) { + return this->WordConstant( + base::bits::RotateLeft32(static_cast<uint32_t>(c_unsigned), + amount), + rep); + } else { + return this->WordConstant( + base::bits::RotateLeft64(c_unsigned, amount), rep); + } + } + } + } + if (int32_t amount; this->MatchWord32Constant(right, &amount) && + 0 <= amount && amount < rep.bit_width()) { + if (amount == 0) { + return left; + } + if (kind == Kind::kShiftLeft) { + // If x >> K only shifted out zeros: + // (x >> K) << L => x if K == L + // (x >> K) << L => x >> (K-L) if K > L + // (x >> K) << L => x << (L-K) if K < L + // Since this is used for Smi untagging, we currently only need it for + // signed shifts. 
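+        // Worked example (Word32 Smi untagging): (x >> 1) << 1, where the
+        // right shift shifted out only zeros, is just x, and (x >> 3) << 1
+        // becomes x >> 2.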
+ int k; + OpIndex x; + if (this->MatchConstantShift( + left, &x, Kind::kShiftRightArithmeticShiftOutZeros, rep, &k)) { + int32_t l = amount; + if (k == l) { + return x; + } else if (k > l) { + return this->ShiftRightArithmeticShiftOutZeros( + x, this->Word32Constant(k - l), rep); + } else if (k < l) { + return this->ShiftLeft(x, this->Word32Constant(l - k), rep); + } + } + // (x >>> K) << K => x & ~(2^K - 1) + // (x >> K) << K => x & ~(2^K - 1) + if (this->MatchConstantRightShift(left, &x, rep, &k) && k == amount) { + return this->WordBitwiseAnd( + x, this->WordConstant(rep.MaxUnsignedValue() << k, rep), rep); + } + } + if (kind == any_of(Kind::kShiftRightArithmetic, + Kind::kShiftRightArithmeticShiftOutZeros)) { + OpIndex x; + int left_shift_amount; + // (x << k) >> k + if (this->MatchConstantShift(left, &x, ShiftOp::Kind::kShiftLeft, rep, + &left_shift_amount) && + amount == left_shift_amount) { + // x << (bit_width - 1) >> (bit_width - 1) => 0 - x if x is 0 or 1 + if (amount == rep.bit_width() - 1 && IsBit(x)) { + return this->WordSub(this->WordConstant(0, rep), x, rep); + } + // x << (bit_width - 8) >> (bit_width - 8) => x if x is within Int8 + if (amount <= rep.bit_width() - 8 && IsInt8(x)) { + return x; + } + // x << (bit_width - 16) >> (bit_width - 16) => x if x is within Int16 + if (amount <= rep.bit_width() - 16 && IsInt16(x)) { + return x; + } + } + } + if (rep == WordRepresentation::Word32() && + SupportedOperations::word32_shift_is_safe()) { + // Remove the explicit 'and' with 0x1F if the shift provided by the + // machine instruction matches that required by JavaScript. + if (OpIndex a, b; this->MatchBitwiseAnd(right, &a, &b, + WordRepresentation::Word32())) { + static_assert(0x1f == WordRepresentation::Word32().bit_width() - 1); + if (uint32_t b_value; + this->MatchWord32Constant(b, &b_value) && b_value == 0x1f) { + return this->Shift(left, a, kind, rep); + } + } + } + } + return Base::Shift(left, right, kind, rep); + } + + OpIndex Branch(OpIndex condition, Block* if_true, Block* if_false) { + if (ShouldSkipOptimizationStep()) { + return Base::Branch(condition, if_true, if_false); + } + if (base::Optional<bool> decision = DecideBranchCondition(condition)) { + return this->Goto(*decision ? if_true : if_false); + } + bool negated = false; + condition = ReduceBranchCondition(condition, &negated); + if (negated) std::swap(if_true, if_false); + return Base::Branch(condition, if_true, if_false); + } + + OpIndex DeoptimizeIf(OpIndex condition, OpIndex frame_state, bool negated, + const DeoptimizeParameters* parameters) { + if (ShouldSkipOptimizationStep()) { + return Base::DeoptimizeIf(condition, frame_state, negated, parameters); + } + if (base::Optional<bool> decision = DecideBranchCondition(condition)) { + if (*decision != negated) { + this->Deoptimize(frame_state, parameters); + } + // `DeoptimizeIf` doesn't produce a value.
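+      // Either a Deoptimize was emitted unconditionally above, or the
+      // condition is statically known not to trigger; in both cases the
+      // original DeoptimizeIf disappears.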
+ return OpIndex::Invalid(); + } + condition = ReduceBranchCondition(condition, &negated); + return Base::DeoptimizeIf(condition, frame_state, negated, parameters); + } + + OpIndex Store(OpIndex base, OpIndex value, StoreOp::Kind kind, + MemoryRepresentation stored_rep, WriteBarrierKind write_barrier, + int32_t offset) { + if (ShouldSkipOptimizationStep()) { + return Base::Store(base, value, kind, stored_rep, write_barrier, offset); + } + return IndexedStore(base, OpIndex::Invalid(), value, kind, stored_rep, + write_barrier, offset, 0); + } + + OpIndex IndexedStore(OpIndex base, OpIndex index, OpIndex value, + IndexedStoreOp::Kind kind, + MemoryRepresentation stored_rep, + WriteBarrierKind write_barrier, int32_t offset, + uint8_t element_scale) { + if (!ShouldSkipOptimizationStep()) { + if (stored_rep.SizeInBytes() <= 4) { + value = TryRemoveWord32ToWord64Conversion(value); + } + index = ReduceMemoryIndex(index, &offset, &element_scale); + switch (stored_rep) { + case MemoryRepresentation::Uint8(): + case MemoryRepresentation::Int8(): + value = + ReduceWithTruncation(value, std::numeric_limits<uint8_t>::max(), + WordRepresentation::Word32()); + break; + case MemoryRepresentation::Uint16(): + case MemoryRepresentation::Int16(): + value = + ReduceWithTruncation(value, std::numeric_limits<uint16_t>::max(), + WordRepresentation::Word32()); + break; + case MemoryRepresentation::Uint32(): + case MemoryRepresentation::Int32(): + value = + ReduceWithTruncation(value, std::numeric_limits<uint32_t>::max(), + WordRepresentation::Word32()); + break; + default: + break; + } + } + if (index.valid()) { + return Base::IndexedStore(base, index, value, kind, stored_rep, + write_barrier, offset, element_scale); + } else { + return Base::Store(base, value, kind, stored_rep, write_barrier, offset); + } + } + + OpIndex Load(OpIndex base, LoadOp::Kind kind, MemoryRepresentation loaded_rep, + RegisterRepresentation result_rep, int32_t offset) { + if (ShouldSkipOptimizationStep()) + return Base::Load(base, kind, loaded_rep, result_rep, offset); + return IndexedLoad(base, OpIndex::Invalid(), kind, loaded_rep, result_rep, + offset, 0); + } + + OpIndex IndexedLoad(OpIndex base, OpIndex index, IndexedLoadOp::Kind kind, + MemoryRepresentation loaded_rep, + RegisterRepresentation result_rep, int32_t offset, + uint8_t element_scale) { + while (true) { + if (ShouldSkipOptimizationStep()) break; + index = ReduceMemoryIndex(index, &offset, &element_scale); + if (kind != IndexedLoadOp::Kind::kTaggedBase && !index.valid()) { + if (OpIndex left, right; + this->MatchWordAdd(base, &left, &right, + WordRepresentation::PointerSized()) && + TryAdjustOffset(&offset, Get(right), element_scale)) { + base = left; + continue; + } + } + break; + } + if (index.valid()) { + return Base::IndexedLoad(base, index, kind, loaded_rep, result_rep, + offset, element_scale); + } else { + return Base::Load(base, kind, loaded_rep, result_rep, offset); + } + } + + private: + bool TryAdjustOffset(int32_t* offset, const Operation& maybe_constant, + uint8_t element_scale) { + if (!maybe_constant.Is<ConstantOp>()) return false; + const ConstantOp& constant = maybe_constant.Cast<ConstantOp>(); + int64_t diff = constant.signed_integral(); + int32_t new_offset; + if (diff <= (std::numeric_limits<int32_t>::max() >> element_scale) && + diff >= (std::numeric_limits<int32_t>::min() >> element_scale) && + !base::bits::SignedAddOverflow32( + *offset, + static_cast<int32_t>(base::bits::Unsigned(diff) << element_scale), + &new_offset)) { + *offset = new_offset; + 
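+      // The constant, scaled by the element size, merged into the 32-bit
+      // offset without overflowing, so the add can be folded away.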
return true; + } + return false; + } + + bool TryAdjustElementScale(uint8_t* element_scale, OpIndex maybe_constant) { + uint64_t diff; + if (!this->MatchWordConstant(maybe_constant, + WordRepresentation::PointerSized(), &diff)) { + return false; + } + DCHECK_LT(*element_scale, WordRepresentation::PointerSized().bit_width()); + if (diff < (WordRepresentation::PointerSized().bit_width() - + uint64_t{*element_scale})) { + *element_scale += diff; + return true; + } + return false; + } + + // Fold away operations in the computation of `index` while preserving the + // value of `(index << element_scale) + offset` by updating `offset`, + // `element_scale` and returning the updated `index`. + // Return `OpIndex::Invalid()` if the resulting index is zero. + OpIndex ReduceMemoryIndex(OpIndex index, int32_t* offset, + uint8_t* element_scale) { + while (index.valid()) { + const Operation& index_op = Get(index); + if (TryAdjustOffset(offset, index_op, *element_scale)) { + index = OpIndex::Invalid(); + *element_scale = 0; + } else if (const ShiftOp* shift_op = index_op.TryCast<ShiftOp>()) { + if (shift_op->kind == ShiftOp::Kind::kShiftLeft && + TryAdjustElementScale(element_scale, shift_op->right())) { + index = shift_op->left(); + continue; + } + } else if (const WordBinopOp* binary_op = + index_op.TryCast<WordBinopOp>()) { + if (binary_op->kind == WordBinopOp::Kind::kAdd && + TryAdjustOffset(offset, Get(binary_op->right()), *element_scale)) { + index = binary_op->left(); + continue; + } + } + break; + } + return index; + } + + bool IsFloat32ConvertedToFloat64(OpIndex value) { + if (OpIndex input; + this->MatchChange(value, &input, ChangeOp::Kind::kFloatConversion, + RegisterRepresentation::Float32(), + RegisterRepresentation::Float64())) { + return true; + } + if (double c; + this->MatchFloat64Constant(value, &c) && DoubleToFloat32(c) == c) { + return true; + } + return false; + } + + OpIndex UndoFloat32ToFloat64Conversion(OpIndex value) { + if (OpIndex input; + this->MatchChange(value, &input, ChangeOp::Kind::kFloatConversion, + RegisterRepresentation::Float32(), + RegisterRepresentation::Float64())) { + return input; + } + if (double c; + this->MatchFloat64Constant(value, &c) && DoubleToFloat32(c) == c) { + return this->Float32Constant(DoubleToFloat32(c)); + } + UNREACHABLE(); + } + + bool IsBit(OpIndex value) { + return Is<EqualOp>(value) || Is<ComparisonOp>(value); + } + + bool IsInt8(OpIndex value) { + if (auto* op = TryCast<LoadOp>(value)) { + return op->loaded_rep == MemoryRepresentation::Int8(); + } else if (auto* op = TryCast<IndexedLoadOp>(value)) { + return op->loaded_rep == MemoryRepresentation::Int8(); + } + return false; + } + + bool IsInt16(OpIndex value) { + if (auto* op = TryCast<LoadOp>(value)) { + return op->loaded_rep == any_of(MemoryRepresentation::Int16(), + MemoryRepresentation::Int8()); + } else if (auto* op = TryCast<IndexedLoadOp>(value)) { + return op->loaded_rep == any_of(MemoryRepresentation::Int16(), + MemoryRepresentation::Int8()); + } + return false; + } + + bool IsWord32ConvertedToWord64( + OpIndex value, base::Optional<bool>* sign_extended = nullptr) { + if (const ChangeOp* change_op = TryCast<ChangeOp>(value)) { + if (change_op->from == WordRepresentation::Word32() && + change_op->to == WordRepresentation::Word64()) { + if (change_op->kind == ChangeOp::Kind::kSignExtend) { + if (sign_extended) *sign_extended = true; + return true; + } else if (change_op->kind == ChangeOp::Kind::kZeroExtend) { + if (sign_extended) *sign_extended = false; + return true; + } + } + } +
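+    // Word64 constants can act as converted Word32 values as well; they are
+    // classified by range below: e.g. 0xFFFFFFFF80000000 behaves like a
+    // sign-extended value, 0xFFFFFFFF like a zero-extended one, and small
+    // non-negative values are compatible with either extension.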
if (int64_t c; this->MatchWord64Constant(value, &c) && + c >= std::numeric_limits<int32_t>::min()) { + if (c < 0) { + if (sign_extended) *sign_extended = true; + return true; + } else if (c <= std::numeric_limits<int32_t>::max()) { + // Sign- and zero-extension produce the same result. + if (sign_extended) *sign_extended = {}; + return true; + } else if (c <= std::numeric_limits<uint32_t>::max()) { + if (sign_extended) *sign_extended = false; + return true; + } + } + return false; + } + + OpIndex UndoWord32ToWord64Conversion(OpIndex value) { + DCHECK(IsWord32ConvertedToWord64(value)); + if (const ChangeOp* op = TryCast<ChangeOp>(value)) { + return op->input(); + } + return this->Word32Constant(this->Cast<ConstantOp>(value).word32()); + } + + OpIndex TryRemoveWord32ToWord64Conversion(OpIndex value) { + if (const ChangeOp* op = TryCast<ChangeOp>(value)) { + if (op->from == WordRepresentation::Word32() && + op->to == WordRepresentation::Word64() && + op->kind == any_of(ChangeOp::Kind::kZeroExtend, + ChangeOp::Kind::kSignExtend)) { + return op->input(); + } + } + return value; + } + + uint64_t TruncateWord(uint64_t value, WordRepresentation rep) { + if (rep == WordRepresentation::Word32()) { + return static_cast<uint32_t>(value); + } else { + DCHECK_EQ(rep, WordRepresentation::Word64()); + return value; + } + } + + // Reduce the given value under the assumption that only the bits set in + // `truncation_mask` will be observed. + OpIndex ReduceWithTruncation(OpIndex value, uint64_t truncation_mask, + WordRepresentation rep) { + { // Remove bitwise-and with a mask whose zero-bits are not observed. + OpIndex input, mask; + uint64_t mask_value; + if (this->MatchBitwiseAnd(value, &input, &mask, rep) && + this->MatchWordConstant(mask, rep, &mask_value)) { + if ((mask_value & truncation_mask) == truncation_mask) { + return ReduceWithTruncation(input, truncation_mask, rep); + } + } + } + { + int left_shift_amount; + int right_shift_amount; + WordRepresentation rep; + OpIndex left_shift; + ShiftOp::Kind right_shift_kind; + OpIndex left_shift_input; + if (this->MatchConstantShift(value, &left_shift, &right_shift_kind, &rep, + &right_shift_amount) && + ShiftOp::IsRightShift(right_shift_kind) && + this->MatchConstantShift(left_shift, &left_shift_input, + ShiftOp::Kind::kShiftLeft, rep, + &left_shift_amount) && + ((rep.MaxUnsignedValue() >> right_shift_amount) & truncation_mask) == + truncation_mask) { + if (left_shift_amount == right_shift_amount) { + return left_shift_input; + } else if (left_shift_amount < right_shift_amount) { + OpIndex shift_amount = + this->WordConstant(right_shift_amount - left_shift_amount, rep); + return this->Shift(left_shift_input, shift_amount, right_shift_kind, + rep); + } else if (left_shift_amount > right_shift_amount) { + OpIndex shift_amount = + this->WordConstant(left_shift_amount - right_shift_amount, rep); + return this->Shift(left_shift_input, shift_amount, + ShiftOp::Kind::kShiftLeft, rep); + } + } + } + return value; + } + + OpIndex ReduceSignedDiv(OpIndex left, int64_t right, WordRepresentation rep) { + // left / -1 => 0 - left + if (right == -1) { + return this->WordSub(this->WordConstant(0, rep), left, rep); + } + // left / 0 => 0 + if (right == 0) { + return this->WordConstant(0, rep); + } + // left / 1 => left + if (right == 1) { + return left; + } + // left / MinSignedValue => left == MinSignedValue + if (right == rep.MinSignedValue()) { + OpIndex is_min_signed = + this->Equal(left, this->WordConstant(right, rep), rep); + return rep == WordRepresentation::Word64() + ? this->ChangeUint32ToUint64(is_min_signed) + : is_min_signed; + } + // left / -right =>
-(left / right) + if (right < 0) { + DCHECK_NE(right, rep.MinSignedValue()); + return this->WordSub(this->WordConstant(0, rep), + ReduceSignedDiv(left, Abs(right), rep), rep); + } + + OpIndex quotient = left; + if (base::bits::IsPowerOfTwo(right)) { + uint32_t shift = base::bits::WhichPowerOfTwo(right); + DCHECK_GT(shift, 0); + if (shift > 1) { + quotient = + this->ShiftRightArithmetic(quotient, rep.bit_width() - 1, rep); + } + quotient = + this->ShiftRightLogical(quotient, rep.bit_width() - shift, rep); + quotient = this->WordAdd(quotient, left, rep); + quotient = this->ShiftRightArithmetic(quotient, shift, rep); + return quotient; + } + DCHECK_GT(right, 0); + // Compute the magic number for `right`, using a generic lambda to treat + // 32- and 64-bit uniformly. + auto LowerToMul = [this, left](auto right, WordRepresentation rep) { + base::MagicNumbersForDivision<decltype(right)> magic = + base::SignedDivisionByConstant(right); + OpIndex quotient = this->IntMulOverflownBits( + left, this->WordConstant(magic.multiplier, rep), rep); + if (magic.multiplier < 0) { + quotient = this->WordAdd(quotient, left, rep); + } + OpIndex sign_bit = + this->ShiftRightLogical(left, rep.bit_width() - 1, rep); + return this->WordAdd( + this->ShiftRightArithmetic(quotient, magic.shift, rep), sign_bit, + rep); + }; + if (rep == WordRepresentation::Word32()) { + return LowerToMul(static_cast<int32_t>(right), + WordRepresentation::Word32()); + } else { + DCHECK_EQ(rep, WordRepresentation::Word64()); + return LowerToMul(static_cast<int64_t>(right), + WordRepresentation::Word64()); + } + } + + OpIndex ReduceUnsignedDiv(OpIndex left, uint64_t right, + WordRepresentation rep) { + // left / 0 => 0 + if (right == 0) { + return this->WordConstant(0, rep); + } + // left / 1 => left + if (right == 1) { + return left; + } + // left / 2^k => left >> k + if (base::bits::IsPowerOfTwo(right)) { + return this->ShiftRightLogical(left, base::bits::WhichPowerOfTwo(right), + rep); + } + DCHECK_GT(right, 0); + // If `right` is even, we can avoid using the expensive fixup by + // shifting `left` upfront. + unsigned const shift = base::bits::CountTrailingZeros(right); + left = this->ShiftRightLogical(left, shift, rep); + right >>= shift; + // Compute the magic number for `right`, using a generic lambda to treat + // 32- and 64-bit uniformly.
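+    // Sketch of the strength reduction (illustrative): for Word32 left / 10,
+    // the trailing zero is shifted out first, giving (left >>> 1) / 5; the
+    // magic-number path below then computes that quotient as a multiply-high
+    // followed by shifts, with no division at runtime.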
+ auto LowerToMul = [this, left, shift](auto right, WordRepresentation rep) { + base::MagicNumbersForDivision<decltype(right)> const mag = + base::UnsignedDivisionByConstant(right, shift); + OpIndex quotient = this->UintMulOverflownBits( + left, this->WordConstant(mag.multiplier, rep), rep); + if (mag.add) { + DCHECK_GE(mag.shift, 1); + // quotient = (((left - quotient) >> 1) + quotient) >> (mag.shift - + // 1) + quotient = this->ShiftRightLogical( + this->WordAdd(this->ShiftRightLogical( + this->WordSub(left, quotient, rep), 1, rep), + quotient, rep), + mag.shift - 1, rep); + } else { + quotient = this->ShiftRightLogical(quotient, mag.shift, rep); + } + return quotient; + }; + if (rep == WordRepresentation::Word32()) { + return LowerToMul(static_cast<uint32_t>(right), + WordRepresentation::Word32()); + } else { + DCHECK_EQ(rep, WordRepresentation::Word64()); + return LowerToMul(static_cast<uint64_t>(right), + WordRepresentation::Word64()); + } + } + + OpIndex ReduceBranchCondition(OpIndex condition, bool* negated) { + while (true) { + condition = TryRemoveWord32ToWord64Conversion(condition); + // x == 0 => x with flipped branches + if (OpIndex left, right; this->MatchEqual(condition, &left, &right, + WordRepresentation::Word32()) && + this->MatchZero(right)) { + condition = left; + *negated = !*negated; + continue; + } + // x - y => x == y with flipped branches + if (OpIndex left, right; this->MatchWordSub( + condition, &left, &right, WordRepresentation::Word32())) { + condition = this->Word32Equal(left, right); + *negated = !*negated; + continue; + } + // x & (1 << k) == (1 << k) => x & (1 << k) + if (OpIndex left, right; this->MatchEqual(condition, &left, &right, + WordRepresentation::Word32())) { + OpIndex x, mask; + uint32_t k1, k2; + if (this->MatchBitwiseAnd(left, &x, &mask, + WordRepresentation::Word32()) && + this->MatchWord32Constant(mask, &k1) && + this->MatchWord32Constant(right, &k2) && k1 == k2 && + base::bits::IsPowerOfTwo(k1)) { + condition = left; + continue; + } + } + break; + } + return condition; + } + + base::Optional<bool> DecideBranchCondition(OpIndex condition) { + if (uint32_t value; this->MatchWord32Constant(condition, &value)) { + return value != 0; + } + return base::nullopt; + } + + uint16_t CountLeadingSignBits(int64_t c, WordRepresentation rep) { + return base::bits::CountLeadingSignBits(c) - (64 - rep.bit_width()); + } +}; + +} // namespace v8::internal::compiler::turboshaft + +#endif // V8_COMPILER_TURBOSHAFT_MACHINE_OPTIMIZATION_ASSEMBLER_H_ diff --git a/deps/v8/src/compiler/turboshaft/operation-matching.h b/deps/v8/src/compiler/turboshaft/operation-matching.h new file mode 100644 index 00000000000000..acf0995b38c2af --- /dev/null +++ b/deps/v8/src/compiler/turboshaft/operation-matching.h @@ -0,0 +1,315 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#ifndef V8_COMPILER_TURBOSHAFT_OPERATION_MATCHING_H_ +#define V8_COMPILER_TURBOSHAFT_OPERATION_MATCHING_H_ + +#include "src/compiler/turboshaft/operations.h" + +namespace v8::internal::compiler::turboshaft { + +template <class Assembler> +class OperationMatching { + public: + template <class Op> + bool Is(OpIndex op_idx) { + return assembler().graph().Get(op_idx).template Is<Op>(); + } + + template <class Op> + const Op* TryCast(OpIndex op_idx) { + return assembler().graph().Get(op_idx).template TryCast<Op>(); + } + + template <class Op> + const Op& Cast(OpIndex op_idx) { + return assembler().graph().Get(op_idx).template Cast<Op>(); + } + + const Operation& Get(OpIndex op_idx) { + return assembler().graph().Get(op_idx); + } + + bool MatchZero(OpIndex matched) { + const ConstantOp* op = TryCast<ConstantOp>(matched); + if (!op) return false; + switch (op->kind) { + case ConstantOp::Kind::kWord32: + case ConstantOp::Kind::kWord64: + return op->integral() == 0; + case ConstantOp::Kind::kFloat32: + return op->float32() == 0; + case ConstantOp::Kind::kFloat64: + return op->float64() == 0; + default: + return false; + } + } + + bool MatchFloat32Constant(OpIndex matched, float* constant) { + const ConstantOp* op = TryCast<ConstantOp>(matched); + if (!op) return false; + if (op->kind != ConstantOp::Kind::kFloat32) return false; + *constant = op->float32(); + return true; + } + + bool MatchFloat64Constant(OpIndex matched, double* constant) { + const ConstantOp* op = TryCast<ConstantOp>(matched); + if (!op) return false; + if (op->kind != ConstantOp::Kind::kFloat64) return false; + *constant = op->float64(); + return true; + } + + bool MatchFloat(OpIndex matched, double* value) { + const ConstantOp* op = TryCast<ConstantOp>(matched); + if (!op) return false; + if (op->kind == ConstantOp::Kind::kFloat64) { + *value = op->float64(); + return true; + } else if (op->kind == ConstantOp::Kind::kFloat32) { + *value = op->float32(); + return true; + } + return false; + } + + bool MatchFloat(OpIndex matched, double value) { + double k; + if (!MatchFloat(matched, &k)) return false; + return base::bit_cast<uint64_t>(value) == base::bit_cast<uint64_t>(k) || + (std::isnan(k) && std::isnan(value)); + } + + bool MatchNaN(OpIndex matched) { + double k; + return MatchFloat(matched, &k) && std::isnan(k); + } + + bool MatchWordConstant(OpIndex matched, WordRepresentation rep, + uint64_t* unsigned_constant, + int64_t* signed_constant = nullptr) { + const ConstantOp* op = TryCast<ConstantOp>(matched); + if (!op) return false; + switch (op->Representation()) { + case RegisterRepresentation::Word32(): + if (rep != WordRepresentation::Word32()) return false; + break; + case RegisterRepresentation::Word64(): + if (!(rep == any_of(WordRepresentation::Word64(), + WordRepresentation::Word32()))) { + return false; + } + break; + default: + return false; + } + if (unsigned_constant) *unsigned_constant = op->integral(); + if (signed_constant) *signed_constant = op->signed_integral(); + return true; + } + + bool MatchWordConstant(OpIndex matched, WordRepresentation rep, + int64_t* signed_constant) { + return MatchWordConstant(matched, rep, nullptr, signed_constant); + } + + bool MatchWord64Constant(OpIndex matched, uint64_t* constant) { + return MatchWordConstant(matched, WordRepresentation::Word64(), constant); + } + + bool MatchWord32Constant(OpIndex matched, uint32_t* constant) { + if (uint64_t value; + MatchWordConstant(matched, WordRepresentation::Word32(), &value)) { + *constant = static_cast<uint32_t>(value); + return
true; + } + return false; + } + + bool MatchWord64Constant(OpIndex matched, int64_t* constant) { + return MatchWordConstant(matched, WordRepresentation::Word64(), constant); + } + + bool MatchWord32Constant(OpIndex matched, int32_t* constant) { + if (int64_t value; + MatchWordConstant(matched, WordRepresentation::Word32(), &value)) { + *constant = static_cast<int32_t>(value); + return true; + } + return false; + } + + bool MatchChange(OpIndex matched, OpIndex* input, ChangeOp::Kind kind, + RegisterRepresentation from, RegisterRepresentation to) { + const ChangeOp* op = TryCast<ChangeOp>(matched); + if (!op || op->kind != kind || op->from != from || op->to != to) { + return false; + } + *input = op->input(); + return true; + } + + bool MatchWordBinop(OpIndex matched, OpIndex* left, OpIndex* right, + WordBinopOp::Kind* kind, WordRepresentation* rep) { + const WordBinopOp* op = TryCast<WordBinopOp>(matched); + if (!op) return false; + *kind = op->kind; + *rep = op->rep; + *left = op->left(); + *right = op->right(); + return true; + } + + bool MatchWordBinop(OpIndex matched, OpIndex* left, OpIndex* right, + WordBinopOp::Kind kind, WordRepresentation rep) { + const WordBinopOp* op = TryCast<WordBinopOp>(matched); + if (!op || kind != op->kind) { + return false; + } + if (!(rep == op->rep || + (WordBinopOp::AllowsWord64ToWord32Truncation(kind) && + rep == WordRepresentation::Word32() && + op->rep == WordRepresentation::Word64()))) { + return false; + } + *left = op->left(); + *right = op->right(); + return true; + } + + bool MatchWordAdd(OpIndex matched, OpIndex* left, OpIndex* right, + WordRepresentation rep) { + return MatchWordBinop(matched, left, right, WordBinopOp::Kind::kAdd, rep); + } + + bool MatchWordSub(OpIndex matched, OpIndex* left, OpIndex* right, + WordRepresentation rep) { + return MatchWordBinop(matched, left, right, WordBinopOp::Kind::kSub, rep); + } + + bool MatchBitwiseAnd(OpIndex matched, OpIndex* left, OpIndex* right, + WordRepresentation rep) { + return MatchWordBinop(matched, left, right, WordBinopOp::Kind::kBitwiseAnd, + rep); + } + + bool MatchEqual(OpIndex matched, OpIndex* left, OpIndex* right, + WordRepresentation rep) { + const EqualOp* op = TryCast<EqualOp>(matched); + if (!op || rep != op->rep) return false; + *left = op->left(); + *right = op->right(); + return true; + } + + bool MatchComparison(OpIndex matched, OpIndex* left, OpIndex* right, + ComparisonOp::Kind* kind, RegisterRepresentation* rep) { + const ComparisonOp* op = TryCast<ComparisonOp>(matched); + if (!op) return false; + *kind = op->kind; + *rep = op->rep; + *left = op->left(); + *right = op->right(); + return true; + } + + bool MatchFloatUnary(OpIndex matched, OpIndex* input, FloatUnaryOp::Kind kind, + FloatRepresentation rep) { + const FloatUnaryOp* op = TryCast<FloatUnaryOp>(matched); + if (!op || op->kind != kind || op->rep != rep) return false; + *input = op->input(); + return true; + } + + bool MatchFloatRoundDown(OpIndex matched, OpIndex* input, + FloatRepresentation rep) { + return MatchFloatUnary(matched, input, FloatUnaryOp::Kind::kRoundDown, rep); + } + + bool MatchFloatBinary(OpIndex matched, OpIndex* left, OpIndex* right, + FloatBinopOp::Kind kind, FloatRepresentation rep) { + const FloatBinopOp* op = TryCast<FloatBinopOp>(matched); + if (!op || op->kind != kind || op->rep != rep) return false; + *left = op->left(); + *right = op->right(); + return true; + } + + bool MatchFloatSub(OpIndex matched, OpIndex* left, OpIndex* right, + FloatRepresentation rep) { + return 
MatchFloatBinary(matched, left, right, FloatBinopOp::Kind::kSub, + rep); + } + + bool MatchConstantShift(OpIndex matched, OpIndex* input, ShiftOp::Kind* kind, + WordRepresentation* rep, int* amount) { + const ShiftOp* op = TryCast<ShiftOp>(matched); + if (uint32_t rhs_constant; + op && MatchWord32Constant(op->right(), &rhs_constant) && + rhs_constant < static_cast<uint64_t>(op->rep.bit_width())) { + *input = op->left(); + *kind = op->kind; + *rep = op->rep; + *amount = static_cast<int>(rhs_constant); + return true; + } + return false; + } + + bool MatchConstantShift(OpIndex matched, OpIndex* input, ShiftOp::Kind kind, + WordRepresentation rep, int* amount) { + const ShiftOp* op = TryCast<ShiftOp>(matched); + if (uint32_t rhs_constant; + op && op->kind == kind && + (op->rep == rep || (ShiftOp::AllowsWord64ToWord32Truncation(kind) && + rep == WordRepresentation::Word32() && + op->rep == WordRepresentation::Word64())) && + MatchWord32Constant(op->right(), &rhs_constant) && + rhs_constant < static_cast<uint64_t>(rep.bit_width())) { + *input = op->left(); + *amount = static_cast<int>(rhs_constant); + return true; + } + return false; + } + + bool MatchConstantRightShift(OpIndex matched, OpIndex* input, + WordRepresentation rep, int* amount) { + const ShiftOp* op = TryCast<ShiftOp>(matched); + if (uint32_t rhs_constant; + op && ShiftOp::IsRightShift(op->kind) && op->rep == rep && + MatchWord32Constant(op->right(), &rhs_constant) && + rhs_constant < static_cast<uint32_t>(rep.bit_width())) { + *input = op->left(); + *amount = static_cast<int>(rhs_constant); + return true; + } + return false; + } + + bool MatchConstantShiftRightArithmeticShiftOutZeros(OpIndex matched, + OpIndex* input, + WordRepresentation rep, + uint16_t* amount) { + const ShiftOp* op = TryCast<ShiftOp>(matched); + if (uint32_t rhs_constant; + op && op->kind == ShiftOp::Kind::kShiftRightArithmeticShiftOutZeros && + op->rep == rep && MatchWord32Constant(op->right(), &rhs_constant) && + rhs_constant < static_cast<uint64_t>(rep.bit_width())) { + *input = op->left(); + *amount = static_cast<uint16_t>(rhs_constant); + return true; + } + return false; + } + + private: + Assembler& assembler() { return *static_cast<Assembler*>(this); } +}; + +} // namespace v8::internal::compiler::turboshaft + +#endif // V8_COMPILER_TURBOSHAFT_OPERATION_MATCHING_H_ diff --git a/deps/v8/src/compiler/turboshaft/operations.cc b/deps/v8/src/compiler/turboshaft/operations.cc index 4fd6e0ada20e35..50e9ec2dbe9624 100644 --- a/deps/v8/src/compiler/turboshaft/operations.cc +++ b/deps/v8/src/compiler/turboshaft/operations.cc @@ -8,9 +8,7 @@ #include <sstream> #include "src/base/platform/mutex.h" -#include "src/base/platform/platform.h" #include "src/codegen/machine-type.h" -#include "src/common/assert-scope.h" #include "src/common/globals.h" #include "src/compiler/backend/instruction-selector.h" #include "src/compiler/frame-states.h" @@ -49,6 +47,14 @@ std::ostream& operator<<(std::ostream& os, WordUnaryOp::Kind kind) { return os << "ReverseBytes"; case WordUnaryOp::Kind::kCountLeadingZeros: return os << "CountLeadingZeros"; + case WordUnaryOp::Kind::kCountTrailingZeros: + return os << "CountTrailingZeros"; + case WordUnaryOp::Kind::kPopCount: + return os << "PopCount"; + case WordUnaryOp::Kind::kSignExtend8: + return os << "SignExtend8"; + case WordUnaryOp::Kind::kSignExtend16: + return os << "SignExtend16"; } } @@ -70,8 +76,16 @@ std::ostream& operator<<(std::ostream& os, FloatUnaryOp::Kind kind) { return os << "RoundTiesEven"; case FloatUnaryOp::Kind::kLog: 
return os << "Log"; + case FloatUnaryOp::Kind::kLog2: + return os << "Log2"; + case FloatUnaryOp::Kind::kLog10: + return os << "Log10"; + case FloatUnaryOp::Kind::kLog1p: + return os << "Log1p"; case FloatUnaryOp::Kind::kSqrt: return os << "Sqrt"; + case FloatUnaryOp::Kind::kCbrt: + return os << "Cbrt"; case FloatUnaryOp::Kind::kExp: return os << "Exp"; case FloatUnaryOp::Kind::kExpm1: @@ -96,30 +110,61 @@ std::ostream& operator<<(std::ostream& os, FloatUnaryOp::Kind kind) { return os << "Tan"; case FloatUnaryOp::Kind::kTanh: return os << "Tanh"; + case FloatUnaryOp::Kind::kAtan: + return os << "Atan"; + case FloatUnaryOp::Kind::kAtanh: + return os << "Atanh"; } } // static -bool FloatUnaryOp::IsSupported(Kind kind, MachineRepresentation rep) { +bool FloatUnaryOp::IsSupported(Kind kind, FloatRepresentation rep) { + switch (rep.value()) { + case FloatRepresentation::Float32(): + switch (kind) { + case Kind::kRoundDown: + return SupportedOperations::float32_round_down(); + case Kind::kRoundUp: + return SupportedOperations::float32_round_up(); + case Kind::kRoundToZero: + return SupportedOperations::float32_round_to_zero(); + case Kind::kRoundTiesEven: + return SupportedOperations::float32_round_ties_even(); + default: + return true; + } + case FloatRepresentation::Float64(): + switch (kind) { + case Kind::kRoundDown: + return SupportedOperations::float64_round_down(); + case Kind::kRoundUp: + return SupportedOperations::float64_round_up(); + case Kind::kRoundToZero: + return SupportedOperations::float64_round_to_zero(); + case Kind::kRoundTiesEven: + return SupportedOperations::float64_round_ties_even(); + default: + return true; + } + } +} + +// static +bool WordUnaryOp::IsSupported(Kind kind, WordRepresentation rep) { switch (kind) { - case Kind::kRoundDown: - return rep == MachineRepresentation::kFloat32 - ? SupportedOperations::float32_round_down() - : SupportedOperations::float64_round_down(); - case Kind::kRoundUp: - return rep == MachineRepresentation::kFloat32 - ? SupportedOperations::float32_round_up() - : SupportedOperations::float64_round_up(); - case Kind::kRoundToZero: - return rep == MachineRepresentation::kFloat32 - ? SupportedOperations::float32_round_to_zero() - : SupportedOperations::float64_round_to_zero(); - case Kind::kRoundTiesEven: - return rep == MachineRepresentation::kFloat32 - ? SupportedOperations::float32_round_ties_even() - : SupportedOperations::float64_round_ties_even(); - default: + case Kind::kCountLeadingZeros: + case Kind::kReverseBytes: + case Kind::kSignExtend8: + case Kind::kSignExtend16: return true; + case Kind::kCountTrailingZeros: + return rep == WordRepresentation::Word32() + ? SupportedOperations::word32_ctz() + : SupportedOperations::word64_ctz(); + case Kind::kPopCount: + return rep == WordRepresentation::Word32() + ? 
SupportedOperations::word32_popcnt() + : SupportedOperations::word64_popcnt(); } } @@ -155,18 +200,14 @@ std::ostream& operator<<(std::ostream& os, ComparisonOp::Kind kind) { std::ostream& operator<<(std::ostream& os, ChangeOp::Kind kind) { switch (kind) { - case ChangeOp::Kind::kSignedNarrowing: - return os << "SignedNarrowing"; - case ChangeOp::Kind::kUnsignedNarrowing: - return os << "UnsignedNarrowing"; case ChangeOp::Kind::kFloatConversion: return os << "FloatConversion"; - case ChangeOp::Kind::kSignedFloatTruncate: - return os << "SignedFloatTruncate"; case ChangeOp::Kind::kJSFloatTruncate: return os << "JSFloatTruncate"; case ChangeOp::Kind::kSignedFloatTruncateOverflowToMin: return os << "SignedFloatTruncateOverflowToMin"; + case ChangeOp::Kind::kUnsignedFloatTruncateOverflowToMin: + return os << "UnsignedFloatTruncateOverflowToMin"; case ChangeOp::Kind::kSignedToFloat: return os << "SignedToFloat"; case ChangeOp::Kind::kUnsignedToFloat: @@ -184,6 +225,26 @@ std::ostream& operator<<(std::ostream& os, ChangeOp::Kind kind) { } } +std::ostream& operator<<(std::ostream& os, TryChangeOp::Kind kind) { + switch (kind) { + case TryChangeOp::Kind::kSignedFloatTruncateOverflowUndefined: + return os << "SignedFloatTruncateOverflowUndefined"; + case TryChangeOp::Kind::kUnsignedFloatTruncateOverflowUndefined: + return os << "UnsignedFloatTruncateOverflowUndefined"; + } +} + +std::ostream& operator<<(std::ostream& os, ChangeOp::Assumption assumption) { + switch (assumption) { + case ChangeOp::Assumption::kNoAssumption: + return os << "NoAssumption"; + case ChangeOp::Assumption::kNoOverflow: + return os << "NoOverflow"; + case ChangeOp::Assumption::kReversible: + return os << "Reversible"; + } +} + std::ostream& operator<<(std::ostream& os, Float64InsertWord32Op::Kind kind) { switch (kind) { case Float64InsertWord32Op::Kind::kLowHalf: @@ -249,6 +310,14 @@ void ConstantOp::PrintOptions(std::ostream& os) const { case Kind::kCompressedHeapObject: os << "compressed heap object: " << handle(); break; + case Kind::kRelocatableWasmCall: + os << "relocatable wasm call: 0x" + << reinterpret_cast<void*>(storage.integral); + break; + case Kind::kRelocatableWasmStubCall: + os << "relocatable wasm stub call: 0x" + << reinterpret_cast<void*>(storage.integral); + break; } os << "]"; } @@ -465,8 +534,8 @@ std::ostream& operator<<(std::ostream& os, OpProperties opProperties) { os << "Reading"; } else if (opProperties == OpProperties::Writing()) { os << "Writing"; - } else if (opProperties == OpProperties::CanDeopt()) { - os << "CanDeopt"; + } else if (opProperties == OpProperties::CanAbort()) { + os << "CanAbort"; } else if (opProperties == OpProperties::AnySideEffects()) { os << "AnySideEffects"; } else if (opProperties == OpProperties::BlockTerminator()) { diff --git a/deps/v8/src/compiler/turboshaft/operations.h b/deps/v8/src/compiler/turboshaft/operations.h index 6e6cae783c3931..d7f49e9bc58e34 100644 --- a/deps/v8/src/compiler/turboshaft/operations.h +++ b/deps/v8/src/compiler/turboshaft/operations.h @@ -13,21 +13,18 @@ #include <type_traits> #include <utility> -#include "src/base/functional.h" #include "src/base/logging.h" #include "src/base/macros.h" #include "src/base/platform/mutex.h" -#include "src/base/small-vector.h" #include "src/base/template-utils.h" #include "src/base/vector.h" #include "src/codegen/external-reference.h" -#include "src/codegen/machine-type.h" #include "src/common/globals.h" #include "src/compiler/globals.h" #include "src/compiler/turboshaft/fast-hash.h" +#include 
"src/compiler/turboshaft/representations.h" #include "src/compiler/turboshaft/utils.h" #include "src/compiler/write-barrier-kind.h" -#include "src/zone/zone.h" namespace v8::internal { class HeapObject; @@ -37,6 +34,7 @@ class CallDescriptor; class DeoptimizeParameters; class FrameStateInfo; class Node; +enum class TrapId : uint32_t; } // namespace v8::internal::compiler namespace v8::internal::compiler::turboshaft { class Block; @@ -72,14 +70,18 @@ class Graph; V(Equal) \ V(Comparison) \ V(Change) \ + V(TryChange) \ V(Float64InsertWord32) \ V(TaggedBitcast) \ + V(Select) \ V(PendingLoopPhi) \ V(Constant) \ V(Load) \ V(IndexedLoad) \ + V(ProtectedLoad) \ V(Store) \ V(IndexedStore) \ + V(ProtectedStore) \ V(Retain) \ V(Parameter) \ V(OsrValue) \ @@ -90,9 +92,11 @@ class Graph; V(CheckLazyDeopt) \ V(Deoptimize) \ V(DeoptimizeIf) \ + V(TrapIf) \ V(Phi) \ V(FrameState) \ V(Call) \ + V(TailCall) \ V(Unreachable) \ V(Return) \ V(Branch) \ @@ -251,7 +255,7 @@ struct OpProperties { static constexpr OpProperties Writing() { return {false, true, false, false}; } - static constexpr OpProperties CanDeopt() { + static constexpr OpProperties CanAbort() { return {false, false, true, false}; } static constexpr OpProperties AnySideEffects() { @@ -260,6 +264,15 @@ struct OpProperties { static constexpr OpProperties BlockTerminator() { return {false, false, false, true}; } + static constexpr OpProperties BlockTerminatorWithAnySideEffect() { + return {true, true, true, true}; + } + static constexpr OpProperties ReadingAndCanAbort() { + return {true, false, true, false}; + } + static constexpr OpProperties WritingAndCanAbort() { + return {false, true, true, false}; + } bool operator==(const OpProperties& other) const { return can_read == other.can_read && can_write == other.can_write && can_abort == other.can_abort && @@ -562,7 +575,7 @@ struct WordBinopOp : FixedArityOperationT<2, WordBinopOp> { kUnsignedMod, }; Kind kind; - MachineRepresentation rep; + WordRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); @@ -588,10 +601,7 @@ struct WordBinopOp : FixedArityOperationT<2, WordBinopOp> { } } - static bool IsAssociative(Kind kind, MachineRepresentation rep) { - if (IsFloatingPoint(rep)) { - return false; - } + static bool IsAssociative(Kind kind) { switch (kind) { case Kind::kAdd: case Kind::kMul: @@ -630,14 +640,8 @@ struct WordBinopOp : FixedArityOperationT<2, WordBinopOp> { } } - WordBinopOp(OpIndex left, OpIndex right, Kind kind, MachineRepresentation rep) - : Base(left, right), kind(kind), rep(rep) { - DCHECK_EQ(rep, any_of(MachineRepresentation::kWord32, - MachineRepresentation::kWord64)); - DCHECK_IMPLIES(kind == any_of(Kind::kSignedMulOverflownBits, - Kind::kUnsignedMulOverflownBits), - rep == MachineRepresentation::kWord32); - } + WordBinopOp(OpIndex left, OpIndex right, Kind kind, WordRepresentation rep) + : Base(left, right), kind(kind), rep(rep) {} auto options() const { return std::tuple{kind, rep}; } void PrintOptions(std::ostream& os) const; }; @@ -655,7 +659,7 @@ struct FloatBinopOp : FixedArityOperationT<2, FloatBinopOp> { kAtan2, }; Kind kind; - MachineRepresentation rep; + FloatRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); @@ -678,13 +682,10 @@ struct FloatBinopOp : FixedArityOperationT<2, FloatBinopOp> { } } - FloatBinopOp(OpIndex left, OpIndex right, Kind kind, - MachineRepresentation rep) + FloatBinopOp(OpIndex left, OpIndex right, Kind kind, FloatRepresentation rep) : Base(left, right), kind(kind), rep(rep) { - 
DCHECK_EQ(rep, any_of(MachineRepresentation::kFloat32, - MachineRepresentation::kFloat64)); DCHECK_IMPLIES(kind == any_of(Kind::kPower, Kind::kAtan2, Kind::kMod), - rep == MachineRepresentation::kFloat64); + rep == FloatRepresentation::Float64()); } auto options() const { return std::tuple{kind, rep}; } void PrintOptions(std::ostream& os) const; @@ -698,7 +699,7 @@ struct OverflowCheckedBinopOp kSignedSub, }; Kind kind; - MachineRepresentation rep; + WordRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); @@ -716,10 +717,8 @@ struct OverflowCheckedBinopOp } OverflowCheckedBinopOp(OpIndex left, OpIndex right, Kind kind, - MachineRepresentation rep) - : Base(left, right), kind(kind), rep(rep) { - DCHECK_EQ(rep, MachineRepresentation::kWord32); - } + WordRepresentation rep) + : Base(left, right), kind(kind), rep(rep) {} auto options() const { return std::tuple{kind, rep}; } void PrintOptions(std::ostream& os) const; }; @@ -728,17 +727,22 @@ struct WordUnaryOp : FixedArityOperationT<1, WordUnaryOp> { enum class Kind : uint8_t { kReverseBytes, kCountLeadingZeros, + kCountTrailingZeros, + kPopCount, + kSignExtend8, + kSignExtend16, }; Kind kind; - MachineRepresentation rep; + WordRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); OpIndex input() const { return Base::input(0); } - explicit WordUnaryOp(OpIndex input, Kind kind, MachineRepresentation rep) + static bool IsSupported(Kind kind, WordRepresentation rep); + + explicit WordUnaryOp(OpIndex input, Kind kind, WordRepresentation rep) : Base(input), kind(kind), rep(rep) { - DCHECK_EQ(rep, any_of(MachineRepresentation::kWord32, - MachineRepresentation::kWord64)); + DCHECK(IsSupported(kind, rep)); } auto options() const { return std::tuple{kind, rep}; } }; @@ -754,7 +758,11 @@ struct FloatUnaryOp : FixedArityOperationT<1, FloatUnaryOp> { kRoundToZero, // round towards 0 kRoundTiesEven, // break ties by rounding towards the next even number kLog, + kLog2, + kLog10, + kLog1p, kSqrt, + kCbrt, kExp, kExpm1, kSin, @@ -767,19 +775,19 @@ struct FloatUnaryOp : FixedArityOperationT<1, FloatUnaryOp> { kAcosh, kTan, kTanh, + kAtan, + kAtanh, }; Kind kind; - MachineRepresentation rep; + FloatRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); OpIndex input() const { return Base::input(0); } - static bool IsSupported(Kind kind, MachineRepresentation rep); + static bool IsSupported(Kind kind, FloatRepresentation rep); - explicit FloatUnaryOp(OpIndex input, Kind kind, MachineRepresentation rep) + explicit FloatUnaryOp(OpIndex input, Kind kind, FloatRepresentation rep) : Base(input), kind(kind), rep(rep) { - DCHECK_EQ(rep, any_of(MachineRepresentation::kFloat32, - MachineRepresentation::kFloat64)); DCHECK(IsSupported(kind, rep)); } auto options() const { return std::tuple{kind, rep}; } @@ -796,7 +804,7 @@ struct ShiftOp : FixedArityOperationT<2, ShiftOp> { kRotateLeft }; Kind kind; - MachineRepresentation rep; + WordRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); @@ -830,29 +838,26 @@ struct ShiftOp : FixedArityOperationT<2, ShiftOp> { } } - ShiftOp(OpIndex left, OpIndex right, Kind kind, MachineRepresentation rep) - : Base(left, right), kind(kind), rep(rep) { - DCHECK_EQ(rep, any_of(MachineRepresentation::kWord32, - MachineRepresentation::kWord64)); - } + ShiftOp(OpIndex left, OpIndex right, Kind kind, WordRepresentation rep) + : Base(left, right), kind(kind), rep(rep) {} auto options() const { return std::tuple{kind, rep}; } 
}; std::ostream& operator<<(std::ostream& os, ShiftOp::Kind kind); struct EqualOp : FixedArityOperationT<2, EqualOp> { - MachineRepresentation rep; + RegisterRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); OpIndex left() const { return input(0); } OpIndex right() const { return input(1); } - EqualOp(OpIndex left, OpIndex right, MachineRepresentation rep) + EqualOp(OpIndex left, OpIndex right, RegisterRepresentation rep) : Base(left, right), rep(rep) { - DCHECK(rep == MachineRepresentation::kWord32 || - rep == MachineRepresentation::kWord64 || - rep == MachineRepresentation::kFloat32 || - rep == MachineRepresentation::kFloat64); + DCHECK(rep == any_of(RegisterRepresentation::Word32(), + RegisterRepresentation::Word64(), + RegisterRepresentation::Float32(), + RegisterRepresentation::Float64())); } auto options() const { return std::tuple{rep}; } }; @@ -865,7 +870,7 @@ struct ComparisonOp : FixedArityOperationT<2, ComparisonOp> { kUnsignedLessThanOrEqual }; Kind kind; - MachineRepresentation rep; + RegisterRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); @@ -873,31 +878,50 @@ struct ComparisonOp : FixedArityOperationT<2, ComparisonOp> { OpIndex right() const { return input(1); } ComparisonOp(OpIndex left, OpIndex right, Kind kind, - MachineRepresentation rep) + RegisterRepresentation rep) : Base(left, right), kind(kind), rep(rep) { - DCHECK_EQ(rep, any_of(MachineRepresentation::kWord32, - MachineRepresentation::kWord64, - MachineRepresentation::kFloat32, - MachineRepresentation::kFloat64)); + DCHECK_EQ(rep, any_of(RegisterRepresentation::Word32(), + RegisterRepresentation::Word64(), + RegisterRepresentation::Float32(), + RegisterRepresentation::Float64())); + DCHECK_IMPLIES( + rep == any_of(RegisterRepresentation::Float32(), + RegisterRepresentation::Float64()), + kind == any_of(Kind::kSignedLessThan, Kind::kSignedLessThanOrEqual)); } auto options() const { return std::tuple{kind, rep}; } + + static bool IsSigned(Kind kind) { + switch (kind) { + case Kind::kSignedLessThan: + case Kind::kSignedLessThanOrEqual: + return true; + case Kind::kUnsignedLessThan: + case Kind::kUnsignedLessThanOrEqual: + return false; + } + } + static Kind SetSigned(Kind kind, bool is_signed) { + switch (kind) { + case Kind::kSignedLessThan: + case Kind::kUnsignedLessThan: + return is_signed ? Kind::kSignedLessThan : Kind::kUnsignedLessThan; + case Kind::kSignedLessThanOrEqual: + case Kind::kUnsignedLessThanOrEqual: + return is_signed ? 
Kind::kSignedLessThanOrEqual + : Kind::kUnsignedLessThanOrEqual; + } + } }; std::ostream& operator<<(std::ostream& os, ComparisonOp::Kind kind); struct ChangeOp : FixedArityOperationT<1, ChangeOp> { enum class Kind : uint8_t { - // narrowing means undefined behavior if value cannot be represented - // precisely - kSignedNarrowing, - kUnsignedNarrowing, // convert between different floating-point types kFloatConversion, - // conversion to signed integer, rounding towards zero, - // overflow behavior system-specific - kSignedFloatTruncate, - // like kSignedFloatTruncate, but overflow guaranteed to result in the - // minimal integer + // overflow guaranteed to result in the minimal integer kSignedFloatTruncateOverflowToMin, + kUnsignedFloatTruncateOverflowToMin, // JS semantics float64 to word32 truncation // https://tc39.es/ecma262/#sec-touint32 kJSFloatTruncate, @@ -914,20 +938,110 @@ struct ChangeOp : FixedArityOperationT<1, ChangeOp> { // preserve bits, change meaning kBitcast }; + // Violated assumptions result in undefined behavior. + enum class Assumption : uint8_t { + kNoAssumption, + // Used for conversions from floating-point to integer, assumes that the + // value doesn't exceed the integer range. + kNoOverflow, + // Assume that the original value can be recovered by a corresponding + // reverse transformation. + kReversible, + }; Kind kind; - MachineRepresentation from; - MachineRepresentation to; + // Reversible means undefined behavior if value cannot be represented + // precisely. + Assumption assumption; + RegisterRepresentation from; + RegisterRepresentation to; + + static bool IsReversible(Kind kind, Assumption assumption, + RegisterRepresentation from, + RegisterRepresentation to, Kind reverse_kind, + bool signalling_nan_possible) { + switch (kind) { + case Kind::kFloatConversion: + return from == RegisterRepresentation::Float32() && + to == RegisterRepresentation::Float64() && + reverse_kind == Kind::kFloatConversion && + !signalling_nan_possible; + case Kind::kSignedFloatTruncateOverflowToMin: + return assumption == Assumption::kReversible && + reverse_kind == Kind::kSignedToFloat; + case Kind::kUnsignedFloatTruncateOverflowToMin: + return assumption == Assumption::kReversible && + reverse_kind == Kind::kUnsignedToFloat; + case Kind::kJSFloatTruncate: + return false; + case Kind::kSignedToFloat: + if (from == RegisterRepresentation::Word32() && + to == RegisterRepresentation::Float64()) { + return reverse_kind == any_of(Kind::kSignedFloatTruncateOverflowToMin, + Kind::kJSFloatTruncate); + } else { + return assumption == Assumption::kReversible && + reverse_kind == + any_of(Kind::kSignedFloatTruncateOverflowToMin); + } + case Kind::kUnsignedToFloat: + if (from == RegisterRepresentation::Word32() && + to == RegisterRepresentation::Float64()) { + return reverse_kind == + any_of(Kind::kUnsignedFloatTruncateOverflowToMin, + Kind::kJSFloatTruncate); + } else { + return assumption == Assumption::kReversible && + reverse_kind == Kind::kUnsignedFloatTruncateOverflowToMin; + } + case Kind::kExtractHighHalf: + case Kind::kExtractLowHalf: + case Kind::kZeroExtend: + case Kind::kSignExtend: + return false; + case Kind::kBitcast: + return reverse_kind == Kind::kBitcast; + } + } + + bool IsReversibleBy(Kind reverse_kind, bool signalling_nan_possible) const { + return IsReversible(kind, assumption, from, to, reverse_kind, + signalling_nan_possible); + } static constexpr OpProperties properties = OpProperties::Pure(); OpIndex input() const { return Base::input(0); } - ChangeOp(OpIndex 
input, Kind kind, MachineRepresentation from, - MachineRepresentation to) + ChangeOp(OpIndex input, Kind kind, Assumption assumption, + RegisterRepresentation from, RegisterRepresentation to) + : Base(input), kind(kind), assumption(assumption), from(from), to(to) {} + auto options() const { return std::tuple{kind, assumption, from, to}; } +}; +std::ostream& operator<<(std::ostream& os, ChangeOp::Kind kind); +std::ostream& operator<<(std::ostream& os, ChangeOp::Assumption assumption); + +// Perform a conversion and return a pair of the result and a bit if it was +// successful. +struct TryChangeOp : FixedArityOperationT<1, TryChangeOp> { + enum class Kind : uint8_t { + // The result of the truncation is undefined if the result is out of range. + kSignedFloatTruncateOverflowUndefined, + kUnsignedFloatTruncateOverflowUndefined, + }; + Kind kind; + FloatRepresentation from; + WordRepresentation to; + + static constexpr OpProperties properties = OpProperties::Pure(); + + OpIndex input() const { return Base::input(0); } + + TryChangeOp(OpIndex input, Kind kind, FloatRepresentation from, + WordRepresentation to) : Base(input), kind(kind), from(from), to(to) {} auto options() const { return std::tuple{kind, from, to}; } }; -std::ostream& operator<<(std::ostream& os, ChangeOp::Kind kind); +std::ostream& operator<<(std::ostream& os, TryChangeOp::Kind kind); // TODO(tebbi): Unify with other operations. struct Float64InsertWord32Op : FixedArityOperationT<2, Float64InsertWord32Op> { @@ -946,28 +1060,52 @@ struct Float64InsertWord32Op : FixedArityOperationT<2, Float64InsertWord32Op> { std::ostream& operator<<(std::ostream& os, Float64InsertWord32Op::Kind kind); struct TaggedBitcastOp : FixedArityOperationT<1, TaggedBitcastOp> { - MachineRepresentation from; - MachineRepresentation to; + RegisterRepresentation from; + RegisterRepresentation to; // Due to moving GC, converting from or to pointers doesn't commute with GC. static constexpr OpProperties properties = OpProperties::Reading(); OpIndex input() const { return Base::input(0); } - TaggedBitcastOp(OpIndex input, MachineRepresentation from, - MachineRepresentation to) - : Base(input), from(from), to(to) {} + TaggedBitcastOp(OpIndex input, RegisterRepresentation from, + RegisterRepresentation to) + : Base(input), from(from), to(to) { + DCHECK((from == RegisterRepresentation::PointerSized() && + to == RegisterRepresentation::Tagged()) || + (from == RegisterRepresentation::Tagged() && + to == RegisterRepresentation::PointerSized())); + } auto options() const { return std::tuple{from, to}; } }; +struct SelectOp : FixedArityOperationT<3, SelectOp> { + // TODO(12783): Support all register reps. + WordRepresentation rep; + static constexpr OpProperties properties = OpProperties::Pure(); + + OpIndex condition() const { return Base::input(0); } + OpIndex left() const { return Base::input(1); } + OpIndex right() const { return Base::input(2); } + + SelectOp(OpIndex condition, OpIndex left, OpIndex right, + WordRepresentation rep) + : Base(condition, left, right), rep(rep) { + DCHECK(rep == WordRepresentation::Word32() + ? 
SupportedOperations::word32_select() + : SupportedOperations::word64_select()); + } + auto options() const { return std::tuple{rep}; } +}; + struct PhiOp : OperationT<PhiOp> { - MachineRepresentation rep; + RegisterRepresentation rep; static constexpr OpProperties properties = OpProperties::Pure(); static constexpr size_t kLoopPhiBackEdgeIndex = 1; - explicit PhiOp(base::Vector<const OpIndex> inputs, MachineRepresentation rep) + explicit PhiOp(base::Vector<const OpIndex> inputs, RegisterRepresentation rep) : Base(inputs), rep(rep) {} auto options() const { return std::tuple{rep}; } }; @@ -975,7 +1113,7 @@ struct PhiOp : OperationT<PhiOp> { // Only used when moving a loop phi to a new graph while the loop backedge has // not been emitted yet. struct PendingLoopPhiOp : FixedArityOperationT<1, PendingLoopPhiOp> { - MachineRepresentation rep; + RegisterRepresentation rep; union { // Used when transforming a Turboshaft graph. // This is not an input because it refers to the old graph. @@ -988,12 +1126,12 @@ struct PendingLoopPhiOp : FixedArityOperationT<1, PendingLoopPhiOp> { OpIndex first() const { return input(0); } - PendingLoopPhiOp(OpIndex first, MachineRepresentation rep, + PendingLoopPhiOp(OpIndex first, RegisterRepresentation rep, OpIndex old_backedge_index) : Base(first), rep(rep), old_backedge_index(old_backedge_index) { DCHECK(old_backedge_index.valid()); } - PendingLoopPhiOp(OpIndex first, MachineRepresentation rep, + PendingLoopPhiOp(OpIndex first, RegisterRepresentation rep, Node* old_backedge_node) : Base(first), rep(rep), old_backedge_node(old_backedge_node) {} std::tuple<> options() const { UNREACHABLE(); } @@ -1010,7 +1148,9 @@ struct ConstantOp : FixedArityOperationT<0, ConstantOp> { kTaggedIndex, kExternal, kHeapObject, - kCompressedHeapObject + kCompressedHeapObject, + kRelocatableWasmCall, + kRelocatableWasmStubCall }; Kind kind; @@ -1030,24 +1170,26 @@ struct ConstantOp : FixedArityOperationT<0, ConstantOp> { static constexpr OpProperties properties = OpProperties::Pure(); - MachineRepresentation Representation() const { + RegisterRepresentation Representation() const { switch (kind) { case Kind::kWord32: - return MachineRepresentation::kWord32; + return RegisterRepresentation::Word32(); case Kind::kWord64: - return MachineRepresentation::kWord64; + return RegisterRepresentation::Word64(); case Kind::kFloat32: - return MachineRepresentation::kFloat32; + return RegisterRepresentation::Float32(); case Kind::kFloat64: - return MachineRepresentation::kFloat64; + return RegisterRepresentation::Float64(); case Kind::kExternal: case Kind::kTaggedIndex: - return MachineType::PointerRepresentation(); + case Kind::kRelocatableWasmCall: + case Kind::kRelocatableWasmStubCall: + return RegisterRepresentation::PointerSized(); case Kind::kHeapObject: case Kind::kNumber: - return MachineRepresentation::kTagged; + return RegisterRepresentation::Tagged(); case Kind::kCompressedHeapObject: - return MachineRepresentation::kCompressed; + return RegisterRepresentation::Compressed(); } } @@ -1055,11 +1197,13 @@ struct ConstantOp : FixedArityOperationT<0, ConstantOp> { : Base(), kind(kind), storage(storage) { DCHECK_IMPLIES( kind == Kind::kWord32, - storage.integral <= MaxUnsignedValue(MachineRepresentation::kWord32)); + storage.integral <= WordRepresentation::Word32().MaxUnsignedValue()); } uint64_t integral() const { - DCHECK(kind == Kind::kWord32 || kind == Kind::kWord64); + DCHECK(kind == Kind::kWord32 || kind == Kind::kWord64 || + kind == Kind::kRelocatableWasmCall || + kind == 
Kind::kRelocatableWasmStubCall); return storage.integral; } @@ -1128,6 +1272,8 @@ struct ConstantOp : FixedArityOperationT<0, ConstantOp> { case Kind::kExternal: case Kind::kHeapObject: case Kind::kCompressedHeapObject: + case Kind::kRelocatableWasmCall: + case Kind::kRelocatableWasmStubCall: UNREACHABLE(); } } @@ -1146,6 +1292,8 @@ struct ConstantOp : FixedArityOperationT<0, ConstantOp> { case Kind::kExternal: case Kind::kHeapObject: case Kind::kCompressedHeapObject: + case Kind::kRelocatableWasmCall: + case Kind::kRelocatableWasmStubCall: UNREACHABLE(); } } @@ -1169,6 +1317,8 @@ struct ConstantOp : FixedArityOperationT<0, ConstantOp> { case Kind::kWord32: case Kind::kWord64: case Kind::kTaggedIndex: + case Kind::kRelocatableWasmCall: + case Kind::kRelocatableWasmStubCall: return fast_hash_combine(opcode, kind, storage.integral); case Kind::kFloat32: return fast_hash_combine(opcode, kind, storage.float32); @@ -1188,6 +1338,8 @@ struct ConstantOp : FixedArityOperationT<0, ConstantOp> { case Kind::kWord32: case Kind::kWord64: case Kind::kTaggedIndex: + case Kind::kRelocatableWasmCall: + case Kind::kRelocatableWasmStubCall: return storage.integral == other.storage.integral; case Kind::kFloat32: // Using a bit_cast to uint32_t in order to return false when comparing @@ -1217,20 +1369,34 @@ struct ConstantOp : FixedArityOperationT<0, ConstantOp> { // For Kind::tagged_base: subtract kHeapObjectTag, // `base` has to be the object start. // For (u)int8/16, the value will be sign- or zero-extended to Word32. +// When result_rep is RegisterRepresentation::Compressed(), then the load does +// not decompress the value. struct LoadOp : FixedArityOperationT<1, LoadOp> { - enum class Kind { kTaggedBase, kRawAligned, kRawUnaligned }; + enum class Kind : uint8_t { kTaggedBase, kRawAligned, kRawUnaligned }; Kind kind; - MachineType loaded_rep; + MemoryRepresentation loaded_rep; + RegisterRepresentation result_rep; int32_t offset; static constexpr OpProperties properties = OpProperties::Reading(); OpIndex base() const { return input(0); } - LoadOp(OpIndex base, Kind kind, MachineType loaded_rep, int32_t offset) - : Base(base), kind(kind), loaded_rep(loaded_rep), offset(offset) {} + LoadOp(OpIndex base, Kind kind, MemoryRepresentation loaded_rep, + RegisterRepresentation result_rep, int32_t offset) + : Base(base), + kind(kind), + loaded_rep(loaded_rep), + result_rep(result_rep), + offset(offset) { + DCHECK(loaded_rep.ToRegisterRepresentation() == result_rep || + (loaded_rep.IsTagged() && + result_rep == RegisterRepresentation::Compressed())); + } void PrintOptions(std::ostream& os) const; - auto options() const { return std::tuple{kind, loaded_rep, offset}; } + auto options() const { + return std::tuple{kind, loaded_rep, result_rep, offset}; + } }; inline bool IsAlignedAccess(LoadOp::Kind kind) { @@ -1247,10 +1413,13 @@ inline bool IsAlignedAccess(LoadOp::Kind kind) { // For Kind::tagged_base: subtract kHeapObjectTag, // `base` has to be the object start. // For (u)int8/16, the value will be sign- or zero-extended to Word32. +// When result_rep is RegisterRepresentation::Compressed(), then the load does +// not decompress the value. 
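// Illustrative sketch, not upstream code: the effective address of an
// IndexedLoadOp is base + offset + (index << element_size_log2), minus
// kHeapObjectTag for Kind::kTaggedBase. Reading a FixedArray slot might look
// as follows, using the emitter signature from ReduceIndexedLoad in
// optimization-phase.h (MemoryRepresentation::AnyTagged() is assumed to exist
// alongside the other representations this patch introduces):
//
//   OpIndex elem = assembler.IndexedLoad(
//       array, index, IndexedLoadOp::Kind::kTaggedBase,
//       MemoryRepresentation::AnyTagged(), RegisterRepresentation::Tagged(),
//       FixedArray::kHeaderSize, kTaggedSizeLog2);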
struct IndexedLoadOp : FixedArityOperationT<2, IndexedLoadOp> { using Kind = LoadOp::Kind; Kind kind; - MachineType loaded_rep; + MemoryRepresentation loaded_rep; + RegisterRepresentation result_rep; uint8_t element_size_log2; // multiply index with 2^element_size_log2 int32_t offset; // add offset to scaled index @@ -1259,26 +1428,55 @@ struct IndexedLoadOp : FixedArityOperationT<2, IndexedLoadOp> { OpIndex base() const { return input(0); } OpIndex index() const { return input(1); } - IndexedLoadOp(OpIndex base, OpIndex index, Kind kind, MachineType loaded_rep, - int32_t offset, uint8_t element_size_log2) + IndexedLoadOp(OpIndex base, OpIndex index, Kind kind, + MemoryRepresentation loaded_rep, + RegisterRepresentation result_rep, int32_t offset, + uint8_t element_size_log2) : Base(base, index), kind(kind), loaded_rep(loaded_rep), + result_rep(result_rep), element_size_log2(element_size_log2), - offset(offset) {} + offset(offset) { + DCHECK(loaded_rep.ToRegisterRepresentation() == result_rep || + (loaded_rep.IsTagged() && + result_rep == RegisterRepresentation::Compressed())); + } void PrintOptions(std::ostream& os) const; auto options() const { return std::tuple{kind, loaded_rep, offset, element_size_log2}; } }; +// A protected load registers a trap handler which handles out-of-bounds memory +// accesses. +struct ProtectedLoadOp : FixedArityOperationT<2, ProtectedLoadOp> { + MemoryRepresentation loaded_rep; + RegisterRepresentation result_rep; + + static constexpr OpProperties properties = OpProperties::ReadingAndCanAbort(); + + OpIndex base() const { return input(0); } + OpIndex index() const { return input(1); } + + ProtectedLoadOp(OpIndex base, OpIndex index, MemoryRepresentation loaded_rep, + RegisterRepresentation result_rep) + : Base(base, index), loaded_rep(loaded_rep), result_rep(result_rep) { + DCHECK(loaded_rep.ToRegisterRepresentation() == result_rep || + (loaded_rep.IsTagged() && + result_rep == RegisterRepresentation::Compressed())); + } + + auto options() const { return std::tuple{loaded_rep, result_rep}; } +}; + // Store `value` to: base + offset. // For Kind::tagged_base: subtract kHeapObjectTag, // `base` has to be the object start. 
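// Illustrative sketch, not upstream code -- a tagged field write with a full
// write barrier, assuming the Store emitter mirrors ReduceStore in
// optimization-phase.h and that MemoryRepresentation::AnyTagged() exists:
//
//   assembler.Store(object, value, StoreOp::Kind::kTaggedBase,
//                   MemoryRepresentation::AnyTagged(),
//                   WriteBarrierKind::kFullWriteBarrier,
//                   JSObject::kPropertiesOrHashOffset);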
struct StoreOp : FixedArityOperationT<2, StoreOp> { using Kind = LoadOp::Kind; Kind kind; - MachineRepresentation stored_rep; + MemoryRepresentation stored_rep; WriteBarrierKind write_barrier; int32_t offset; @@ -1288,7 +1486,7 @@ struct StoreOp : FixedArityOperationT<2, StoreOp> { OpIndex value() const { return input(1); } StoreOp(OpIndex base, OpIndex value, Kind kind, - MachineRepresentation stored_rep, WriteBarrierKind write_barrier, + MemoryRepresentation stored_rep, WriteBarrierKind write_barrier, int32_t offset) : Base(base, value), kind(kind), @@ -1307,7 +1505,7 @@ struct StoreOp : FixedArityOperationT<2, StoreOp> { struct IndexedStoreOp : FixedArityOperationT<3, IndexedStoreOp> { using Kind = StoreOp::Kind; Kind kind; - MachineRepresentation stored_rep; + MemoryRepresentation stored_rep; WriteBarrierKind write_barrier; uint8_t element_size_log2; // multiply index with 2^element_size_log2 int32_t offset; // add offset to scaled index @@ -1319,7 +1517,7 @@ struct IndexedStoreOp : FixedArityOperationT<3, IndexedStoreOp> { OpIndex value() const { return input(2); } IndexedStoreOp(OpIndex base, OpIndex index, OpIndex value, Kind kind, - MachineRepresentation stored_rep, + MemoryRepresentation stored_rep, WriteBarrierKind write_barrier, int32_t offset, uint8_t element_size_log2) : Base(base, index, value), @@ -1335,6 +1533,23 @@ struct IndexedStoreOp : FixedArityOperationT<3, IndexedStoreOp> { } }; +// A protected store registers a trap handler which handles out-of-bounds memory +// accesses. +struct ProtectedStoreOp : FixedArityOperationT<3, ProtectedStoreOp> { + MemoryRepresentation stored_rep; + + static constexpr OpProperties properties = OpProperties::WritingAndCanAbort(); + + OpIndex base() const { return input(0); } + OpIndex index() const { return input(1); } + OpIndex value() const { return input(2); } + + ProtectedStoreOp(OpIndex base, OpIndex index, OpIndex value, + MemoryRepresentation stored_rep) + : Base(base, index, value), stored_rep(stored_rep) {} + auto options() const { return std::tuple{stored_rep}; } +}; + // Retain a HeapObject to prevent it from being garbage collected too early. struct RetainOp : FixedArityOperationT<1, RetainOp> { OpIndex retained() const { return input(0); } @@ -1415,7 +1630,7 @@ struct FrameStateOp : OperationT<FrameStateOp> { // Semantically, it deopts if the current code object has been // deoptimized. But this might also be implemented differently. 
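// Illustrative sketch, not upstream code -- the emission pattern this implies
// for a call that may lazily deoptimize (Call matches ReduceCall in
// optimization-phase.h; the CheckLazyDeopt emitter is assumed from the op's
// two inputs):
//
//   OpIndex call = assembler.Call(callee, base::VectorOf(args), descriptor);
//   assembler.CheckLazyDeopt(call, frame_state);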
struct CheckLazyDeoptOp : FixedArityOperationT<2, CheckLazyDeoptOp> { - static constexpr OpProperties properties = OpProperties::CanDeopt(); + static constexpr OpProperties properties = OpProperties::CanAbort(); OpIndex call() const { return input(0); } OpIndex frame_state() const { return input(1); } @@ -1441,7 +1656,7 @@ struct DeoptimizeIfOp : FixedArityOperationT<2, DeoptimizeIfOp> { bool negated; const DeoptimizeParameters* parameters; - static constexpr OpProperties properties = OpProperties::CanDeopt(); + static constexpr OpProperties properties = OpProperties::CanAbort(); OpIndex condition() const { return input(0); } OpIndex frame_state() const { return input(1); } @@ -1454,6 +1669,19 @@ struct DeoptimizeIfOp : FixedArityOperationT<2, DeoptimizeIfOp> { auto options() const { return std::tuple{negated, parameters}; } }; +struct TrapIfOp : FixedArityOperationT<1, TrapIfOp> { + bool negated; + const TrapId trap_id; + + static constexpr OpProperties properties = OpProperties::CanAbort(); + + OpIndex condition() const { return input(0); } + + TrapIfOp(OpIndex condition, bool negated, const TrapId trap_id) + : Base(condition), negated(negated), trap_id(trap_id) {} + auto options() const { return std::tuple{negated, trap_id}; } +}; + struct ParameterOp : FixedArityOperationT<0, ParameterOp> { int32_t parameter_index; const char* debug_name; @@ -1501,6 +1729,33 @@ struct CallOp : OperationT<CallOp> { auto options() const { return std::tuple{descriptor}; } }; +struct TailCallOp : OperationT<TailCallOp> { + const CallDescriptor* descriptor; + + static constexpr OpProperties properties = + OpProperties::BlockTerminatorWithAnySideEffect(); + + OpIndex callee() const { return input(0); } + base::Vector<const OpIndex> arguments() const { + return inputs().SubVector(1, input_count); + } + + TailCallOp(OpIndex callee, base::Vector<const OpIndex> arguments, + const CallDescriptor* descriptor) + : Base(1 + arguments.size()), descriptor(descriptor) { + base::Vector<OpIndex> inputs = this->inputs(); + inputs[0] = callee; + inputs.SubVector(1, inputs.size()).OverwriteWith(arguments); + } + static TailCallOp& New(Graph* graph, OpIndex callee, + base::Vector<const OpIndex> arguments, + const CallDescriptor* descriptor) { + return Base::New(graph, 1 + arguments.size(), callee, arguments, + descriptor); + } + auto options() const { return std::tuple{descriptor}; } +}; + // Control-flow should never reach here. 
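// Illustrative note, not upstream code: TailCallOp above is declared
// BlockTerminatorWithAnySideEffect, so nothing may be emitted after it in the
// same block, while UnreachableOp below terminates paths that can never be
// taken. Sketch, with TailCall matching ReduceTailCall in
// optimization-phase.h:
//
//   assembler.TailCall(callee, base::VectorOf(args), descriptor);
//   // the block is terminated; a later Bind() starts the next block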
struct UnreachableOp : FixedArityOperationT<0, UnreachableOp> { static constexpr OpProperties properties = OpProperties::BlockTerminator(); diff --git a/deps/v8/src/compiler/turboshaft/optimization-phase.h b/deps/v8/src/compiler/turboshaft/optimization-phase.h index 074e517c81af50..421612982aeb0f 100644 --- a/deps/v8/src/compiler/turboshaft/optimization-phase.h +++ b/deps/v8/src/compiler/turboshaft/optimization-phase.h @@ -111,7 +111,7 @@ class OptimizationPhase { static void Run(Graph* input, Zone* phase_zone, NodeOriginTable* origins, VisitOrder visit_order = VisitOrder::kAsEmitted) { Impl phase{*input, phase_zone, origins, visit_order}; - if (FLAG_turboshaft_trace_reduction) { + if (v8_flags.turboshaft_trace_reduction) { phase.template Run<true>(); } else { phase.template Run<false>(); @@ -184,7 +184,6 @@ struct OptimizationPhase<Analyzer, Assembler>::Impl { template <bool trace_reduction> void RunDominatorOrder() { base::SmallVector<Block*, 128> dominator_visit_stack; - input_graph.GenerateDominatorTree(); dominator_visit_stack.push_back(input_graph.GetPtr(0)); while (!dominator_visit_stack.empty()) { @@ -208,6 +207,19 @@ struct OptimizationPhase<Analyzer, Assembler>::Impl { } if (!assembler.Bind(MapToNewGraph(input_block.index()))) { if constexpr (trace_reduction) TraceBlockUnreachable(); + // If we eliminate a loop backedge, we need to turn the loop into a + // single-predecessor merge block. + const Operation& last_op = + *base::Reversed(input_graph.operations(input_block)).begin(); + if (auto* final_goto = last_op.TryCast<GotoOp>()) { + if (final_goto->destination->IsLoop()) { + Block* new_loop = MapToNewGraph(final_goto->destination->index()); + DCHECK(new_loop->IsLoop()); + if (new_loop->IsLoop() && new_loop->PredecessorCount() == 1) { + assembler.graph().TurnLoopIntoMerge(new_loop); + } + } + } assembler.ExitBlock(input_block); return; } @@ -290,12 +302,13 @@ struct OptimizationPhase<Analyzer, Assembler>::Impl { V8_INLINE OpIndex ReduceGoto(const GotoOp& op) { Block* destination = MapToNewGraph(op.destination->index()); + assembler.current_block()->SetOrigin(current_input_block); + assembler.Goto(destination); if (destination->IsBound()) { DCHECK(destination->IsLoop()); FixLoopPhis(destination); } - assembler.current_block()->SetOrigin(current_input_block); - return assembler.Goto(destination); + return OpIndex::Invalid(); } V8_INLINE OpIndex ReduceBranch(const BranchOp& op) { Block* if_true = MapToNewGraph(op.if_true->index()); @@ -402,6 +415,11 @@ struct OptimizationPhase<Analyzer, Assembler>::Impl { auto arguments = MapToNewGraph<16>(op.arguments()); return assembler.Call(callee, base::VectorOf(arguments), op.descriptor); } + OpIndex ReduceTailCall(const TailCallOp& op) { + OpIndex callee = MapToNewGraph(op.callee()); + auto arguments = MapToNewGraph<16>(op.arguments()); + return assembler.TailCall(callee, base::VectorOf(arguments), op.descriptor); + } OpIndex ReduceReturn(const ReturnOp& op) { // We very rarely have tuples longer than 4. 
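// (Illustrative note: the template argument below presumably sizes an inline
// base::SmallVector buffer inside MapToNewGraph, so <4> keeps the common
// short return-value tuple off the heap, like the <16> used for arguments.)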
auto return_values = MapToNewGraph<4>(op.return_values()); @@ -431,8 +449,14 @@ struct OptimizationPhase<Analyzer, Assembler>::Impl { MapToNewGraph(op.right()), op.kind, op.rep); } OpIndex ReduceChange(const ChangeOp& op) { - return assembler.Change(MapToNewGraph(op.input()), op.kind, op.from, op.to); + return assembler.Change(MapToNewGraph(op.input()), op.kind, op.assumption, + op.from, op.to); + } + OpIndex ReduceTryChange(const TryChangeOp& op) { + return assembler.TryChange(MapToNewGraph(op.input()), op.kind, op.from, + op.to); } + OpIndex ReduceFloat64InsertWord32(const Float64InsertWord32Op& op) { return assembler.Float64InsertWord32(MapToNewGraph(op.float64()), MapToNewGraph(op.word32()), op.kind); @@ -440,17 +464,27 @@ struct OptimizationPhase<Analyzer, Assembler>::Impl { OpIndex ReduceTaggedBitcast(const TaggedBitcastOp& op) { return assembler.TaggedBitcast(MapToNewGraph(op.input()), op.from, op.to); } + OpIndex ReduceSelect(const SelectOp& op) { + return assembler.Select(MapToNewGraph(op.condition()), + MapToNewGraph(op.left()), MapToNewGraph(op.right()), + op.rep); + } OpIndex ReduceConstant(const ConstantOp& op) { return assembler.Constant(op.kind, op.storage); } OpIndex ReduceLoad(const LoadOp& op) { return assembler.Load(MapToNewGraph(op.base()), op.kind, op.loaded_rep, - op.offset); + op.result_rep, op.offset); } OpIndex ReduceIndexedLoad(const IndexedLoadOp& op) { return assembler.IndexedLoad( MapToNewGraph(op.base()), MapToNewGraph(op.index()), op.kind, - op.loaded_rep, op.offset, op.element_size_log2); + op.loaded_rep, op.result_rep, op.offset, op.element_size_log2); + } + OpIndex ReduceProtectedLoad(const ProtectedLoadOp& op) { + return assembler.ProtectedLoad(MapToNewGraph(op.base()), + MapToNewGraph(op.index()), op.loaded_rep, + op.result_rep); } OpIndex ReduceStore(const StoreOp& op) { return assembler.Store(MapToNewGraph(op.base()), MapToNewGraph(op.value()), @@ -462,6 +496,11 @@ struct OptimizationPhase<Analyzer, Assembler>::Impl { MapToNewGraph(op.value()), op.kind, op.stored_rep, op.write_barrier, op.offset, op.element_size_log2); } + OpIndex ReduceProtectedStore(const ProtectedStoreOp& op) { + return assembler.ProtectedStore(MapToNewGraph(op.base()), + MapToNewGraph(op.index()), + MapToNewGraph(op.value()), op.stored_rep); + } OpIndex ReduceRetain(const RetainOp& op) { return assembler.Retain(MapToNewGraph(op.retained())); } @@ -493,6 +532,10 @@ struct OptimizationPhase<Analyzer, Assembler>::Impl { MapToNewGraph(op.frame_state()), op.negated, op.parameters); } + OpIndex ReduceTrapIf(const TrapIfOp& op) { + return assembler.TrapIf(MapToNewGraph(op.condition()), op.negated, + op.trap_id); + } OpIndex ReduceTuple(const TupleOp& op) { return assembler.Tuple(base::VectorOf(MapToNewGraph<4>(op.inputs()))); } diff --git a/deps/v8/src/compiler/turboshaft/recreate-schedule.cc b/deps/v8/src/compiler/turboshaft/recreate-schedule.cc index 52ee5369ca30f5..e8c2563909cb48 100644 --- a/deps/v8/src/compiler/turboshaft/recreate-schedule.cc +++ b/deps/v8/src/compiler/turboshaft/recreate-schedule.cc @@ -88,6 +88,13 @@ struct ScheduleBuilder { return AddNode(machine.Is64() ? machine.Word64Shl() : machine.Word32Shl(), {a, b}); } + Node* RelocatableIntPtrConstant(intptr_t value, RelocInfo::Mode mode) { + return AddNode(machine.Is64() + ? 
common.RelocatableInt64Constant(value, mode) + : common.RelocatableInt32Constant( + base::checked_cast<int32_t>(value), mode), + {}); + } void ProcessOperation(const Operation& op); #define DECL_PROCESS_OPERATION(Name) Node* ProcessOperation(const Name##Op& op); TURBOSHAFT_OPERATION_LIST(DECL_PROCESS_OPERATION) @@ -168,8 +175,8 @@ void ScheduleBuilder::ProcessOperation(const Operation& op) { Node* ScheduleBuilder::ProcessOperation(const WordBinopOp& op) { using Kind = WordBinopOp::Kind; const Operator* o; - switch (op.rep) { - case MachineRepresentation::kWord32: + switch (op.rep.value()) { + case WordRepresentation::Word32(): switch (op.kind) { case Kind::kAdd: o = machine.Int32Add(); @@ -209,7 +216,7 @@ Node* ScheduleBuilder::ProcessOperation(const WordBinopOp& op) { break; } break; - case MachineRepresentation::kWord64: + case WordRepresentation::Word64(): switch (op.kind) { case Kind::kAdd: o = machine.Int64Add(); @@ -242,8 +249,11 @@ Node* ScheduleBuilder::ProcessOperation(const WordBinopOp& op) { o = machine.Word64Xor(); break; case Kind::kSignedMulOverflownBits: + o = machine.Int64MulHigh(); + break; case Kind::kUnsignedMulOverflownBits: - UNREACHABLE(); + o = machine.Uint64MulHigh(); + break; } break; default: @@ -254,8 +264,8 @@ Node* ScheduleBuilder::ProcessOperation(const WordBinopOp& op) { Node* ScheduleBuilder::ProcessOperation(const FloatBinopOp& op) { using Kind = FloatBinopOp::Kind; const Operator* o; - switch (op.rep) { - case MachineRepresentation::kFloat32: + switch (op.rep.value()) { + case FloatRepresentation::Float32(): switch (op.kind) { case Kind::kAdd: o = machine.Float32Add(); @@ -281,7 +291,7 @@ Node* ScheduleBuilder::ProcessOperation(const FloatBinopOp& op) { UNREACHABLE(); } break; - case MachineRepresentation::kFloat64: + case FloatRepresentation::Float64(): switch (op.kind) { case Kind::kAdd: o = machine.Float64Add(); @@ -320,8 +330,8 @@ Node* ScheduleBuilder::ProcessOperation(const FloatBinopOp& op) { Node* ScheduleBuilder::ProcessOperation(const OverflowCheckedBinopOp& op) { const Operator* o; - switch (op.rep) { - case MachineRepresentation::kWord32: + switch (op.rep.value()) { + case WordRepresentation::Word32(): switch (op.kind) { case OverflowCheckedBinopOp::Kind::kSignedAdd: o = machine.Int32AddWithOverflow(); @@ -334,7 +344,7 @@ Node* ScheduleBuilder::ProcessOperation(const OverflowCheckedBinopOp& op) { break; } break; - case MachineRepresentation::kWord64: + case WordRepresentation::Word64(): switch (op.kind) { case OverflowCheckedBinopOp::Kind::kSignedAdd: o = machine.Int64AddWithOverflow(); @@ -352,9 +362,7 @@ Node* ScheduleBuilder::ProcessOperation(const OverflowCheckedBinopOp& op) { return AddNode(o, {GetNode(op.left()), GetNode(op.right())}); } Node* ScheduleBuilder::ProcessOperation(const WordUnaryOp& op) { - DCHECK(op.rep == MachineRepresentation::kWord32 || - op.rep == MachineRepresentation::kWord64); - bool word64 = op.rep == MachineRepresentation::kWord64; + bool word64 = op.rep == WordRepresentation::Word64(); const Operator* o; switch (op.kind) { case WordUnaryOp::Kind::kReverseBytes: @@ -363,13 +371,25 @@ Node* ScheduleBuilder::ProcessOperation(const WordUnaryOp& op) { case WordUnaryOp::Kind::kCountLeadingZeros: o = word64 ? machine.Word64Clz() : machine.Word32Clz(); break; + case WordUnaryOp::Kind::kCountTrailingZeros: + o = word64 ? machine.Word64Ctz().op() : machine.Word32Ctz().op(); + break; + case WordUnaryOp::Kind::kPopCount: + o = word64 ? 
machine.Word64Popcnt().op() : machine.Word32Popcnt().op(); + break; + case WordUnaryOp::Kind::kSignExtend8: + o = word64 ? machine.SignExtendWord8ToInt64() + : machine.SignExtendWord8ToInt32(); + break; + case WordUnaryOp::Kind::kSignExtend16: + o = word64 ? machine.SignExtendWord16ToInt64() + : machine.SignExtendWord16ToInt32(); + break; } return AddNode(o, {GetNode(op.input())}); } Node* ScheduleBuilder::ProcessOperation(const FloatUnaryOp& op) { - DCHECK(op.rep == MachineRepresentation::kFloat32 || - op.rep == MachineRepresentation::kFloat64); - bool float64 = op.rep == MachineRepresentation::kFloat64; + bool float64 = op.rep == FloatRepresentation::Float64(); const Operator* o; switch (op.kind) { case FloatUnaryOp::Kind::kAbs: @@ -398,68 +418,92 @@ Node* ScheduleBuilder::ProcessOperation(const FloatUnaryOp& op) { o = float64 ? machine.Float64Sqrt() : machine.Float32Sqrt(); break; case FloatUnaryOp::Kind::kSilenceNaN: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64SilenceNaN(); break; case FloatUnaryOp::Kind::kLog: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Log(); break; case FloatUnaryOp::Kind::kExp: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Exp(); break; case FloatUnaryOp::Kind::kExpm1: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Expm1(); break; case FloatUnaryOp::Kind::kSin: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Sin(); break; case FloatUnaryOp::Kind::kCos: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Cos(); break; case FloatUnaryOp::Kind::kAsin: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Asin(); break; case FloatUnaryOp::Kind::kAcos: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Acos(); break; case FloatUnaryOp::Kind::kSinh: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Sinh(); break; case FloatUnaryOp::Kind::kCosh: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Cosh(); break; case FloatUnaryOp::Kind::kAsinh: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Asinh(); break; case FloatUnaryOp::Kind::kAcosh: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Acosh(); break; case FloatUnaryOp::Kind::kTan: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Tan(); break; case FloatUnaryOp::Kind::kTanh: - DCHECK_EQ(op.rep, MachineRepresentation::kFloat64); + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); o = machine.Float64Tanh(); break; + case FloatUnaryOp::Kind::kLog2: + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); + o = machine.Float64Log2(); + break; + case FloatUnaryOp::Kind::kLog10: + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); + o = 
machine.Float64Log10(); + break; + case FloatUnaryOp::Kind::kLog1p: + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); + o = machine.Float64Log1p(); + break; + case FloatUnaryOp::Kind::kAtan: + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); + o = machine.Float64Atan(); + break; + case FloatUnaryOp::Kind::kAtanh: + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); + o = machine.Float64Atanh(); + break; + case FloatUnaryOp::Kind::kCbrt: + DCHECK_EQ(op.rep, FloatRepresentation::Float64()); + o = machine.Float64Cbrt(); + break; } return AddNode(o, {GetNode(op.input())}); } Node* ScheduleBuilder::ProcessOperation(const ShiftOp& op) { - DCHECK(op.rep == MachineRepresentation::kWord32 || - op.rep == MachineRepresentation::kWord64); - bool word64 = op.rep == MachineRepresentation::kWord64; + DCHECK(op.rep == WordRepresentation::Word32() || + op.rep == WordRepresentation::Word64()); + bool word64 = op.rep == WordRepresentation::Word64(); const Operator* o; switch (op.kind) { case ShiftOp::Kind::kShiftRightArithmeticShiftOutZeros: @@ -487,16 +531,16 @@ Node* ScheduleBuilder::ProcessOperation(const ShiftOp& op) { Node* ScheduleBuilder::ProcessOperation(const EqualOp& op) { const Operator* o; switch (op.rep) { - case MachineRepresentation::kWord32: + case RegisterRepresentation::Word32(): o = machine.Word32Equal(); break; - case MachineRepresentation::kWord64: + case RegisterRepresentation::Word64(): o = machine.Word64Equal(); break; - case MachineRepresentation::kFloat32: + case RegisterRepresentation::Float32(): o = machine.Float32Equal(); break; - case MachineRepresentation::kFloat64: + case RegisterRepresentation::Float64(): o = machine.Float64Equal(); break; default: @@ -507,7 +551,7 @@ Node* ScheduleBuilder::ProcessOperation(const EqualOp& op) { Node* ScheduleBuilder::ProcessOperation(const ComparisonOp& op) { const Operator* o; switch (op.rep) { - case MachineRepresentation::kWord32: + case RegisterRepresentation::Word32(): switch (op.kind) { case ComparisonOp::Kind::kSignedLessThan: o = machine.Int32LessThan(); @@ -523,7 +567,7 @@ Node* ScheduleBuilder::ProcessOperation(const ComparisonOp& op) { break; } break; - case MachineRepresentation::kWord64: + case RegisterRepresentation::Word64(): switch (op.kind) { case ComparisonOp::Kind::kSignedLessThan: o = machine.Int64LessThan(); @@ -539,7 +583,7 @@ Node* ScheduleBuilder::ProcessOperation(const ComparisonOp& op) { break; } break; - case MachineRepresentation::kFloat32: + case RegisterRepresentation::Float32(): switch (op.kind) { case ComparisonOp::Kind::kSignedLessThan: o = machine.Float32LessThan(); @@ -552,7 +596,7 @@ Node* ScheduleBuilder::ProcessOperation(const ComparisonOp& op) { UNREACHABLE(); } break; - case MachineRepresentation::kFloat64: + case RegisterRepresentation::Float64(): switch (op.kind) { case ComparisonOp::Kind::kSignedLessThan: o = machine.Float64LessThan(); @@ -574,129 +618,194 @@ Node* ScheduleBuilder::ProcessOperation(const ChangeOp& op) { const Operator* o; switch (op.kind) { using Kind = ChangeOp::Kind; + using Assumption = ChangeOp::Assumption; case Kind::kFloatConversion: - if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kFloat32) { + if (op.from == FloatRepresentation::Float64() && + op.to == FloatRepresentation::Float32()) { o = machine.TruncateFloat64ToFloat32(); - } else if (op.from == MachineRepresentation::kFloat32 && - op.to == MachineRepresentation::kFloat64) { + } else if (op.from == FloatRepresentation::Float32() && + op.to == FloatRepresentation::Float64()) { o = 
machine.ChangeFloat32ToFloat64(); } else { UNIMPLEMENTED(); } break; - case Kind::kSignedFloatTruncate: - if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord64) { - o = machine.TruncateFloat64ToInt64(TruncateKind::kArchitectureDefault); - } else if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord32) { - o = machine.RoundFloat64ToInt32(); - } else { - UNIMPLEMENTED(); - } - break; case Kind::kSignedFloatTruncateOverflowToMin: - if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord64) { - o = machine.TruncateFloat64ToInt64(TruncateKind::kSetOverflowToMin); + case Kind::kUnsignedFloatTruncateOverflowToMin: { + bool is_signed = op.kind == Kind::kSignedFloatTruncateOverflowToMin; + if (op.assumption == Assumption::kReversible) { + if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word64()) { + o = is_signed ? machine.ChangeFloat64ToInt64() + : machine.ChangeFloat64ToUint64(); + } else if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word32()) { + o = is_signed ? machine.ChangeFloat64ToInt32() + : machine.ChangeFloat64ToUint32(); + } else { + UNIMPLEMENTED(); + } + break; + } + TruncateKind truncate_kind; + switch (op.assumption) { + case ChangeOp::Assumption::kReversible: + UNREACHABLE(); + case ChangeOp::Assumption::kNoAssumption: + truncate_kind = TruncateKind::kSetOverflowToMin; + break; + case ChangeOp::Assumption::kNoOverflow: + truncate_kind = TruncateKind::kArchitectureDefault; + break; + } + if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word64()) { + DCHECK(is_signed); + o = machine.TruncateFloat64ToInt64(truncate_kind); + } else if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word32()) { + if (is_signed) { + DCHECK_EQ(truncate_kind, TruncateKind::kArchitectureDefault); + o = machine.RoundFloat64ToInt32(); + } else { + o = machine.TruncateFloat32ToUint32(truncate_kind); + } + } else if (op.from == FloatRepresentation::Float32() && + op.to == WordRepresentation::Word32()) { + o = is_signed ? machine.TruncateFloat32ToInt32(truncate_kind) + : machine.TruncateFloat32ToUint32(truncate_kind); + } else { + UNIMPLEMENTED(); + } + break; + } case Kind::kJSFloatTruncate: - if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord32) { + if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word32()) { o = machine.TruncateFloat64ToWord32(); } else { UNIMPLEMENTED(); } break; case Kind::kSignedToFloat: - if (op.from == MachineRepresentation::kWord32 && - op.to == MachineRepresentation::kFloat64) { + if (op.from == WordRepresentation::Word32() && + op.to == FloatRepresentation::Float64()) { + DCHECK_EQ(op.assumption, Assumption::kNoAssumption); + o = machine.ChangeInt32ToFloat64(); + } else if (op.from == WordRepresentation::Word64() && + op.to == FloatRepresentation::Float64()) { + o = op.assumption == Assumption::kReversible + ?
machine.ChangeInt64ToFloat64() + : machine.RoundInt64ToFloat64(); + } else if (op.from == WordRepresentation::Word32() && + op.to == FloatRepresentation::Float32()) { + o = machine.RoundInt32ToFloat32(); + } else if (op.from == WordRepresentation::Word64() && + op.to == FloatRepresentation::Float32()) { + o = machine.RoundInt64ToFloat32(); + } else if (op.from == WordRepresentation::Word32() && + op.to == FloatRepresentation::Float64()) { o = machine.ChangeInt32ToFloat64(); - } else if (op.from == MachineRepresentation::kWord64 && - op.to == MachineRepresentation::kFloat64) { - o = machine.ChangeInt64ToFloat64(); } else { UNIMPLEMENTED(); } break; case Kind::kUnsignedToFloat: - if (op.from == MachineRepresentation::kWord32 && - op.to == MachineRepresentation::kFloat64) { + if (op.from == WordRepresentation::Word32() && + op.to == FloatRepresentation::Float64()) { o = machine.ChangeUint32ToFloat64(); + } else if (op.from == WordRepresentation::Word32() && + op.to == FloatRepresentation::Float32()) { + o = machine.RoundUint32ToFloat32(); + } else if (op.from == WordRepresentation::Word64() && + op.to == FloatRepresentation::Float32()) { + o = machine.RoundUint64ToFloat32(); + } else if (op.from == WordRepresentation::Word64() && + op.to == FloatRepresentation::Float64()) { + o = machine.RoundUint64ToFloat64(); } else { UNIMPLEMENTED(); } break; case Kind::kExtractHighHalf: - DCHECK_EQ(op.from, MachineRepresentation::kFloat64); - DCHECK_EQ(op.to, MachineRepresentation::kWord32); + DCHECK_EQ(op.from, FloatRepresentation::Float64()); + DCHECK_EQ(op.to, WordRepresentation::Word32()); o = machine.Float64ExtractHighWord32(); break; case Kind::kExtractLowHalf: - DCHECK_EQ(op.from, MachineRepresentation::kFloat64); - DCHECK_EQ(op.to, MachineRepresentation::kWord32); + DCHECK_EQ(op.from, FloatRepresentation::Float64()); + DCHECK_EQ(op.to, WordRepresentation::Word32()); o = machine.Float64ExtractLowWord32(); break; case Kind::kBitcast: - if (op.from == MachineRepresentation::kWord32 && - op.to == MachineRepresentation::kWord64) { + if (op.from == WordRepresentation::Word32() && + op.to == WordRepresentation::Word64()) { o = machine.BitcastWord32ToWord64(); - } else if (op.from == MachineRepresentation::kFloat32 && - op.to == MachineRepresentation::kWord32) { + } else if (op.from == FloatRepresentation::Float32() && + op.to == WordRepresentation::Word32()) { o = machine.BitcastFloat32ToInt32(); - } else if (op.from == MachineRepresentation::kWord32 && - op.to == MachineRepresentation::kFloat32) { + } else if (op.from == WordRepresentation::Word32() && + op.to == FloatRepresentation::Float32()) { o = machine.BitcastInt32ToFloat32(); - } else if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord64) { + } else if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word64()) { o = machine.BitcastFloat64ToInt64(); - } else if (op.from == MachineRepresentation::kWord64 && - op.to == MachineRepresentation::kFloat64) { + } else if (op.from == WordRepresentation::Word64() && + op.to == FloatRepresentation::Float64()) { o = machine.BitcastInt64ToFloat64(); } else { UNIMPLEMENTED(); } break; case Kind::kSignExtend: - if (op.from == MachineRepresentation::kWord32 && - op.to == MachineRepresentation::kWord64) { + if (op.from == WordRepresentation::Word32() && + op.to == WordRepresentation::Word64()) { o = machine.ChangeInt32ToInt64(); } else { UNIMPLEMENTED(); } break; case Kind::kZeroExtend: - if (op.from == MachineRepresentation::kWord32 && - op.to == 
MachineRepresentation::kWord64) { + if (op.from == WordRepresentation::Word32() && + op.to == WordRepresentation::Word64()) { o = machine.ChangeUint32ToUint64(); } else { UNIMPLEMENTED(); } break; - case Kind::kSignedNarrowing: - if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord64) { - o = machine.ChangeFloat64ToInt64(); - } else if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord32) { - o = machine.ChangeFloat64ToInt32(); + } + return AddNode(o, {GetNode(op.input())}); +} +Node* ScheduleBuilder::ProcessOperation(const TryChangeOp& op) { + const Operator* o; + switch (op.kind) { + using Kind = TryChangeOp::Kind; + case Kind::kSignedFloatTruncateOverflowUndefined: + if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word64()) { + o = machine.TryTruncateFloat64ToInt64(); + } else if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word32()) { + o = machine.TryTruncateFloat64ToInt32(); + } else if (op.from == FloatRepresentation::Float32() && + op.to == WordRepresentation::Word64()) { + o = machine.TryTruncateFloat32ToInt64(); } else { - UNIMPLEMENTED(); + UNREACHABLE(); } break; - case Kind::kUnsignedNarrowing: - if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord64) { - o = machine.ChangeFloat64ToUint64(); - } else if (op.from == MachineRepresentation::kFloat64 && - op.to == MachineRepresentation::kWord32) { - o = machine.ChangeFloat64ToUint32(); + case Kind::kUnsignedFloatTruncateOverflowUndefined: + if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word64()) { + o = machine.TryTruncateFloat64ToUint64(); + } else if (op.from == FloatRepresentation::Float64() && + op.to == WordRepresentation::Word32()) { + o = machine.TryTruncateFloat64ToUint32(); + } else if (op.from == FloatRepresentation::Float32() && + op.to == WordRepresentation::Word64()) { + o = machine.TryTruncateFloat32ToUint64(); } else { - UNIMPLEMENTED(); + UNREACHABLE(); } break; } @@ -714,17 +823,25 @@ Node* ScheduleBuilder::ProcessOperation(const Float64InsertWord32Op& op) { } Node* ScheduleBuilder::ProcessOperation(const TaggedBitcastOp& op) { const Operator* o; - if (op.from == MachineRepresentation::kTagged && - op.to == MachineType::PointerRepresentation()) { + if (op.from == RegisterRepresentation::Tagged() && + op.to == RegisterRepresentation::PointerSized()) { o = machine.BitcastTaggedToWord(); - } else if (op.from == MachineType::PointerRepresentation() && - op.to == MachineRepresentation::kTagged) { + } else if (op.from == RegisterRepresentation::PointerSized() && + op.to == RegisterRepresentation::Tagged()) { o = machine.BitcastWordToTagged(); } else { UNIMPLEMENTED(); } return AddNode(o, {GetNode(op.input())}); } +Node* ScheduleBuilder::ProcessOperation(const SelectOp& op) { + const Operator* o = op.rep == WordRepresentation::Word32() + ? 
machine.Word32Select().op() + : machine.Word64Select().op(); + return AddNode( + o, {GetNode(op.condition()), GetNode(op.left()), GetNode(op.right())}); +} + Node* ScheduleBuilder::ProcessOperation(const PendingLoopPhiOp& op) { UNREACHABLE(); } @@ -755,6 +872,11 @@ Node* ScheduleBuilder::ProcessOperation(const ConstantOp& op) { return AddNode(common.Float64Constant(op.float64()), {}); case ConstantOp::Kind::kFloat32: return AddNode(common.Float32Constant(op.float32()), {}); + case ConstantOp::Kind::kRelocatableWasmCall: + return RelocatableIntPtrConstant(op.integral(), RelocInfo::WASM_CALL); + case ConstantOp::Kind::kRelocatableWasmStubCall: + return RelocatableIntPtrConstant(op.integral(), + RelocInfo::WASM_STUB_CALL); } } Node* ScheduleBuilder::ProcessOperation(const LoadOp& op) { @@ -764,9 +886,11 @@ Node* ScheduleBuilder::ProcessOperation(const LoadOp& op) { offset -= kHeapObjectTag; } Node* base = GetNode(op.base()); - return AddNode(IsAlignedAccess(op.kind) - ? machine.Load(op.loaded_rep) - : machine.UnalignedLoad(op.loaded_rep), + return AddNode(op.kind == LoadOp::Kind::kRawAligned + ? machine.Load(op.loaded_rep.ToMachineType()) + : op.kind == LoadOp::Kind::kRawUnaligned + ? machine.UnalignedLoad(op.loaded_rep.ToMachineType()) + : machine.ProtectedLoad(op.loaded_rep.ToMachineType()), {base, IntPtrConstant(offset)}); } Node* ScheduleBuilder::ProcessOperation(const IndexedLoadOp& op) { @@ -783,11 +907,24 @@ Node* ScheduleBuilder::ProcessOperation(const IndexedLoadOp& op) { if (offset != 0) { index = IntPtrAdd(index, IntPtrConstant(offset)); } - return AddNode(IsAlignedAccess(op.kind) - ? machine.Load(op.loaded_rep) - : machine.UnalignedLoad(op.loaded_rep), + MachineType loaded_rep = op.loaded_rep.ToMachineType(); + if (op.result_rep == RegisterRepresentation::Compressed()) { + if (loaded_rep == MachineType::AnyTagged()) { + loaded_rep = MachineType::AnyCompressed(); + } else if (loaded_rep == MachineType::TaggedPointer()) { + loaded_rep = MachineType::CompressedPointer(); + } + } + return AddNode(op.kind == LoadOp::Kind::kRawAligned ? machine.Load(loaded_rep) + : op.kind == LoadOp::Kind::kRawUnaligned + ? 
machine.UnalignedLoad(loaded_rep) + : machine.ProtectedLoad(loaded_rep), {base, index}); } +Node* ScheduleBuilder::ProcessOperation(const ProtectedLoadOp& op) { + return AddNode(machine.ProtectedLoad(op.loaded_rep.ToMachineType()), + {GetNode(op.base()), GetNode(op.index())}); +} Node* ScheduleBuilder::ProcessOperation(const StoreOp& op) { intptr_t offset = op.offset; if (op.kind == StoreOp::Kind::kTaggedBase) { @@ -798,10 +935,11 @@ Node* ScheduleBuilder::ProcessOperation(const StoreOp& op) { Node* value = GetNode(op.value()); const Operator* o; if (IsAlignedAccess(op.kind)) { - o = machine.Store(StoreRepresentation(op.stored_rep, op.write_barrier)); + o = machine.Store(StoreRepresentation( + op.stored_rep.ToMachineType().representation(), op.write_barrier)); } else { DCHECK_EQ(op.write_barrier, WriteBarrierKind::kNoWriteBarrier); - o = machine.UnalignedStore(op.stored_rep); + o = machine.UnalignedStore(op.stored_rep.ToMachineType().representation()); } return AddNode(o, {base, IntPtrConstant(offset), value}); } @@ -822,13 +960,19 @@ Node* ScheduleBuilder::ProcessOperation(const IndexedStoreOp& op) { } const Operator* o; if (IsAlignedAccess(op.kind)) { - o = machine.Store(StoreRepresentation(op.stored_rep, op.write_barrier)); + o = machine.Store(StoreRepresentation( + op.stored_rep.ToMachineType().representation(), op.write_barrier)); } else { DCHECK_EQ(op.write_barrier, WriteBarrierKind::kNoWriteBarrier); - o = machine.UnalignedStore(op.stored_rep); + o = machine.UnalignedStore(op.stored_rep.ToMachineType().representation()); } return AddNode(o, {base, index, value}); } +Node* ScheduleBuilder::ProcessOperation(const ProtectedStoreOp& op) { + return AddNode( + machine.ProtectedStore(op.stored_rep.ToMachineType().representation()), + {GetNode(op.base()), GetNode(op.index()), GetNode(op.value())}); +} Node* ScheduleBuilder::ProcessOperation(const RetainOp& op) { return AddNode(common.Retain(), {GetNode(op.retained())}); } @@ -894,6 +1038,12 @@ Node* ScheduleBuilder::ProcessOperation(const DeoptimizeIfOp& op) { op.parameters->feedback()); return AddNode(o, {condition, frame_state}); } +Node* ScheduleBuilder::ProcessOperation(const TrapIfOp& op) { + Node* condition = GetNode(op.condition()); + const Operator* o = + op.negated ? common.TrapUnless(op.trap_id) : common.TrapIf(op.trap_id); + return AddNode(o, {condition}); +} Node* ScheduleBuilder::ProcessOperation(const DeoptimizeOp& op) { Node* frame_state = GetNode(op.frame_state()); const Operator* o = @@ -909,7 +1059,8 @@ Node* ScheduleBuilder::ProcessOperation(const PhiOp& op) { Node* input = GetNode(op.input(0)); // The second `input` is a placeholder that is patched when we process the // backedge. 
- Node* node = AddNode(common.Phi(op.rep, 2), {input, input}); + Node* node = + AddNode(common.Phi(op.rep.machine_representation(), 2), {input, input}); loop_phis.emplace_back(node, op.input(1)); return node; } else { @@ -917,7 +1068,8 @@ Node* ScheduleBuilder::ProcessOperation(const PhiOp& op) { for (OpIndex i : op.inputs()) { inputs.push_back(GetNode(i)); } - return AddNode(common.Phi(op.rep, op.input_count), base::VectorOf(inputs)); + return AddNode(common.Phi(op.rep.machine_representation(), op.input_count), + base::VectorOf(inputs)); } } Node* ScheduleBuilder::ProcessOperation(const ProjectionOp& op) { @@ -1052,6 +1204,17 @@ Node* ScheduleBuilder::ProcessOperation(const CallOp& op) { } return AddNode(common.Call(op.descriptor), base::VectorOf(inputs)); } +Node* ScheduleBuilder::ProcessOperation(const TailCallOp& op) { + base::SmallVector<Node*, 16> inputs; + inputs.push_back(GetNode(op.callee())); + for (OpIndex i : op.arguments()) { + inputs.push_back(GetNode(i)); + } + Node* call = MakeNode(common.TailCall(op.descriptor), base::VectorOf(inputs)); + schedule->AddTailCall(current_block, call); + current_block = nullptr; + return nullptr; +} Node* ScheduleBuilder::ProcessOperation(const UnreachableOp& op) { Node* node = MakeNode(common.Throw(), {}); schedule->AddThrow(current_block, node); diff --git a/deps/v8/src/compiler/turboshaft/recreate-schedule.h b/deps/v8/src/compiler/turboshaft/recreate-schedule.h index b0b1932670abc6..8fb3108775ed3a 100644 --- a/deps/v8/src/compiler/turboshaft/recreate-schedule.h +++ b/deps/v8/src/compiler/turboshaft/recreate-schedule.h @@ -7,7 +7,7 @@ #include "src/compiler/compiler-source-position-table.h" #include "src/compiler/node-origin-table.h" -#include "src/compiler/node.h" + namespace v8::internal { class Zone; } diff --git a/deps/v8/src/compiler/turboshaft/representations.cc b/deps/v8/src/compiler/turboshaft/representations.cc new file mode 100644 index 00000000000000..da29b989517144 --- /dev/null +++ b/deps/v8/src/compiler/turboshaft/representations.cc @@ -0,0 +1,59 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +#include "src/compiler/turboshaft/representations.h" + +namespace v8::internal::compiler::turboshaft { + +std::ostream& operator<<(std::ostream& os, RegisterRepresentation rep) { + switch (rep) { + case RegisterRepresentation::Word32(): + return os << "Word32"; + case RegisterRepresentation::Word64(): + return os << "Word64"; + case RegisterRepresentation::Float32(): + return os << "Float32"; + case RegisterRepresentation::Float64(): + return os << "Float64"; + case RegisterRepresentation::Tagged(): + return os << "Tagged"; + case RegisterRepresentation::Compressed(): + return os << "Compressed"; + } +} + +std::ostream& operator<<(std::ostream& os, MemoryRepresentation rep) { + switch (rep) { + case MemoryRepresentation::Int8(): + return os << "Int8"; + case MemoryRepresentation::Uint8(): + return os << "Uint8"; + case MemoryRepresentation::Int16(): + return os << "Int16"; + case MemoryRepresentation::Uint16(): + return os << "Uint16"; + case MemoryRepresentation::Int32(): + return os << "Int32"; + case MemoryRepresentation::Uint32(): + return os << "Uint32"; + case MemoryRepresentation::Int64(): + return os << "Int64"; + case MemoryRepresentation::Uint64(): + return os << "Uint64"; + case MemoryRepresentation::Float32(): + return os << "Float32"; + case MemoryRepresentation::Float64(): + return os << "Float64"; + case MemoryRepresentation::AnyTagged(): + return os << "AnyTagged"; + case MemoryRepresentation::TaggedPointer(): + return os << "TaggedPointer"; + case MemoryRepresentation::TaggedSigned(): + return os << "TaggedSigned"; + case MemoryRepresentation::SandboxedPointer(): + return os << "SandboxedPointer"; + } +} + +} // namespace v8::internal::compiler::turboshaft diff --git a/deps/v8/src/compiler/turboshaft/representations.h b/deps/v8/src/compiler/turboshaft/representations.h new file mode 100644 index 00000000000000..b99ffeec7c6d4b --- /dev/null +++ b/deps/v8/src/compiler/turboshaft/representations.h @@ -0,0 +1,604 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_COMPILER_TURBOSHAFT_REPRESENTATIONS_H_ +#define V8_COMPILER_TURBOSHAFT_REPRESENTATIONS_H_ + +#include <cstdint> + +#include "src/base/functional.h" +#include "src/base/logging.h" +#include "src/codegen/machine-type.h" +#include "src/compiler/turboshaft/utils.h" + +namespace v8::internal::compiler::turboshaft { + +class WordRepresentation; +class FloatRepresentation; + +class RegisterRepresentation { + public: + enum class Enum : uint8_t { + kWord32, + kWord64, + kFloat32, + kFloat64, + kTagged, + kCompressed + }; + + explicit constexpr RegisterRepresentation(Enum value) : value_(value) {} + RegisterRepresentation() : value_(kInvalid) {} + + constexpr Enum value() const { + DCHECK_NE(value_, kInvalid); + return value_; + } + constexpr operator Enum() const { return value(); } + + static constexpr RegisterRepresentation Word32() { + return RegisterRepresentation(Enum::kWord32); + } + static constexpr RegisterRepresentation Word64() { + return RegisterRepresentation(Enum::kWord64); + } + static constexpr RegisterRepresentation Float32() { + return RegisterRepresentation(Enum::kFloat32); + } + static constexpr RegisterRepresentation Float64() { + return RegisterRepresentation(Enum::kFloat64); + } + // A tagged pointer stored in a register, in the case of pointer compression + // it is an uncompressed pointer or a Smi. 
+ static constexpr RegisterRepresentation Tagged() { + return RegisterRepresentation(Enum::kTagged); + } + // A compressed tagged pointer stored in a register; the upper 32 bits are + // unspecified. + static constexpr RegisterRepresentation Compressed() { + return RegisterRepresentation(Enum::kCompressed); + } + // The equivalent of intptr_t/uintptr_t: An integral type with the same size + // as machine pointers. + static constexpr RegisterRepresentation PointerSized() { + if constexpr (kSystemPointerSize == 4) { + return Word32(); + } else { + DCHECK_EQ(kSystemPointerSize, 8); + return Word64(); + } + } + + constexpr bool IsWord() { + switch (*this) { + case Enum::kWord32: + case Enum::kWord64: + return true; + case Enum::kFloat32: + case Enum::kFloat64: + case Enum::kTagged: + case Enum::kCompressed: + return false; + } + } + + bool IsFloat() { + switch (*this) { + case Enum::kFloat32: + case Enum::kFloat64: + return true; + case Enum::kWord32: + case Enum::kWord64: + case Enum::kTagged: + case Enum::kCompressed: + return false; + } + } + + uint64_t MaxUnsignedValue() const { + switch (this->value()) { + case Word32(): + return std::numeric_limits<uint32_t>::max(); + case Word64(): + return std::numeric_limits<uint64_t>::max(); + case Enum::kFloat32: + case Enum::kFloat64: + case Enum::kTagged: + case Enum::kCompressed: + UNREACHABLE(); + } + } + + MachineRepresentation machine_representation() const { + switch (*this) { + case Word32(): + return MachineRepresentation::kWord32; + case Word64(): + return MachineRepresentation::kWord64; + case Float32(): + return MachineRepresentation::kFloat32; + case Float64(): + return MachineRepresentation::kFloat64; + case Tagged(): + return MachineRepresentation::kTagged; + case Compressed(): + return MachineRepresentation::kCompressed; + } + } + + constexpr uint16_t bit_width() const { + switch (*this) { + case Word32(): + return 32; + case Word64(): + return 64; + case Float32(): + return 32; + case Float64(): + return 64; + case Tagged(): + return kSystemPointerSize * kBitsPerByte; + case Compressed(): + return kSystemPointerSize * kBitsPerByte; + } + } + + static RegisterRepresentation FromMachineRepresentation( + MachineRepresentation rep) { + switch (rep) { + case MachineRepresentation::kBit: + case MachineRepresentation::kWord8: + case MachineRepresentation::kWord16: + case MachineRepresentation::kWord32: + return Word32(); + case MachineRepresentation::kWord64: + return Word64(); + case MachineRepresentation::kTaggedSigned: + case MachineRepresentation::kTaggedPointer: + case MachineRepresentation::kTagged: + return Tagged(); + case MachineRepresentation::kCompressedPointer: + case MachineRepresentation::kCompressed: + return Compressed(); + case MachineRepresentation::kFloat32: + return Float32(); + case MachineRepresentation::kFloat64: + return Float64(); + case MachineRepresentation::kMapWord: + case MachineRepresentation::kSandboxedPointer: + case MachineRepresentation::kNone: + case MachineRepresentation::kSimd128: + case MachineRepresentation::kSimd256: + UNREACHABLE(); + } + } + + private: + Enum value_; + + static constexpr Enum kInvalid = static_cast<Enum>(-1); +}; + +V8_INLINE bool operator==(RegisterRepresentation a, RegisterRepresentation b) { + return a.value() == b.value(); +} +V8_INLINE bool operator!=(RegisterRepresentation a, RegisterRepresentation b) { + return a.value() != b.value(); +} + +V8_INLINE size_t hash_value(RegisterRepresentation rep) { + return static_cast<size_t>(rep.value()); +} + +std::ostream& operator<<(std::ostream& os,
RegisterRepresentation rep); + +class WordRepresentation : public RegisterRepresentation { + public: + enum class Enum : uint8_t { + kWord32 = static_cast<int>(RegisterRepresentation::Enum::kWord32), + kWord64 = static_cast<int>(RegisterRepresentation::Enum::kWord64) + }; + explicit constexpr WordRepresentation(Enum value) + : RegisterRepresentation( + static_cast<RegisterRepresentation::Enum>(value)) {} + WordRepresentation() = default; + explicit constexpr WordRepresentation(RegisterRepresentation rep) + : WordRepresentation(static_cast<Enum>(rep.value())) { + DCHECK(rep.IsWord()); + } + + static constexpr WordRepresentation Word32() { + return WordRepresentation(Enum::kWord32); + } + static constexpr WordRepresentation Word64() { + return WordRepresentation(Enum::kWord64); + } + + static constexpr WordRepresentation PointerSized() { + return WordRepresentation(RegisterRepresentation::PointerSized()); + } + + constexpr Enum value() const { + return static_cast<Enum>(RegisterRepresentation::value()); + } + constexpr operator Enum() const { return value(); } + + constexpr uint64_t MaxUnsignedValue() const { + switch (this->value()) { + case Word32(): + return std::numeric_limits<uint32_t>::max(); + case Word64(): + return std::numeric_limits<uint64_t>::max(); + } + } + constexpr int64_t MinSignedValue() const { + switch (this->value()) { + case Word32(): + return std::numeric_limits<int32_t>::min(); + case Word64(): + return std::numeric_limits<int64_t>::min(); + } + } + constexpr int64_t MaxSignedValue() const { + switch (this->value()) { + case Word32(): + return std::numeric_limits<int32_t>::max(); + case Word64(): + return std::numeric_limits<int64_t>::max(); + } + } +}; + +class FloatRepresentation : public RegisterRepresentation { + public: + enum class Enum : uint8_t { + kFloat32 = static_cast<int>(RegisterRepresentation::Enum::kFloat32), + kFloat64 = static_cast<int>(RegisterRepresentation::Enum::kFloat64) + }; + + static constexpr FloatRepresentation Float32() { + return FloatRepresentation(Enum::kFloat32); + } + static constexpr FloatRepresentation Float64() { + return FloatRepresentation(Enum::kFloat64); + } + + explicit constexpr FloatRepresentation(Enum value) + : RegisterRepresentation( + static_cast<RegisterRepresentation::Enum>(value)) {} + FloatRepresentation() = default; + + constexpr Enum value() const { + return static_cast<Enum>(RegisterRepresentation::value()); + } + constexpr operator Enum() const { return value(); } +}; + +class MemoryRepresentation { + public: + enum class Enum : uint8_t { + kInt8, + kUint8, + kInt16, + kUint16, + kInt32, + kUint32, + kInt64, + kUint64, + kFloat32, + kFloat64, + kAnyTagged, + kTaggedPointer, + kTaggedSigned, + kSandboxedPointer, + }; + + explicit constexpr MemoryRepresentation(Enum value) : value_(value) {} + MemoryRepresentation() : value_(kInvalid) {} + constexpr Enum value() const { + DCHECK_NE(value_, kInvalid); + return value_; + } + constexpr operator Enum() const { return value(); } + + static constexpr MemoryRepresentation Int8() { + return MemoryRepresentation(Enum::kInt8); + } + static constexpr MemoryRepresentation Uint8() { + return MemoryRepresentation(Enum::kUint8); + } + static constexpr MemoryRepresentation Int16() { + return MemoryRepresentation(Enum::kInt16); + } + static constexpr MemoryRepresentation Uint16() { + return MemoryRepresentation(Enum::kUint16); + } + static constexpr MemoryRepresentation Int32() { + return MemoryRepresentation(Enum::kInt32); + } + static constexpr MemoryRepresentation Uint32() { + 
return MemoryRepresentation(Enum::kUint32); + } + static constexpr MemoryRepresentation Int64() { + return MemoryRepresentation(Enum::kInt64); + } + static constexpr MemoryRepresentation Uint64() { + return MemoryRepresentation(Enum::kUint64); + } + static constexpr MemoryRepresentation Float32() { + return MemoryRepresentation(Enum::kFloat32); + } + static constexpr MemoryRepresentation Float64() { + return MemoryRepresentation(Enum::kFloat64); + } + static constexpr MemoryRepresentation AnyTagged() { + return MemoryRepresentation(Enum::kAnyTagged); + } + static constexpr MemoryRepresentation TaggedPointer() { + return MemoryRepresentation(Enum::kTaggedPointer); + } + static constexpr MemoryRepresentation TaggedSigned() { + return MemoryRepresentation(Enum::kTaggedSigned); + } + static constexpr MemoryRepresentation SandboxedPointer() { + return MemoryRepresentation(Enum::kSandboxedPointer); + } + + bool IsWord() const { + switch (*this) { + case Int8(): + case Uint8(): + case Int16(): + case Uint16(): + case Int32(): + case Uint32(): + case Int64(): + case Uint64(): + return true; + case Float32(): + case Float64(): + case AnyTagged(): + case TaggedPointer(): + case TaggedSigned(): + case SandboxedPointer(): + return false; + } + } + + bool IsSigned() const { + switch (*this) { + case Int8(): + case Int16(): + case Int32(): + case Int64(): + return true; + case Uint8(): + case Uint16(): + case Uint32(): + case Uint64(): + return false; + case Float32(): + case Float64(): + case AnyTagged(): + case TaggedPointer(): + case TaggedSigned(): + case SandboxedPointer(): + DCHECK(false); + return false; + } + } + + bool IsTagged() const { + switch (*this) { + case AnyTagged(): + case TaggedPointer(): + case TaggedSigned(): + return true; + case Int8(): + case Int16(): + case Int32(): + case Int64(): + case Uint8(): + case Uint16(): + case Uint32(): + case Uint64(): + case Float32(): + case Float64(): + case SandboxedPointer(): + return false; + } + } + + bool CanBeTaggedPointer() const { + switch (*this) { + case AnyTagged(): + case TaggedPointer(): + return true; + case TaggedSigned(): + case Int8(): + case Int16(): + case Int32(): + case Int64(): + case Uint8(): + case Uint16(): + case Uint32(): + case Uint64(): + case Float32(): + case Float64(): + case SandboxedPointer(): + return false; + } + } + + RegisterRepresentation ToRegisterRepresentation() const { + switch (*this) { + case Int8(): + case Uint8(): + case Int16(): + case Uint16(): + case Int32(): + case Uint32(): + return RegisterRepresentation::Word32(); + case Int64(): + case Uint64(): + return RegisterRepresentation::Word64(); + case Float32(): + return RegisterRepresentation::Float32(); + case Float64(): + return RegisterRepresentation::Float64(); + case AnyTagged(): + case TaggedPointer(): + case TaggedSigned(): + return RegisterRepresentation::Tagged(); + case SandboxedPointer(): + return RegisterRepresentation::Word64(); + } + } + + MachineType ToMachineType() const { + switch (*this) { + case Int8(): + return MachineType::Int8(); + case Uint8(): + return MachineType::Uint8(); + case Int16(): + return MachineType::Int16(); + case Uint16(): + return MachineType::Uint16(); + case Int32(): + return MachineType::Int32(); + case Uint32(): + return MachineType::Uint32(); + case Int64(): + return MachineType::Int64(); + case Uint64(): + return MachineType::Uint64(); + case Float32(): + return MachineType::Float32(); + case Float64(): + return MachineType::Float64(); + case AnyTagged(): + return MachineType::AnyTagged(); + case 
TaggedPointer(): + return MachineType::TaggedPointer(); + case TaggedSigned(): + return MachineType::TaggedSigned(); + case SandboxedPointer(): + return MachineType::SandboxedPointer(); + } + } + + static MemoryRepresentation FromMachineType(MachineType type) { + switch (type.representation()) { + case MachineRepresentation::kWord8: + return type.IsSigned() ? Int8() : Uint8(); + case MachineRepresentation::kWord16: + return type.IsSigned() ? Int16() : Uint16(); + case MachineRepresentation::kWord32: + return type.IsSigned() ? Int32() : Uint32(); + case MachineRepresentation::kWord64: + return type.IsSigned() ? Int64() : Uint64(); + case MachineRepresentation::kTaggedSigned: + return TaggedSigned(); + case MachineRepresentation::kTaggedPointer: + return TaggedPointer(); + case MachineRepresentation::kTagged: + return AnyTagged(); + case MachineRepresentation::kFloat32: + return Float32(); + case MachineRepresentation::kFloat64: + return Float64(); + case MachineRepresentation::kSandboxedPointer: + return SandboxedPointer(); + case MachineRepresentation::kNone: + case MachineRepresentation::kMapWord: + case MachineRepresentation::kBit: + case MachineRepresentation::kSimd128: + case MachineRepresentation::kSimd256: + case MachineRepresentation::kCompressedPointer: + case MachineRepresentation::kCompressed: + UNREACHABLE(); + } + } + + static MemoryRepresentation FromMachineRepresentation( + MachineRepresentation rep) { + switch (rep) { + case MachineRepresentation::kWord8: + return Uint8(); + case MachineRepresentation::kWord16: + return Uint16(); + case MachineRepresentation::kWord32: + return Uint32(); + case MachineRepresentation::kWord64: + return Uint64(); + case MachineRepresentation::kTaggedSigned: + return TaggedSigned(); + case MachineRepresentation::kTaggedPointer: + return TaggedPointer(); + case MachineRepresentation::kTagged: + return AnyTagged(); + case MachineRepresentation::kFloat32: + return Float32(); + case MachineRepresentation::kFloat64: + return Float64(); + case MachineRepresentation::kSandboxedPointer: + return SandboxedPointer(); + case MachineRepresentation::kNone: + case MachineRepresentation::kMapWord: + case MachineRepresentation::kBit: + case MachineRepresentation::kSimd128: + case MachineRepresentation::kSimd256: + case MachineRepresentation::kCompressedPointer: + case MachineRepresentation::kCompressed: + UNREACHABLE(); + } + } + + uint8_t SizeInBytes() const { + switch (*this) { + case Int8(): + case Uint8(): + return 1; + case Int16(): + case Uint16(): + return 2; + case Int32(): + case Uint32(): + case Float32(): + return 4; + case Int64(): + case Uint64(): + case Float64(): + case SandboxedPointer(): + return 8; + case AnyTagged(): + case TaggedPointer(): + case TaggedSigned(): + return kTaggedSize; + } + } + + private: + Enum value_; + + static constexpr Enum kInvalid = static_cast<Enum>(-1); +}; + +V8_INLINE bool operator==(MemoryRepresentation a, MemoryRepresentation b) { + return a.value() == b.value(); +} +V8_INLINE bool operator!=(MemoryRepresentation a, MemoryRepresentation b) { + return a.value() != b.value(); +} + +V8_INLINE size_t hash_value(MemoryRepresentation rep) { + return static_cast<size_t>(rep.value()); +} + +std::ostream& operator<<(std::ostream& os, MemoryRepresentation rep); + +} // namespace v8::internal::compiler::turboshaft + +#endif // V8_COMPILER_TURBOSHAFT_REPRESENTATIONS_H_ diff --git a/deps/v8/src/compiler/turboshaft/simplify-tf-loops.cc b/deps/v8/src/compiler/turboshaft/simplify-tf-loops.cc new file mode 100644 index 
00000000000000..e358dec90d771c --- /dev/null +++ b/deps/v8/src/compiler/turboshaft/simplify-tf-loops.cc @@ -0,0 +1,58 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "src/compiler/turboshaft/simplify-tf-loops.h" + +#include "src/base/small-vector.h" +#include "src/compiler/machine-graph.h" +#include "src/compiler/node-properties.h" + +namespace v8::internal::compiler { + +Reduction SimplifyTFLoops::Reduce(Node* node) { + if (node->opcode() != IrOpcode::kLoop) return NoChange(); + if (node->InputCount() <= 2) return NoChange(); + + Node* new_loop = mcgraph_->graph()->NewNode(mcgraph_->common()->Loop(2), + node->InputAt(0), node); + node->RemoveInput(0); + NodeProperties::ChangeOp(node, mcgraph_->common()->Merge(node->InputCount())); + + base::SmallVector<Edge, 4> control_uses; + + for (Edge edge : node->use_edges()) { + Node* use = edge.from(); + if (!NodeProperties::IsPhi(use)) { + control_uses.emplace_back(edge); + continue; + } + Node* dominating_input = use->InputAt(0); + use->RemoveInput(0); + NodeProperties::ChangeOp( + use, use->opcode() == IrOpcode::kPhi + ? mcgraph_->common()->Phi(PhiRepresentationOf(use->op()), + use->InputCount() - 1) + : mcgraph_->common()->EffectPhi(use->InputCount() - 1)); + + Node* new_phi = mcgraph_->graph()->NewNode( + use->opcode() == IrOpcode::kPhi + ? mcgraph_->common()->Phi(PhiRepresentationOf(use->op()), 2) + : mcgraph_->common()->EffectPhi(2), + dominating_input, use, new_loop); + + ReplaceWithValue(use, new_phi, new_phi, new_phi); + // Restore the use <- new_phi edge we just broke. + new_phi->ReplaceInput(1, use); + } + + for (Edge edge : control_uses) { + if (edge.from() != new_loop) { + edge.from()->ReplaceInput(edge.index(), new_loop); + } + } + + return NoChange(); +} + +} // namespace v8::internal::compiler diff --git a/deps/v8/src/compiler/turboshaft/simplify-tf-loops.h b/deps/v8/src/compiler/turboshaft/simplify-tf-loops.h new file mode 100644 index 00000000000000..b045e653882a0d --- /dev/null +++ b/deps/v8/src/compiler/turboshaft/simplify-tf-loops.h @@ -0,0 +1,31 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_COMPILER_TURBOSHAFT_SIMPLIFY_TF_LOOPS_H_ +#define V8_COMPILER_TURBOSHAFT_SIMPLIFY_TF_LOOPS_H_ + +#include "src/compiler/graph-reducer.h" + +namespace v8::internal::compiler { + +class MachineGraph; + +// Constrain loop nodes to have at most two inputs, by introducing additional +// merges as needed. +class SimplifyTFLoops final : public AdvancedReducer { + public: + SimplifyTFLoops(Editor* editor, MachineGraph* mcgraph) + : AdvancedReducer(editor), mcgraph_(mcgraph) {} + + const char* reducer_name() const override { return "SimplifyTFLoops"; } + + Reduction Reduce(Node* node) final; + + private: + MachineGraph* const mcgraph_; +}; + +} // namespace v8::internal::compiler + +#endif // V8_COMPILER_TURBOSHAFT_SIMPLIFY_TF_LOOPS_H_ diff --git a/deps/v8/src/compiler/turboshaft/utils.cc b/deps/v8/src/compiler/turboshaft/utils.cc new file mode 100644 index 00000000000000..7d56af0f3e6742 --- /dev/null +++ b/deps/v8/src/compiler/turboshaft/utils.cc @@ -0,0 +1,26 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
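+// A usage sketch (assuming the usual V8 mapping from FLAG_foo_bar macros to
+// --foo-bar command-line flags): every potential optimization step draws a
+// global sequence number; running with --turboshaft-opt-bisect-limit=N makes
+// all steps numbered >= N report "skip", and --turboshaft-opt-bisect-break=M
+// breaks into the debugger at step M, so a misbehaving optimization step can
+// be located by binary search over N.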
+ +#include "src/compiler/turboshaft/utils.h" + +#include "src/base/platform/platform.h" +#include "src/flags/flags.h" + +namespace v8::internal::compiler::turboshaft { + +#ifdef DEBUG +bool ShouldSkipOptimizationStep() { + static std::atomic<uint64_t> counter{0}; + uint64_t current = counter++; + if (current == FLAG_turboshaft_opt_bisect_break) { + base::OS::DebugBreak(); + } + if (current >= FLAG_turboshaft_opt_bisect_limit) { + return true; + } + return false; +} +#endif // DEBUG + +} // namespace v8::internal::compiler::turboshaft diff --git a/deps/v8/src/compiler/turboshaft/utils.h b/deps/v8/src/compiler/turboshaft/utils.h index 03ad96dd67cbab..5ef2a67ee88fc1 100644 --- a/deps/v8/src/compiler/turboshaft/utils.h +++ b/deps/v8/src/compiler/turboshaft/utils.h @@ -53,11 +53,6 @@ struct all_of : std::tuple<const Ts&...> { return ((value == std::get<indices>(*this)) && ...); } - template <class T, size_t... indices> - bool AllNotEqualTo(const T& value, std::index_sequence<indices...>) { - return ((value != std::get<indices>(*this)) && ...); - } - template <size_t... indices> std::ostream& PrintTo(std::ostream& os, std::index_sequence<indices...>) { bool first = true; @@ -76,16 +71,17 @@ bool operator==(all_of<Ts...> values, const T& target) { return values.AllEqualTo(target, std::index_sequence_for<Ts...>{}); } -template <class T, class... Ts> -bool operator!=(const T& target, all_of<Ts...> values) { - return values.AllNotEqualTo(target, std::index_sequence_for<Ts...>{}); -} - template <class... Ts> std::ostream& operator<<(std::ostream& os, all_of<Ts...> all) { return all.PrintTo(os, std::index_sequence_for<Ts...>{}); } +#ifdef DEBUG +bool ShouldSkipOptimizationStep(); +#else +inline bool ShouldSkipOptimizationStep() { return false; } +#endif + } // namespace v8::internal::compiler::turboshaft #endif // V8_COMPILER_TURBOSHAFT_UTILS_H_ diff --git a/deps/v8/src/compiler/turboshaft/value-numbering-assembler.h b/deps/v8/src/compiler/turboshaft/value-numbering-assembler.h index c63841d7daf4ec..341eeb8fbd796b 100644 --- a/deps/v8/src/compiler/turboshaft/value-numbering-assembler.h +++ b/deps/v8/src/compiler/turboshaft/value-numbering-assembler.h @@ -77,14 +77,15 @@ class ValueNumberingAssembler : public Assembler { public: ValueNumberingAssembler(Graph* graph, Zone* phase_zone) - : Assembler(graph, phase_zone), depths_heads_(phase_zone) { + : Assembler(graph, phase_zone), + dominator_path_(phase_zone), + depths_heads_(phase_zone) { table_ = phase_zone->NewVector<Entry>( base::bits::RoundUpToPowerOfTwo( std::max<size_t>(128, graph->op_id_capacity() / 2)), Entry()); entry_count_ = 0; mask_ = table_.size() - 1; - current_depth_ = -1; } #define EMIT_OP(Name) \ @@ -99,25 +100,30 @@ class ValueNumberingAssembler : public Assembler { TURBOSHAFT_OPERATION_LIST(EMIT_OP) #undef EMIT_OP - void EnterBlock(const Block& block) { - int new_depth = block.Depth(); - // Remember that this assembler should only be used for OptimizationPhases - // that visit the graph in VisitOrder::kDominator order. We can't properly - // check that here, but we do two checks, which should be enough to ensure - // that we are actually visiting the graph in dominator order: - // - There should be only one block at depth 0 (the root). - // - There should be no "jumps" downward in the dominator tree ({new_depth} - // cannot be lower than {current_depth}+1). 
- DCHECK_IMPLIES(current_depth_ == 0, new_depth != 0); - DCHECK_LE(new_depth, current_depth_ + 1); - if (new_depth <= current_depth_) { - while (current_depth_ >= new_depth) { + bool Bind(Block* block) { + if (!Base::Bind(block)) return false; + ResetToBlock(block); + dominator_path_.push_back(block); + depths_heads_.push_back(nullptr); + return true; + } + + // Resets {table_} up to the first dominator of {block} that it contains. + void ResetToBlock(Block* block) { + Block* target = block->GetDominator(); + while (!dominator_path_.empty() && target != nullptr && + dominator_path_.back() != target) { + if (dominator_path_.back()->Depth() > target->Depth()) { ClearCurrentDepthEntries(); - --current_depth_; + } else if (dominator_path_.back()->Depth() < target->Depth()) { + target = target->GetDominator(); + } else { + // {target} and {dominator_path.back} have the same depth but are not + // equal, so we go one level up for both. + ClearCurrentDepthEntries(); + target = target->GetDominator(); + } } - current_depth_ = new_depth; - depths_heads_.push_back(nullptr); } private: @@ -176,6 +182,7 @@ class ValueNumberingAssembler : public Assembler { --entry_count_; } depths_heads_.pop_back(); + dominator_path_.pop_back(); } // If the table is too full, double its size and re-insert the old entries. @@ -254,7 +261,7 @@ class ValueNumberingAssembler : public Assembler { return V8_LIKELY(entry > table_.begin()) ? entry - 1 : table_.end() - 1; } - int current_depth_; + ZoneVector<Block*> dominator_path_; base::Vector<Entry> table_; size_t mask_; size_t entry_count_; diff --git a/deps/v8/src/compiler/type-cache.h b/deps/v8/src/compiler/type-cache.h index 00c3ba47fed68a..48638c0a6324ed 100644 --- a/deps/v8/src/compiler/type-cache.h +++ b/deps/v8/src/compiler/type-cache.h @@ -5,6 +5,7 @@ #ifndef V8_COMPILER_TYPE_CACHE_H_ #define V8_COMPILER_TYPE_CACHE_H_ +#include "src/compiler/globals.h" #include "src/compiler/types.h" #include "src/date/date.h" #include "src/objects/js-array-buffer.h" @@ -202,10 +203,6 @@ class V8_EXPORT_PRIVATE TypeCache final { } Zone* zone() { return &zone_; } - - static constexpr double kMaxDoubleRepresentableInt64 = 9223372036854774784.0; - static constexpr double kMaxDoubleRepresentableUint64 = - 18446744073709549568.0; }; } // namespace compiler diff --git a/deps/v8/src/compiler/typed-optimization.cc b/deps/v8/src/compiler/typed-optimization.cc index bdeb44188cc4f6..bdc94dc904c39f 100644 --- a/deps/v8/src/compiler/typed-optimization.cc +++ b/deps/v8/src/compiler/typed-optimization.cc @@ -580,6 +580,11 @@ Reduction TypedOptimization::ReduceStringLength(Node* node) { // The first value input to the {input} is the resulting length. return Replace(input->InputAt(0)); } + case IrOpcode::kStringFromSingleCharCode: { + // Note that this isn't valid for StringFromCodePointAt, since the + // string it returns can be 1 or 2 characters long.
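+ // StringFromSingleCharCode, by contrast, always produces a string of
+ // exactly one character, so the length can be replaced by the constant 1.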
+ return Replace(jsgraph()->Constant(1)); + } default: break; } diff --git a/deps/v8/src/compiler/typer.cc b/deps/v8/src/compiler/typer.cc index b261b5e1cd0330..fbb675a6bb9efe 100644 --- a/deps/v8/src/compiler/typer.cc +++ b/deps/v8/src/compiler/typer.cc @@ -434,6 +434,9 @@ Type Typer::Visitor::BitwiseNot(Type type, Typer* t) { if (type.Is(Type::Number())) { return NumberBitwiseXor(type, t->cache_->kSingletonMinusOne, t); } + if (type.Is(Type::BigInt())) { + return Type::BigInt(); + } return Type::Numeric(); } @@ -442,6 +445,9 @@ Type Typer::Visitor::Decrement(Type type, Typer* t) { if (type.Is(Type::Number())) { return NumberSubtract(type, t->cache_->kSingletonOne, t); } + if (type.Is(Type::BigInt())) { + return Type::BigInt(); + } return Type::Numeric(); } @@ -450,6 +456,9 @@ Type Typer::Visitor::Increment(Type type, Typer* t) { if (type.Is(Type::Number())) { return NumberAdd(type, t->cache_->kSingletonOne, t); } + if (type.Is(Type::BigInt())) { + return Type::BigInt(); + } return Type::Numeric(); } @@ -458,6 +467,9 @@ Type Typer::Visitor::Negate(Type type, Typer* t) { if (type.Is(Type::Number())) { return NumberMultiply(type, t->cache_->kSingletonMinusOne, t); } + if (type.Is(Type::BigInt())) { + return Type::BigInt(); + } return Type::Numeric(); } @@ -862,7 +874,7 @@ Type Typer::Visitor::TypeInductionVariablePhi(Node* node) { max = +V8_INFINITY; } - if (FLAG_trace_turbo_loop) { + if (v8_flags.trace_turbo_loop) { StdoutStream{} << std::setprecision(10) << "Loop (" << NodeProperties::GetControlInput(node)->id() << ") variable bounds in " @@ -1470,6 +1482,10 @@ Type Typer::Visitor::TypeJSGetSuperConstructor(Node* node) { return Type::NonInternal(); } +Type Typer::Visitor::TypeJSFindNonDefaultConstructorOrConstruct(Node* node) { + return Type::Tuple(Type::Boolean(), Type::Object(), zone()); +} + // JS context operators. 
Type Typer::Visitor::TypeJSHasContextExtension(Node* node) { return Type::Boolean(); diff --git a/deps/v8/src/compiler/types.cc b/deps/v8/src/compiler/types.cc index 676af8d54c44f1..c81a185da06cc0 100644 --- a/deps/v8/src/compiler/types.cc +++ b/deps/v8/src/compiler/types.cc @@ -227,6 +227,7 @@ Type::bitset BitsetType::Lub(const MapRefLike& map) { case JS_COLLATOR_TYPE: case JS_DATE_TIME_FORMAT_TYPE: case JS_DISPLAY_NAMES_TYPE: + case JS_DURATION_FORMAT_TYPE: case JS_LIST_FORMAT_TYPE: case JS_LOCALE_TYPE: case JS_NUMBER_FORMAT_TYPE: @@ -276,13 +277,12 @@ Type::bitset BitsetType::Lub(const MapRefLike& map) { case JS_TEMPORAL_PLAIN_YEAR_MONTH_TYPE: case JS_TEMPORAL_TIME_ZONE_TYPE: case JS_TEMPORAL_ZONED_DATE_TIME_TYPE: + case JS_RAW_JSON_TYPE: #if V8_ENABLE_WEBASSEMBLY - case WASM_ARRAY_TYPE: case WASM_GLOBAL_OBJECT_TYPE: case WASM_INSTANCE_OBJECT_TYPE: case WASM_MEMORY_OBJECT_TYPE: case WASM_MODULE_OBJECT_TYPE: - case WASM_STRUCT_TYPE: case WASM_SUSPENDER_OBJECT_TYPE: case WASM_TABLE_OBJECT_TYPE: case WASM_TAG_OBJECT_TYPE: @@ -293,6 +293,11 @@ Type::bitset BitsetType::Lub(const MapRefLike& map) { DCHECK(!map.is_callable()); DCHECK(!map.is_undetectable()); return kOtherObject; +#if V8_ENABLE_WEBASSEMBLY + case WASM_STRUCT_TYPE: + case WASM_ARRAY_TYPE: + return kWasmObject; +#endif // V8_ENABLE_WEBASSEMBLY case JS_BOUND_FUNCTION_TYPE: DCHECK(!map.is_undetectable()); return kBoundFunction; @@ -1118,6 +1123,13 @@ Type Type::Tuple(Type first, Type second, Type third, Zone* zone) { return FromTypeBase(tuple); } +Type Type::Tuple(Type first, Type second, Zone* zone) { + TupleType* tuple = TupleType::New(2, zone); + tuple->InitElement(0, first); + tuple->InitElement(1, second); + return FromTypeBase(tuple); +} + // static Type Type::OtherNumberConstant(double value, Zone* zone) { return FromTypeBase(OtherNumberConstantType::New(value, zone)); diff --git a/deps/v8/src/compiler/types.h b/deps/v8/src/compiler/types.h index dd7203046c4faa..e9353682eb0631 100644 --- a/deps/v8/src/compiler/types.h +++ b/deps/v8/src/compiler/types.h @@ -423,6 +423,7 @@ class V8_EXPORT_PRIVATE Type { static Type Constant(double value, Zone* zone); static Type Range(double min, double max, Zone* zone); static Type Tuple(Type first, Type second, Type third, Zone* zone); + static Type Tuple(Type first, Type second, Zone* zone); static Type Union(Type type1, Type type2, Zone* zone); static Type Intersect(Type type1, Type type2, Zone* zone); diff --git a/deps/v8/src/compiler/use-info.h b/deps/v8/src/compiler/use-info.h new file mode 100644 index 00000000000000..8a97ef093589f6 --- /dev/null +++ b/deps/v8/src/compiler/use-info.h @@ -0,0 +1,352 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_COMPILER_USE_INFO_H_ +#define V8_COMPILER_USE_INFO_H_ + +#include "src/base/functional.h" +#include "src/compiler/feedback-source.h" +#include "src/compiler/globals.h" + +namespace v8::internal::compiler { + +enum IdentifyZeros : uint8_t { kIdentifyZeros, kDistinguishZeros }; + +class Truncation; +size_t hash_value(const Truncation&); + +class Truncation final { + public: + // Constructors. 
+ static Truncation None() { + return Truncation(TruncationKind::kNone, kIdentifyZeros); + } + static Truncation Bool() { + return Truncation(TruncationKind::kBool, kIdentifyZeros); + } + static Truncation Word32() { + return Truncation(TruncationKind::kWord32, kIdentifyZeros); + } + static Truncation Word64() { + return Truncation(TruncationKind::kWord64, kIdentifyZeros); + } + static Truncation OddballAndBigIntToNumber( + IdentifyZeros identify_zeros = kDistinguishZeros) { + return Truncation(TruncationKind::kOddballAndBigIntToNumber, + identify_zeros); + } + static Truncation Any(IdentifyZeros identify_zeros = kDistinguishZeros) { + return Truncation(TruncationKind::kAny, identify_zeros); + } + + static Truncation Generalize(Truncation t1, Truncation t2) { + return Truncation( + Generalize(t1.kind(), t2.kind()), + GeneralizeIdentifyZeros(t1.identify_zeros(), t2.identify_zeros())); + } + + // Queries. + bool IsUnused() const { return kind_ == TruncationKind::kNone; } + bool IsUsedAsBool() const { + return LessGeneral(kind_, TruncationKind::kBool); + } + bool IsUsedAsWord32() const { + return LessGeneral(kind_, TruncationKind::kWord32); + } + bool IsUsedAsWord64() const { + return LessGeneral(kind_, TruncationKind::kWord64); + } + bool TruncatesOddballAndBigIntToNumber() const { + return LessGeneral(kind_, TruncationKind::kOddballAndBigIntToNumber); + } + bool IdentifiesUndefinedAndZero() { + return LessGeneral(kind_, TruncationKind::kWord32) || + LessGeneral(kind_, TruncationKind::kBool); + } + bool IdentifiesZeroAndMinusZero() const { + return identify_zeros() == kIdentifyZeros; + } + + // Operators. + bool operator==(Truncation other) const { + return kind() == other.kind() && identify_zeros() == other.identify_zeros(); + } + bool operator!=(Truncation other) const { return !(*this == other); } + + // Debug utilities. 
+ const char* description() const; + bool IsLessGeneralThan(Truncation other) const { + return LessGeneral(kind(), other.kind()) && + LessGeneralIdentifyZeros(identify_zeros(), other.identify_zeros()); + } + + IdentifyZeros identify_zeros() const { return identify_zeros_; } + + private: + enum class TruncationKind : uint8_t { + kNone, + kBool, + kWord32, + kWord64, + kOddballAndBigIntToNumber, + kAny + }; + + explicit Truncation(TruncationKind kind, IdentifyZeros identify_zeros) + : kind_(kind), identify_zeros_(identify_zeros) {} + + TruncationKind kind() const { return kind_; } + + friend class SimplifiedLoweringVerifier; + friend size_t hash_value(const Truncation&); + TruncationKind kind_; + IdentifyZeros identify_zeros_; + + static TruncationKind Generalize(TruncationKind rep1, TruncationKind rep2); + static IdentifyZeros GeneralizeIdentifyZeros(IdentifyZeros i1, + IdentifyZeros i2); + static bool LessGeneral(TruncationKind rep1, TruncationKind rep2); + static bool LessGeneralIdentifyZeros(IdentifyZeros u1, IdentifyZeros u2); +}; + +inline size_t hash_value(const Truncation& truncation) { + return base::hash_combine(truncation.kind(), truncation.identify_zeros()); +} + +inline std::ostream& operator<<(std::ostream& os, + const Truncation& truncation) { + return os << truncation.description(); +} + +enum class TypeCheckKind : uint8_t { + kNone, + kSignedSmall, + kSigned32, + kSigned64, + kNumber, + kNumberOrBoolean, + kNumberOrOddball, + kHeapObject, + kBigInt, + kBigInt64, + kArrayIndex +}; + +inline std::ostream& operator<<(std::ostream& os, TypeCheckKind type_check) { + switch (type_check) { + case TypeCheckKind::kNone: + return os << "None"; + case TypeCheckKind::kSignedSmall: + return os << "SignedSmall"; + case TypeCheckKind::kSigned32: + return os << "Signed32"; + case TypeCheckKind::kSigned64: + return os << "Signed64"; + case TypeCheckKind::kNumber: + return os << "Number"; + case TypeCheckKind::kNumberOrBoolean: + return os << "NumberOrBoolean"; + case TypeCheckKind::kNumberOrOddball: + return os << "NumberOrOddball"; + case TypeCheckKind::kHeapObject: + return os << "HeapObject"; + case TypeCheckKind::kBigInt: + return os << "BigInt"; + case TypeCheckKind::kBigInt64: + return os << "BigInt64"; + case TypeCheckKind::kArrayIndex: + return os << "ArrayIndex"; + } + UNREACHABLE(); +} + +// The {UseInfo} class is used to describe a use of an input of a node. +// +// This information is used in two different ways, based on the phase: +// +// 1. During propagation, the use info is used to inform the input node +// about what part of the input is used (we call this truncation) and what +// is the preferred representation. For conversions that will require +// checks, we also keep track of whether a minus zero check is needed. +// +// 2. During lowering, the use info is used to properly convert the input +// to the preferred representation. The preferred representation might be +// insufficient to do the conversion (e.g. word32->float64 conv), so we also +// need the signedness information to produce the correct value. +// Additionally, use info may contain {CheckParameters} which contains +// information for the deoptimizer such as a CallIC on which speculation +// should be disallowed if the check fails. 
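+//
+// As an illustration, a speculative Signed32 use of an input would be
+// described by UseInfo::CheckedSigned32AsWord32(kIdentifyZeros, feedback)
+// (defined below): convert the input to Word32, identify -0 with 0, and
+// deoptimize, reporting {feedback}, if the input turns out not to be a
+// Signed32.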
+class UseInfo { + public: + UseInfo(MachineRepresentation representation, Truncation truncation, + TypeCheckKind type_check = TypeCheckKind::kNone, + const FeedbackSource& feedback = FeedbackSource()) + : representation_(representation), + truncation_(truncation), + type_check_(type_check), + feedback_(feedback) {} + static UseInfo TruncatingWord32() { + return UseInfo(MachineRepresentation::kWord32, Truncation::Word32()); + } + static UseInfo CheckedBigIntTruncatingWord64(const FeedbackSource& feedback) { + // Note that Truncation::Word64() can safely use kIdentifyZeros, because + // TypeCheckKind::kBigInt will make sure we deopt for anything other than + // type BigInt anyway. + return UseInfo(MachineRepresentation::kWord64, Truncation::Word64(), + TypeCheckKind::kBigInt, feedback); + } + static UseInfo CheckedBigInt64AsWord64(const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kWord64, Truncation::Any(), + TypeCheckKind::kBigInt64, feedback); + } + static UseInfo Word64() { + return UseInfo(MachineRepresentation::kWord64, Truncation::Any()); + } + static UseInfo Word() { + return UseInfo(MachineType::PointerRepresentation(), Truncation::Any()); + } + static UseInfo Bool() { + return UseInfo(MachineRepresentation::kBit, Truncation::Bool()); + } + static UseInfo Float32() { + return UseInfo(MachineRepresentation::kFloat32, Truncation::Any()); + } + static UseInfo Float64() { + return UseInfo(MachineRepresentation::kFloat64, Truncation::Any()); + } + static UseInfo TruncatingFloat64( + IdentifyZeros identify_zeros = kDistinguishZeros) { + return UseInfo(MachineRepresentation::kFloat64, + Truncation::OddballAndBigIntToNumber(identify_zeros)); + } + static UseInfo AnyTagged() { + return UseInfo(MachineRepresentation::kTagged, Truncation::Any()); + } + static UseInfo TaggedSigned() { + return UseInfo(MachineRepresentation::kTaggedSigned, Truncation::Any()); + } + static UseInfo TaggedPointer() { + return UseInfo(MachineRepresentation::kTaggedPointer, Truncation::Any()); + } + + // Possibly deoptimizing conversions.
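+ // (A "checked" conversion inserts a runtime type check and deoptimizes,
+ // reporting the given {FeedbackSource}, when the input does not have the
+ // expected type.)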
+ static UseInfo CheckedTaggedAsArrayIndex(const FeedbackSource& feedback) { + return UseInfo(MachineType::PointerRepresentation(), + Truncation::Any(kIdentifyZeros), TypeCheckKind::kArrayIndex, + feedback); + } + static UseInfo CheckedHeapObjectAsTaggedPointer( + const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kTaggedPointer, Truncation::Any(), + TypeCheckKind::kHeapObject, feedback); + } + + static UseInfo CheckedBigIntAsTaggedPointer(const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kTaggedPointer, Truncation::Any(), + TypeCheckKind::kBigInt, feedback); + } + + static UseInfo CheckedSignedSmallAsTaggedSigned( + const FeedbackSource& feedback, + IdentifyZeros identify_zeros = kDistinguishZeros) { + return UseInfo(MachineRepresentation::kTaggedSigned, + Truncation::Any(identify_zeros), TypeCheckKind::kSignedSmall, + feedback); + } + static UseInfo CheckedSignedSmallAsWord32(IdentifyZeros identify_zeros, + const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kWord32, + Truncation::Any(identify_zeros), TypeCheckKind::kSignedSmall, + feedback); + } + static UseInfo CheckedSigned32AsWord32(IdentifyZeros identify_zeros, + const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kWord32, + Truncation::Any(identify_zeros), TypeCheckKind::kSigned32, + feedback); + } + static UseInfo CheckedSigned64AsWord64(IdentifyZeros identify_zeros, + const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kWord64, + Truncation::Any(identify_zeros), TypeCheckKind::kSigned64, + feedback); + } + static UseInfo CheckedNumberAsFloat64(IdentifyZeros identify_zeros, + const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kFloat64, + Truncation::Any(identify_zeros), TypeCheckKind::kNumber, + feedback); + } + static UseInfo CheckedNumberAsWord32(const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kWord32, Truncation::Word32(), + TypeCheckKind::kNumber, feedback); + } + static UseInfo CheckedNumberOrBooleanAsFloat64( + IdentifyZeros identify_zeros, const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kFloat64, + Truncation::Any(identify_zeros), + TypeCheckKind::kNumberOrBoolean, feedback); + } + static UseInfo CheckedNumberOrOddballAsFloat64( + IdentifyZeros identify_zeros, const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kFloat64, + Truncation::Any(identify_zeros), + TypeCheckKind::kNumberOrOddball, feedback); + } + static UseInfo CheckedNumberOrOddballAsWord32( + const FeedbackSource& feedback) { + return UseInfo(MachineRepresentation::kWord32, Truncation::Word32(), + TypeCheckKind::kNumberOrOddball, feedback); + } + + // Undetermined representation. + static UseInfo Any() { + return UseInfo(MachineRepresentation::kNone, Truncation::Any()); + } + static UseInfo AnyTruncatingToBool() { + return UseInfo(MachineRepresentation::kNone, Truncation::Bool()); + } + + // Value not used. + static UseInfo None() { + return UseInfo(MachineRepresentation::kNone, Truncation::None()); + } + + MachineRepresentation representation() const { return representation_; } + Truncation truncation() const { return truncation_; } + TypeCheckKind type_check() const { return type_check_; } + CheckForMinusZeroMode minus_zero_check() const { + return truncation().IdentifiesZeroAndMinusZero() + ? 
CheckForMinusZeroMode::kDontCheckForMinusZero + : CheckForMinusZeroMode::kCheckForMinusZero; + } + const FeedbackSource& feedback() const { return feedback_; } + + private: + MachineRepresentation representation_; + Truncation truncation_; + TypeCheckKind type_check_; + FeedbackSource feedback_; +}; + +inline bool operator==(const UseInfo& lhs, const UseInfo& rhs) { + return lhs.representation() == rhs.representation() && + lhs.truncation() == rhs.truncation() && + lhs.type_check() == rhs.type_check() && + lhs.feedback() == rhs.feedback(); +} + +inline size_t hash_value(const UseInfo& use_info) { + return base::hash_combine(use_info.representation(), use_info.truncation(), + use_info.type_check(), use_info.feedback()); +} + +inline std::ostream& operator<<(std::ostream& os, const UseInfo& use_info) { + return os << use_info.representation() << ", " << use_info.truncation() + << ", " << use_info.type_check() << ", " << use_info.feedback(); +} + +} // namespace v8::internal::compiler + +#endif // V8_COMPILER_USE_INFO_H_ diff --git a/deps/v8/src/compiler/verifier.cc b/deps/v8/src/compiler/verifier.cc index 9d1959a77616e0..1f7f74918436cd 100644 --- a/deps/v8/src/compiler/verifier.cc +++ b/deps/v8/src/compiler/verifier.cc @@ -48,7 +48,7 @@ class Verifier::Visitor { private: void CheckNotTyped(Node* node) { // Verification of simplified lowering sets types of many additional nodes. - if (FLAG_verify_simplified_lowering) return; + if (v8_flags.verify_simplified_lowering) return; if (NodeProperties::IsTyped(node)) { std::ostringstream str; @@ -790,7 +790,10 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) { CheckValueInputIs(node, 0, Type::Any()); CheckTypeIs(node, Type::NonInternal()); break; - + case IrOpcode::kJSFindNonDefaultConstructorOrConstruct: + CheckValueInputIs(node, 0, Type::Any()); + CheckValueInputIs(node, 1, Type::Any()); + break; case IrOpcode::kJSHasContextExtension: CheckTypeIs(node, Type::Boolean()); break; @@ -1513,6 +1516,7 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) { case IrOpcode::kCheckedUint32ToTaggedSigned: case IrOpcode::kCheckedUint64Bounds: case IrOpcode::kCheckedUint64ToInt32: + case IrOpcode::kCheckedUint64ToInt64: case IrOpcode::kCheckedUint64ToTaggedSigned: case IrOpcode::kCheckedFloat64ToInt32: case IrOpcode::kCheckedFloat64ToInt64: @@ -1524,6 +1528,7 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) { case IrOpcode::kCheckedTaggedToTaggedSigned: case IrOpcode::kCheckedTaggedToTaggedPointer: case IrOpcode::kCheckedTruncateTaggedToWord32: + case IrOpcode::kCheckedBigInt64Add: case IrOpcode::kAssertType: case IrOpcode::kVerifyType: break; @@ -1642,6 +1647,10 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) { CheckValueInputIs(node, 0, Type::Any()); CheckTypeIs(node, Type::BigInt()); break; + case IrOpcode::kCheckBigInt64: + CheckValueInputIs(node, 0, Type::Any()); + CheckTypeIs(node, Type::SignedBigInt64()); + break; case IrOpcode::kFastApiCall: CHECK_GE(value_count, 1); CheckValueInputIs(node, 0, Type::Any()); // receiver @@ -1733,12 +1742,15 @@ void Verifier::Visitor::Check(Node* node, const AllNodes& all) { case IrOpcode::kInt64Sub: case IrOpcode::kInt64SubWithOverflow: case IrOpcode::kInt64Mul: + case IrOpcode::kInt64MulHigh: + case IrOpcode::kInt64MulWithOverflow: case IrOpcode::kInt64Div: case IrOpcode::kInt64Mod: case IrOpcode::kInt64LessThan: case IrOpcode::kInt64LessThanOrEqual: case IrOpcode::kUint64Div: case IrOpcode::kUint64Mod: + case IrOpcode::kUint64MulHigh: case 
IrOpcode::kUint64LessThan: case IrOpcode::kUint64LessThanOrEqual: case IrOpcode::kFloat32Add: diff --git a/deps/v8/src/compiler/wasm-compiler-definitions.h b/deps/v8/src/compiler/wasm-compiler-definitions.h index 01cc8c3580ca27..f6a5eabcdddde2 100644 --- a/deps/v8/src/compiler/wasm-compiler-definitions.h +++ b/deps/v8/src/compiler/wasm-compiler-definitions.h @@ -20,6 +20,7 @@ namespace compiler { struct WasmTypeCheckConfig { bool object_can_be_null; + bool null_succeeds; uint8_t rtt_depth; }; diff --git a/deps/v8/src/compiler/wasm-compiler.cc b/deps/v8/src/compiler/wasm-compiler.cc index 6fd0c2696dc8cf..d30177f961ea03 100644 --- a/deps/v8/src/compiler/wasm-compiler.cc +++ b/deps/v8/src/compiler/wasm-compiler.cc @@ -42,7 +42,6 @@ #include "src/tracing/trace-event.h" #include "src/trap-handler/trap-handler.h" #include "src/wasm/code-space-access.h" -#include "src/wasm/function-body-decoder-impl.h" #include "src/wasm/function-compiler.h" #include "src/wasm/graph-builder-interface.h" #include "src/wasm/jump-table-assembler.h" @@ -56,6 +55,7 @@ #include "src/wasm/wasm-module.h" #include "src/wasm/wasm-objects-inl.h" #include "src/wasm/wasm-opcodes-inl.h" +#include "src/wasm/wasm-subtyping.h" namespace v8 { namespace internal { @@ -2842,17 +2842,11 @@ Node* WasmGraphBuilder::BuildIndirectCall(uint32_t table_index, // Note: Since null entries are identified by having ift_sig_id (-1), we only // need one comparison. // TODO(9495): Change this if we should do full function subtyping instead. - Node* expected_sig_id; - if (v8_flags.wasm_type_canonicalization) { - Node* isorecursive_canonical_types = - LOAD_INSTANCE_FIELD(IsorecursiveCanonicalTypes, MachineType::Pointer()); - expected_sig_id = gasm_->LoadImmutable( - MachineType::Uint32(), isorecursive_canonical_types, - gasm_->IntPtrConstant(sig_index * kInt32Size)); - } else { - expected_sig_id = - Int32Constant(env_->module->per_module_canonical_type_ids[sig_index]); - } + Node* isorecursive_canonical_types = + LOAD_INSTANCE_FIELD(IsorecursiveCanonicalTypes, MachineType::Pointer()); + Node* expected_sig_id = + gasm_->LoadImmutable(MachineType::Uint32(), isorecursive_canonical_types, + gasm_->IntPtrConstant(sig_index * kInt32Size)); Node* int32_scaled_key = gasm_->BuildChangeUint32ToUintPtr( gasm_->Word32Shl(key, Int32Constant(2))); @@ -5436,21 +5430,47 @@ WasmGraphBuilder::Callbacks WasmGraphBuilder::BranchCallbacks( } void WasmGraphBuilder::DataCheck(Node* object, bool object_can_be_null, - Callbacks callbacks) { + Callbacks callbacks, bool null_succeeds) { if (object_can_be_null) { - callbacks.fail_if(IsNull(object), BranchHint::kFalse); + if (null_succeeds) { + callbacks.succeed_if(IsNull(object), BranchHint::kFalse); + } else { + // TODO(7748): Is the extra null check actually beneficial for + // performance? + callbacks.fail_if(IsNull(object), BranchHint::kFalse); + } } callbacks.fail_if(gasm_->IsI31(object), BranchHint::kFalse); Node* map = gasm_->LoadMap(object); callbacks.fail_if_not(gasm_->IsDataRefMap(map), BranchHint::kTrue); } +void WasmGraphBuilder::EqCheck(Node* object, bool object_can_be_null, + Callbacks callbacks, bool null_succeeds) { + // TODO(7748): Is the extra null check actually beneficial for performance? 
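+ // {null_succeeds} selects between the test variants where a null input
+ // passes the check (branching to the success path) and those where it
+ // fails; the RefAs* casts below pass false, so a null input reaches the
+ // trap path.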
+ if (object_can_be_null) { + if (null_succeeds) { + callbacks.succeed_if(IsNull(object), BranchHint::kFalse); + } else { + callbacks.fail_if(IsNull(object), BranchHint::kFalse); + } + } + callbacks.succeed_if(gasm_->IsI31(object), BranchHint::kFalse); + Node* map = gasm_->LoadMap(object); + callbacks.fail_if_not(gasm_->IsDataRefMap(map), BranchHint::kTrue); +} + void WasmGraphBuilder::ManagedObjectInstanceCheck(Node* object, bool object_can_be_null, InstanceType instance_type, - Callbacks callbacks) { + Callbacks callbacks, + bool null_succeeds) { if (object_can_be_null) { - callbacks.fail_if(IsNull(object), BranchHint::kFalse); + if (null_succeeds) { + callbacks.succeed_if(IsNull(object), BranchHint::kFalse); + } else { + callbacks.fail_if(IsNull(object), BranchHint::kFalse); + } } callbacks.fail_if(gasm_->IsI31(object), BranchHint::kFalse); callbacks.fail_if_not(gasm_->HasInstanceType(object, instance_type), @@ -5501,6 +5521,27 @@ Node* WasmGraphBuilder::RefTest(Node* object, Node* rtt, return gasm_->WasmTypeCheck(object, rtt, config); } +Node* WasmGraphBuilder::RefTestAbstract(Node* object, wasm::HeapType type, + bool null_succeeds) { + bool is_nullable = + compiler::NodeProperties::GetType(object).AsWasm().type.is_nullable(); + switch (type.representation()) { + case wasm::HeapType::kEq: + return RefIsEq(object, is_nullable, null_succeeds); + case wasm::HeapType::kI31: + return RefIsI31(object, null_succeeds); + case wasm::HeapType::kData: + return RefIsData(object, is_nullable, null_succeeds); + case wasm::HeapType::kArray: + return RefIsArray(object, is_nullable, null_succeeds); + case wasm::HeapType::kAny: + // Any may never need a cast as it is either implicitly convertible or + // never convertible for any given type. + default: + UNREACHABLE(); + } +} + Node* WasmGraphBuilder::RefCast(Node* object, Node* rtt, WasmTypeCheckConfig config, wasm::WasmCodePosition position) { @@ -5522,9 +5563,19 @@ void WasmGraphBuilder::BrOnCast(Node* object, Node* rtt, *no_match_control = false_node; } -Node* WasmGraphBuilder::RefIsData(Node* object, bool object_can_be_null) { +Node* WasmGraphBuilder::RefIsEq(Node* object, bool object_can_be_null, + bool null_succeeds) { auto done = gasm_->MakeLabel(MachineRepresentation::kWord32); - DataCheck(object, object_can_be_null, TestCallbacks(&done)); + EqCheck(object, object_can_be_null, TestCallbacks(&done), null_succeeds); + gasm_->Goto(&done, Int32Constant(1)); + gasm_->Bind(&done); + return done.PhiAt(0); +} + +Node* WasmGraphBuilder::RefIsData(Node* object, bool object_can_be_null, + bool null_succeeds) { + auto done = gasm_->MakeLabel(MachineRepresentation::kWord32); + DataCheck(object, object_can_be_null, TestCallbacks(&done), null_succeeds); gasm_->Goto(&done, Int32Constant(1)); gasm_->Bind(&done); return done.PhiAt(0); @@ -5532,8 +5583,10 @@ Node* WasmGraphBuilder::RefIsData(Node* object, bool object_can_be_null) { Node* WasmGraphBuilder::RefAsData(Node* object, bool object_can_be_null, wasm::WasmCodePosition position) { + bool null_succeeds = false; auto done = gasm_->MakeLabel(); - DataCheck(object, object_can_be_null, CastCallbacks(&done, position)); + DataCheck(object, object_can_be_null, CastCallbacks(&done, position), + null_succeeds); gasm_->Goto(&done); gasm_->Bind(&done); return object; @@ -5544,16 +5597,19 @@ void WasmGraphBuilder::BrOnData(Node* object, Node* /*rtt*/, Node** match_control, Node** match_effect, Node** no_match_control, Node** no_match_effect) { + bool null_succeeds = false; BrOnCastAbs(match_control, match_effect, 
no_match_control, no_match_effect, [=](Callbacks callbacks) -> void { - return DataCheck(object, config.object_can_be_null, callbacks); + return DataCheck(object, config.object_can_be_null, callbacks, + null_succeeds); }); } -Node* WasmGraphBuilder::RefIsArray(Node* object, bool object_can_be_null) { +Node* WasmGraphBuilder::RefIsArray(Node* object, bool object_can_be_null, + bool null_succeeds) { auto done = gasm_->MakeLabel(MachineRepresentation::kWord32); ManagedObjectInstanceCheck(object, object_can_be_null, WASM_ARRAY_TYPE, - TestCallbacks(&done)); + TestCallbacks(&done), null_succeeds); gasm_->Goto(&done, Int32Constant(1)); gasm_->Bind(&done); return done.PhiAt(0); @@ -5561,9 +5617,10 @@ Node* WasmGraphBuilder::RefIsArray(Node* object, bool object_can_be_null) { Node* WasmGraphBuilder::RefAsArray(Node* object, bool object_can_be_null, wasm::WasmCodePosition position) { + bool null_succeeds = false; auto done = gasm_->MakeLabel(); ManagedObjectInstanceCheck(object, object_can_be_null, WASM_ARRAY_TYPE, - CastCallbacks(&done, position)); + CastCallbacks(&done, position), null_succeeds); gasm_->Goto(&done); gasm_->Bind(&done); return object; @@ -5574,15 +5631,26 @@ void WasmGraphBuilder::BrOnArray(Node* object, Node* /*rtt*/, Node** match_control, Node** match_effect, Node** no_match_control, Node** no_match_effect) { + bool null_succeeds = false; BrOnCastAbs(match_control, match_effect, no_match_control, no_match_effect, [=](Callbacks callbacks) -> void { - return ManagedObjectInstanceCheck(object, - config.object_can_be_null, - WASM_ARRAY_TYPE, callbacks); + return ManagedObjectInstanceCheck( + object, config.object_can_be_null, WASM_ARRAY_TYPE, + callbacks, null_succeeds); }); } -Node* WasmGraphBuilder::RefIsI31(Node* object) { return gasm_->IsI31(object); } +Node* WasmGraphBuilder::RefIsI31(Node* object, bool null_succeeds) { + if (null_succeeds) { + auto done = gasm_->MakeLabel(MachineRepresentation::kWord32); + gasm_->GotoIf(gasm_->IsI31(object), &done, BranchHint::kTrue, + Int32Constant(1)); + gasm_->Goto(&done, gasm_->IsNull(object)); + gasm_->Bind(&done); + return done.PhiAt(0); + } + return gasm_->IsI31(object); +} Node* WasmGraphBuilder::RefAsI31(Node* object, wasm::WasmCodePosition position) { @@ -5731,36 +5799,49 @@ void WasmGraphBuilder::ArrayCopy(Node* dst_array, Node* dst_index, gasm_->Bind(&skip); } +// General rules for operator properties for builtin calls: +// - Use kEliminatable if it can neither throw a catchable exception nor trap. +// - Use kNoDeopt | kNoThrow if it can trap (because in that case, eliminating +// it would avoid the trap and thereby observably change the code's behavior +// compared to its unoptimized version). +// - If you don't use kNoThrow (nor kEliminatable which implies it), then you +// must also set up control nodes for the throwing case, e.g. by using +// WasmGraphBuildingInterface::CheckForException(). 
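For concreteness, here is how those rules map onto two builtin calls updated in this patch. This is an illustrative fragment, not standalone code: it assumes the surrounding WasmGraphBuilder context, with the gasm_ graph assembler and the string, offset, size, memory, and variant values already in scope.

    // StringMeasureWtf8 can neither throw a catchable exception nor trap,
    // so it is marked kEliminatable and may be removed when its result is
    // unused.
    Node* length = gasm_->CallBuiltin(Builtin::kWasmStringMeasureWtf8,
                                      Operator::kEliminatable, string);
    // StringNewWtf8 can trap (e.g. on an out-of-bounds memory access), so it
    // only gets kNoDeopt | kNoThrow; eliminating it would silently skip the
    // trap.
    Node* str = gasm_->CallBuiltin(
        Builtin::kWasmStringNewWtf8, Operator::kNoDeopt | Operator::kNoThrow,
        offset, size, gasm_->SmiConstant(memory),
        gasm_->SmiConstant(static_cast<int32_t>(variant)));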
+ Node* WasmGraphBuilder::StringNewWtf8(uint32_t memory, unibrow::Utf8Variant variant, Node* offset, Node* size) { - return gasm_->CallBuiltin(Builtin::kWasmStringNewWtf8, Operator::kNoDeopt, - offset, size, gasm_->SmiConstant(memory), + return gasm_->CallBuiltin(Builtin::kWasmStringNewWtf8, + Operator::kNoDeopt | Operator::kNoThrow, offset, + size, gasm_->SmiConstant(memory), gasm_->SmiConstant(static_cast<int32_t>(variant))); } Node* WasmGraphBuilder::StringNewWtf8Array(unibrow::Utf8Variant variant, Node* array, Node* start, Node* end) { - return gasm_->CallBuiltin(Builtin::kWasmStringNewWtf8Array, - Operator::kNoDeopt, start, end, array, - gasm_->SmiConstant(static_cast<int32_t>(variant))); + return gasm_->CallBuiltin( + Builtin::kWasmStringNewWtf8Array, Operator::kNoDeopt | Operator::kNoThrow, + start, end, array, gasm_->SmiConstant(static_cast<int32_t>(variant))); } Node* WasmGraphBuilder::StringNewWtf16(uint32_t memory, Node* offset, Node* size) { - return gasm_->CallBuiltin(Builtin::kWasmStringNewWtf16, Operator::kNoDeopt, + return gasm_->CallBuiltin(Builtin::kWasmStringNewWtf16, + Operator::kNoDeopt | Operator::kNoThrow, gasm_->Uint32Constant(memory), offset, size); } Node* WasmGraphBuilder::StringNewWtf16Array(Node* array, Node* start, Node* end) { return gasm_->CallBuiltin(Builtin::kWasmStringNewWtf16Array, - Operator::kNoDeopt, array, start, end); + Operator::kNoDeopt | Operator::kNoThrow, array, + start, end); } Node* WasmGraphBuilder::StringConst(uint32_t index) { - return gasm_->CallBuiltin(Builtin::kWasmStringConst, Operator::kNoDeopt, + return gasm_->CallBuiltin(Builtin::kWasmStringConst, + Operator::kNoDeopt | Operator::kNoThrow, gasm_->Uint32Constant(index)); } @@ -5769,8 +5850,8 @@ Node* WasmGraphBuilder::StringMeasureUtf8(Node* string, CheckForNull null_check, if (null_check == kWithNullCheck) { string = AssertNotNull(string, position); } - return gasm_->CallBuiltin(Builtin::kWasmStringMeasureUtf8, Operator::kNoDeopt, - string); + return gasm_->CallBuiltin(Builtin::kWasmStringMeasureUtf8, + Operator::kEliminatable, string); } Node* WasmGraphBuilder::StringMeasureWtf8(Node* string, CheckForNull null_check, @@ -5778,8 +5859,8 @@ Node* WasmGraphBuilder::StringMeasureWtf8(Node* string, CheckForNull null_check, if (null_check == kWithNullCheck) { string = AssertNotNull(string, position); } - return gasm_->CallBuiltin(Builtin::kWasmStringMeasureWtf8, Operator::kNoDeopt, - string); + return gasm_->CallBuiltin(Builtin::kWasmStringMeasureWtf8, + Operator::kEliminatable, string); } Node* WasmGraphBuilder::StringMeasureWtf16(Node* string, @@ -5801,8 +5882,9 @@ Node* WasmGraphBuilder::StringEncodeWtf8(uint32_t memory, if (null_check == kWithNullCheck) { string = AssertNotNull(string, position); } - return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf8, Operator::kNoDeopt, - string, offset, gasm_->SmiConstant(memory), + return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf8, + Operator::kNoDeopt | Operator::kNoThrow, string, + offset, gasm_->SmiConstant(memory), gasm_->SmiConstant(static_cast<int32_t>(variant))); } @@ -5817,7 +5899,8 @@ Node* WasmGraphBuilder::StringEncodeWtf8Array( array = AssertNotNull(array, position); } return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf8Array, - Operator::kNoDeopt, string, array, start, + Operator::kNoDeopt | Operator::kNoThrow, string, + array, start, gasm_->SmiConstant(static_cast<int32_t>(variant))); } @@ -5827,8 +5910,9 @@ Node* WasmGraphBuilder::StringEncodeWtf16(uint32_t memory, Node* string, if (null_check == kWithNullCheck) { 
string = AssertNotNull(string, position); } - return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf16, Operator::kNoDeopt, - string, offset, gasm_->SmiConstant(memory)); + return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf16, + Operator::kNoDeopt | Operator::kNoThrow, string, + offset, gasm_->SmiConstant(memory)); } Node* WasmGraphBuilder::StringEncodeWtf16Array( @@ -5842,7 +5926,8 @@ Node* WasmGraphBuilder::StringEncodeWtf16Array( array = AssertNotNull(array, position); } return gasm_->CallBuiltin(Builtin::kWasmStringEncodeWtf16Array, - Operator::kNoDeopt, string, array, start); + Operator::kNoDeopt | Operator::kNoThrow, string, + array, start); } Node* WasmGraphBuilder::StringConcat(Node* head, CheckForNull head_null_check, @@ -5851,7 +5936,8 @@ Node* WasmGraphBuilder::StringConcat(Node* head, CheckForNull head_null_check, if (head_null_check == kWithNullCheck) head = AssertNotNull(head, position); if (tail_null_check == kWithNullCheck) tail = AssertNotNull(tail, position); return gasm_->CallBuiltin( - Builtin::kStringAdd_CheckNone, Operator::kEliminatable, head, tail, + Builtin::kStringAdd_CheckNone, Operator::kNoDeopt | Operator::kNoThrow, + head, tail, LOAD_INSTANCE_FIELD(NativeContext, MachineType::TaggedPointer())); } @@ -5868,7 +5954,7 @@ Node* WasmGraphBuilder::StringEqual(Node* a, CheckForNull a_null_check, Node* b, gasm_->GotoIf(gasm_->IsNull(b), &done, Int32Constant(0)); } gasm_->Goto(&done, gasm_->CallBuiltin(Builtin::kWasmStringEqual, - Operator::kNoDeopt, a, b)); + Operator::kEliminatable, a, b)); gasm_->Bind(&done); return done.PhiAt(0); } @@ -5878,14 +5964,14 @@ Node* WasmGraphBuilder::StringIsUSVSequence(Node* str, CheckForNull null_check, if (null_check == kWithNullCheck) str = AssertNotNull(str, position); return gasm_->CallBuiltin(Builtin::kWasmStringIsUSVSequence, - Operator::kNoDeopt, str); + Operator::kEliminatable, str); } Node* WasmGraphBuilder::StringAsWtf8(Node* str, CheckForNull null_check, wasm::WasmCodePosition position) { if (null_check == kWithNullCheck) str = AssertNotNull(str, position); - return gasm_->CallBuiltin(Builtin::kWasmStringAsWtf8, Operator::kNoDeopt, + return gasm_->CallBuiltin(Builtin::kWasmStringAsWtf8, Operator::kEliminatable, str); } @@ -5896,7 +5982,7 @@ Node* WasmGraphBuilder::StringViewWtf8Advance(Node* view, if (null_check == kWithNullCheck) view = AssertNotNull(view, position); return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf8Advance, - Operator::kNoDeopt, view, pos, bytes); + Operator::kEliminatable, view, pos, bytes); } void WasmGraphBuilder::StringViewWtf8Encode( @@ -5907,8 +5993,9 @@ void WasmGraphBuilder::StringViewWtf8Encode( view = AssertNotNull(view, position); } Node* pair = - gasm_->CallBuiltin(Builtin::kWasmStringViewWtf8Encode, Operator::kNoDeopt, - addr, pos, bytes, view, gasm_->SmiConstant(memory), + gasm_->CallBuiltin(Builtin::kWasmStringViewWtf8Encode, + Operator::kNoDeopt | Operator::kNoThrow, addr, pos, + bytes, view, gasm_->SmiConstant(memory), gasm_->SmiConstant(static_cast<int32_t>(variant))); *next_pos = gasm_->Projection(0, pair); *bytes_written = gasm_->Projection(1, pair); @@ -5921,7 +6008,7 @@ Node* WasmGraphBuilder::StringViewWtf8Slice(Node* view, CheckForNull null_check, view = AssertNotNull(view, position); } return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf8Slice, - Operator::kNoDeopt, view, pos, bytes); + Operator::kEliminatable, view, pos, bytes); } Node* WasmGraphBuilder::StringViewWtf16GetCodeUnit( @@ -5931,7 +6018,8 @@ Node* WasmGraphBuilder::StringViewWtf16GetCodeUnit( string = 
AssertNotNull(string, position); } return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf16GetCodeUnit, - Operator::kNoDeopt, string, offset); + Operator::kNoDeopt | Operator::kNoThrow, string, + offset); } Node* WasmGraphBuilder::StringViewWtf16Encode(uint32_t memory, Node* string, @@ -5943,8 +6031,9 @@ Node* WasmGraphBuilder::StringViewWtf16Encode(uint32_t memory, Node* string, string = AssertNotNull(string, position); } return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf16Encode, - Operator::kNoDeopt, offset, start, codeunits, - string, gasm_->SmiConstant(memory)); + Operator::kNoDeopt | Operator::kNoThrow, offset, + start, codeunits, string, + gasm_->SmiConstant(memory)); } Node* WasmGraphBuilder::StringViewWtf16Slice(Node* string, @@ -5955,14 +6044,14 @@ Node* WasmGraphBuilder::StringViewWtf16Slice(Node* string, string = AssertNotNull(string, position); } return gasm_->CallBuiltin(Builtin::kWasmStringViewWtf16Slice, - Operator::kNoDeopt, string, start, end); + Operator::kEliminatable, string, start, end); } Node* WasmGraphBuilder::StringAsIter(Node* str, CheckForNull null_check, wasm::WasmCodePosition position) { if (null_check == kWithNullCheck) str = AssertNotNull(str, position); - return gasm_->CallBuiltin(Builtin::kWasmStringAsIter, Operator::kNoDeopt, + return gasm_->CallBuiltin(Builtin::kWasmStringAsIter, Operator::kEliminatable, str); } @@ -5971,7 +6060,7 @@ Node* WasmGraphBuilder::StringViewIterNext(Node* view, CheckForNull null_check, if (null_check == kWithNullCheck) view = AssertNotNull(view, position); return gasm_->CallBuiltin(Builtin::kWasmStringViewIterNext, - Operator::kNoDeopt, view); + Operator::kEliminatable, view); } Node* WasmGraphBuilder::StringViewIterAdvance(Node* view, @@ -5981,7 +6070,7 @@ Node* WasmGraphBuilder::StringViewIterAdvance(Node* view, if (null_check == kWithNullCheck) view = AssertNotNull(view, position); return gasm_->CallBuiltin(Builtin::kWasmStringViewIterAdvance, - Operator::kNoDeopt, view, codepoints); + Operator::kEliminatable, view, codepoints); } Node* WasmGraphBuilder::StringViewIterRewind(Node* view, @@ -5991,7 +6080,7 @@ Node* WasmGraphBuilder::StringViewIterRewind(Node* view, if (null_check == kWithNullCheck) view = AssertNotNull(view, position); return gasm_->CallBuiltin(Builtin::kWasmStringViewIterRewind, - Operator::kNoDeopt, view, codepoints); + Operator::kEliminatable, view, codepoints); } Node* WasmGraphBuilder::StringViewIterSlice(Node* view, CheckForNull null_check, @@ -6000,7 +6089,7 @@ Node* WasmGraphBuilder::StringViewIterSlice(Node* view, CheckForNull null_check, if (null_check == kWithNullCheck) view = AssertNotNull(view, position); return gasm_->CallBuiltin(Builtin::kWasmStringViewIterSlice, - Operator::kNoDeopt, view, codepoints); + Operator::kEliminatable, view, codepoints); } // 1 bit V8 Smi tag, 31 bits V8 Smi shift, 1 bit i31ref high-bit truncation. 
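The bit budget in the comment above adds up to a 33-bit shift (1 Smi tag bit + 31 Smi shift bits + 1 truncated high bit) on 64-bit targets. A minimal standalone sketch of that arithmetic, written as an assumption drawn from the comment rather than as V8's actual helpers:

    #include <cstdint>

    // Encode: shift the payload past the Smi tag and shift bits. The input's
    // high bit (bit 31) is shifted out, so exactly 31 payload bits survive;
    // unsigned arithmetic avoids signed-shift pitfalls.
    int64_t EncodeI31(int32_t value) {
      return static_cast<int64_t>(
          static_cast<uint64_t>(static_cast<uint32_t>(value)) << 33);
    }

    // Decode: an arithmetic right shift by the same amount sign-extends the
    // 31-bit payload back to 32 bits (bit 30 of the input acts as the sign).
    int32_t DecodeI31(int64_t tagged) {
      return static_cast<int32_t>(tagged >> 33);
    }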
@@ -6905,10 +6994,10 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder { gasm_->GotoIf(IsSmi(value), &resume, value); gasm_->GotoIfNot(gasm_->HasInstanceType(value, JS_PROMISE_TYPE), &resume, BranchHint::kTrue, value); - auto* call_descriptor = GetBuiltinCallDescriptor( - Builtin::kWasmSuspend, zone_, StubCallMode::kCallWasmRuntimeStub); - Node* call_target = mcgraph()->RelocatableIntPtrConstant( - wasm::WasmCode::kWasmSuspend, RelocInfo::WASM_STUB_CALL); + auto* call_descriptor = + GetBuiltinCallDescriptor(Builtin::kWasmSuspend, zone_, stub_mode_); + Node* call_target = GetTargetForBuiltinCall(wasm::WasmCode::kWasmSuspend, + Builtin::kWasmSuspend); Node* args[] = {value, suspender}; Node* chained_promise = BuildCallToRuntimeWithContext( Runtime::kWasmCreateResumePromise, native_context, args, 2); diff --git a/deps/v8/src/compiler/wasm-compiler.h b/deps/v8/src/compiler/wasm-compiler.h index f562f7e5fe3ba3..6ff58e8ed42fd6 100644 --- a/deps/v8/src/compiler/wasm-compiler.h +++ b/deps/v8/src/compiler/wasm-compiler.h @@ -494,24 +494,26 @@ class WasmGraphBuilder { Node* RttCanon(uint32_t type_index); Node* RefTest(Node* object, Node* rtt, WasmTypeCheckConfig config); + Node* RefTestAbstract(Node* object, wasm::HeapType type, bool null_succeeds); Node* RefCast(Node* object, Node* rtt, WasmTypeCheckConfig config, wasm::WasmCodePosition position); void BrOnCast(Node* object, Node* rtt, WasmTypeCheckConfig config, Node** match_control, Node** match_effect, Node** no_match_control, Node** no_match_effect); - Node* RefIsData(Node* object, bool object_can_be_null); + Node* RefIsEq(Node* object, bool object_can_be_null, bool null_succeeds); + Node* RefIsData(Node* object, bool object_can_be_null, bool null_succeeds); Node* RefAsData(Node* object, bool object_can_be_null, wasm::WasmCodePosition position); void BrOnData(Node* object, Node* rtt, WasmTypeCheckConfig config, Node** match_control, Node** match_effect, Node** no_match_control, Node** no_match_effect); - Node* RefIsArray(Node* object, bool object_can_be_null); + Node* RefIsArray(Node* object, bool object_can_be_null, bool null_succeeds); Node* RefAsArray(Node* object, bool object_can_be_null, wasm::WasmCodePosition position); void BrOnArray(Node* object, Node* rtt, WasmTypeCheckConfig config, Node** match_control, Node** match_effect, Node** no_match_control, Node** no_match_effect); - Node* RefIsI31(Node* object); + Node* RefIsI31(Node* object, bool null_succeeds); Node* RefAsI31(Node* object, wasm::WasmCodePosition position); void BrOnI31(Node* object, Node* rtt, WasmTypeCheckConfig config, Node** match_control, Node** match_effect, @@ -763,10 +765,13 @@ class WasmGraphBuilder { SmallNodeVector& match_controls, SmallNodeVector& match_effects); - void DataCheck(Node* object, bool object_can_be_null, Callbacks callbacks); + void DataCheck(Node* object, bool object_can_be_null, Callbacks callbacks, + bool null_succeeds); + void EqCheck(Node* object, bool object_can_be_null, Callbacks callbacks, + bool null_succeeds); void ManagedObjectInstanceCheck(Node* object, bool object_can_be_null, InstanceType instance_type, - Callbacks callbacks); + Callbacks callbacks, bool null_succeeds); void BrOnCastAbs(Node** match_control, Node** match_effect, Node** no_match_control, Node** no_match_effect, diff --git a/deps/v8/src/compiler/wasm-escape-analysis.h b/deps/v8/src/compiler/wasm-escape-analysis.h index bc29438f5b97db..5a43fda26ed287 100644 --- a/deps/v8/src/compiler/wasm-escape-analysis.h +++ b/deps/v8/src/compiler/wasm-escape-analysis.h @@ 
-17,9 +17,8 @@ namespace compiler { class MachineGraph; -// Eliminate allocated objects which are only assigned to. -// Current restrictions: Does not work for arrays (until they are also allocated -// with AllocateRaw). Does not work if the allocated object is passed to a phi. +// Eliminate allocated objects with no uses other than as store targets. +// Future work: Also exclude phis and renamings from uses. class WasmEscapeAnalysis final : public AdvancedReducer { public: WasmEscapeAnalysis(Editor* editor, MachineGraph* mcgraph) diff --git a/deps/v8/src/compiler/wasm-gc-lowering.cc b/deps/v8/src/compiler/wasm-gc-lowering.cc index 6d83142b0e8a60..c4fda4ee3c0237 100644 --- a/deps/v8/src/compiler/wasm-gc-lowering.cc +++ b/deps/v8/src/compiler/wasm-gc-lowering.cc @@ -92,10 +92,15 @@ Reduction WasmGCLowering::ReduceWasmTypeCheck(Node* node) { auto end_label = gasm_.MakeLabel(MachineRepresentation::kWord32); if (object_can_be_null) { + const int kResult = config.null_succeeds ? 1 : 0; gasm_.GotoIf(gasm_.TaggedEqual(object, Null()), &end_label, - BranchHint::kFalse, gasm_.Int32Constant(0)); + BranchHint::kFalse, gasm_.Int32Constant(kResult)); } + // TODO(7748): In some cases the Smi check is redundant. If we had information + // about the source type, we could skip it in those cases. + gasm_.GotoIf(gasm_.IsI31(object), &end_label, gasm_.Int32Constant(0)); + Node* map = gasm_.LoadMap(object); // First, check if types happen to be equal. This has been shown to give large diff --git a/deps/v8/src/compiler/wasm-gc-operator-reducer.cc b/deps/v8/src/compiler/wasm-gc-operator-reducer.cc index 7a2b1d8addd607..fa34e79debf36e 100644 --- a/deps/v8/src/compiler/wasm-gc-operator-reducer.cc +++ b/deps/v8/src/compiler/wasm-gc-operator-reducer.cc @@ -117,6 +117,9 @@ wasm::TypeInModule WasmGCOperatorReducer::ObjectTypeFromContext(Node* object, : type_from_node; } +// If the condition of this node's branch is a type check or a null check, +// add the additional information about the type-checked node to the path +// state. Reduction WasmGCOperatorReducer::ReduceIf(Node* node, bool condition) { DCHECK(node->opcode() == IrOpcode::kIfTrue || node->opcode() == IrOpcode::kIfFalse); @@ -184,7 +187,7 @@ Reduction WasmGCOperatorReducer::ReduceMerge(Node* node) { // Change the current type block list to a longest common prefix of this // state list and the other list. (The common prefix should correspond to // the state of the common dominator.) - // TODO(manoskouk): Consider computing intersections for some types. + // TODO(manoskouk): Consider computing unions for some types. types.ResetToCommonAncestor(GetState(*input_it)); } return UpdateStates(node, types); @@ -282,6 +285,7 @@ Reduction WasmGCOperatorReducer::ReduceWasmTypeCast(Node* node) { uint8_t rtt_depth = OpParameter<WasmTypeCheckConfig>(node->op()).rtt_depth; NodeProperties::ChangeOp( node, gasm_.simplified()->WasmTypeCast({false, // object_can_be_null + false, // null_succeeds rtt_depth})); } @@ -310,12 +314,14 @@ Reduction WasmGCOperatorReducer::ReduceWasmTypeCheck(Node* node) { if (wasm::IsHeapSubtypeOf(object_type.type.heap_type(), wasm::HeapType(rtt_type.type.ref_index()), object_type.module, rtt_type.module)) { + bool null_succeeds = + OpParameter<WasmTypeCheckConfig>(node->op()).null_succeeds; // Type cast will fail only on null. gasm_.InitializeEffectControl(effect, control); - Node* condition = - SetType(object_type.type.is_nullable() ? 
gasm_.IsNotNull(object) - : gasm_.Int32Constant(1), - wasm::kWasmI32); + Node* condition = SetType(object_type.type.is_nullable() && !null_succeeds + ? gasm_.IsNotNull(object) + : gasm_.Int32Constant(1), + wasm::kWasmI32); ReplaceWithValue(node, condition); node->Kill(); return Replace(condition); @@ -324,7 +330,17 @@ Reduction WasmGCOperatorReducer::ReduceWasmTypeCheck(Node* node) { if (wasm::HeapTypesUnrelated(object_type.type.heap_type(), wasm::HeapType(rtt_type.type.ref_index()), object_type.module, rtt_type.module)) { - Node* condition = SetType(gasm_.Int32Constant(0), wasm::kWasmI32); + bool null_succeeds = + OpParameter<WasmTypeCheckConfig>(node->op()).null_succeeds; + Node* condition = nullptr; + if (null_succeeds && object_type.type.is_nullable()) { + // The cast only succeeds in case of null. + gasm_.InitializeEffectControl(effect, control); + condition = SetType(gasm_.IsNull(object), wasm::kWasmI32); + } else { + // The cast never succeeds. + condition = SetType(gasm_.Int32Constant(0), wasm::kWasmI32); + } ReplaceWithValue(node, condition); node->Kill(); return Replace(condition); @@ -335,6 +351,7 @@ Reduction WasmGCOperatorReducer::ReduceWasmTypeCheck(Node* node) { uint8_t rtt_depth = OpParameter<WasmTypeCheckConfig>(node->op()).rtt_depth; NodeProperties::ChangeOp( node, gasm_.simplified()->WasmTypeCheck({false, // object_can_be_null + false, // null_succeeds rtt_depth})); } diff --git a/deps/v8/src/compiler/wasm-gc-operator-reducer.h b/deps/v8/src/compiler/wasm-gc-operator-reducer.h index acae0664daa1c2..d9336094487c3e 100644 --- a/deps/v8/src/compiler/wasm-gc-operator-reducer.h +++ b/deps/v8/src/compiler/wasm-gc-operator-reducer.h @@ -35,9 +35,11 @@ struct NodeWithType { wasm::TypeInModule type; }; -// This class optimizes away wasm-gc nodes based on the types of their -// arguments. Although types have been assigned to nodes already, this class -// also tracks additional type information along control paths. +// This class optimizes away wasm-gc type checks and casts. Two types of +// information are used: +// - Types already marked on graph nodes. +// - Path-dependent type information that is inferred when a type check is used +// as a branch condition. class WasmGCOperatorReducer final : public AdvancedReducerWithControlPathState<NodeWithType, kMultipleInstances> { @@ -61,6 +63,8 @@ class WasmGCOperatorReducer final Reduction ReduceStart(Node* node); Node* SetType(Node* node, wasm::ValueType type); + // Returns the intersection of the type marked on {object} and the type + // information about object tracked on {control}'s control path (if present). wasm::TypeInModule ObjectTypeFromContext(Node* object, Node* control); Reduction UpdateNodeAndAliasesTypes(Node* state_owner, ControlPathTypes parent_state, Node* node, diff --git a/deps/v8/src/compiler/wasm-inlining.h b/deps/v8/src/compiler/wasm-inlining.h index 44dcd83538f1d6..e7ec4eb271d155 100644 --- a/deps/v8/src/compiler/wasm-inlining.h +++ b/deps/v8/src/compiler/wasm-inlining.h @@ -55,7 +55,12 @@ class WasmInliner final : public AdvancedReducer { const char* reducer_name() const override { return "WasmInliner"; } + // Registers (tail) calls to possibly be inlined, prioritized by inlining + // heuristics provided by {LexicographicOrdering}. + // Only locally defined functions are inlinable, and a limited number of + // inlinings of a specific function is allowed. Reduction Reduce(Node* node) final; + // Inlines calls registered by {Reduce}, until an inlining budget is exceeded. 
void Finalize() final; static bool graph_size_allows_inlining(size_t graph_size) { diff --git a/deps/v8/src/compiler/wasm-typer.h b/deps/v8/src/compiler/wasm-typer.h index 69fecbb49f6bd8..de101f57087c36 100644 --- a/deps/v8/src/compiler/wasm-typer.h +++ b/deps/v8/src/compiler/wasm-typer.h @@ -18,6 +18,11 @@ namespace compiler { class MachineGraph; +// Recomputes wasm-gc types along the graph to assign the narrowest possible +// type to each node. +// Specifically, struct field accesses, array element accesses, phis, type +// casts, and type guards are retyped. +// Types in loops are computed to a fixed point. class WasmTyper final : public AdvancedReducer { public: WasmTyper(Editor* editor, MachineGraph* mcgraph, uint32_t function_index); diff --git a/deps/v8/src/d8/d8-console.cc b/deps/v8/src/d8/d8-console.cc index 9481831b4a7a8f..f48095302adca0 100644 --- a/deps/v8/src/d8/d8-console.cc +++ b/deps/v8/src/d8/d8-console.cc @@ -73,7 +73,7 @@ void D8Console::Debug(const debug::ConsoleCallArguments& args, void D8Console::Time(const debug::ConsoleCallArguments& args, const v8::debug::ConsoleContext&) { - if (internal::FLAG_correctness_fuzzer_suppressions) return; + if (i::v8_flags.correctness_fuzzer_suppressions) return; if (args.Length() == 0) { default_timer_ = base::TimeTicks::Now(); } else { @@ -95,7 +95,7 @@ void D8Console::Time(const debug::ConsoleCallArguments& args, void D8Console::TimeEnd(const debug::ConsoleCallArguments& args, const v8::debug::ConsoleContext&) { - if (internal::FLAG_correctness_fuzzer_suppressions) return; + if (i::v8_flags.correctness_fuzzer_suppressions) return; base::TimeDelta delta; if (args.Length() == 0) { delta = base::TimeTicks::Now() - default_timer_; @@ -119,7 +119,7 @@ void D8Console::TimeEnd(const debug::ConsoleCallArguments& args, void D8Console::TimeStamp(const debug::ConsoleCallArguments& args, const v8::debug::ConsoleContext&) { - if (internal::FLAG_correctness_fuzzer_suppressions) return; + if (i::v8_flags.correctness_fuzzer_suppressions) return; base::TimeDelta delta = base::TimeTicks::Now() - default_timer_; if (args.Length() == 0) { printf("console.timeStamp: default, %f\n", delta.InMillisecondsF()); @@ -136,7 +136,7 @@ void D8Console::TimeStamp(const debug::ConsoleCallArguments& args, void D8Console::Trace(const debug::ConsoleCallArguments& args, const v8::debug::ConsoleContext&) { - if (internal::FLAG_correctness_fuzzer_suppressions) return; + if (i::v8_flags.correctness_fuzzer_suppressions) return; i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate_); i_isolate->PrintStack(stderr, i::Isolate::kPrintStackConcise); } diff --git a/deps/v8/src/d8/d8-test.cc b/deps/v8/src/d8/d8-test.cc index 878693e3c94e91..94efb2946c37ef 100644 --- a/deps/v8/src/d8/d8-test.cc +++ b/deps/v8/src/d8/d8-test.cc @@ -669,10 +669,12 @@ class FastCApiObject { CHECK_NOT_NULL(self); self->fast_call_count_++; - // Number is in range. - CHECK(in_range && "Number range should have been enforced"); - if (!std::isnan(real_arg)) { - CHECK_EQ(static_cast<IntegerT>(real_arg), checked_arg); + if (!i::v8_flags.fuzzing) { + // Number is in range. 
+ CHECK(in_range && "Number range should have been enforced"); + if (!std::isnan(real_arg)) { + CHECK_EQ(static_cast<IntegerT>(real_arg), checked_arg); + } } return true; } @@ -688,6 +690,10 @@ class FastCApiObject { HandleScope handle_scope(isolate); + if (i::v8_flags.fuzzing) { + args.GetReturnValue().Set(Boolean::New(isolate, false)); + return; + } double real_arg = 0; if (args.Length() > 1 && args[1]->IsNumber()) { real_arg = args[1]->NumberValue(isolate->GetCurrentContext()).FromJust(); @@ -710,6 +716,144 @@ class FastCApiObject { } } +#ifdef V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + static AnyCType ClampCompareI32Patch(AnyCType receiver, AnyCType in_range, + AnyCType real_arg, AnyCType checked_arg, + AnyCType options) { + AnyCType ret; + ret.double_value = ClampCompare<int32_t>( + receiver.object_value, in_range.bool_value, real_arg.double_value, + checked_arg.int32_value, *options.options_value); + return ret; + } + static AnyCType ClampCompareU32Patch(AnyCType receiver, AnyCType in_range, + AnyCType real_arg, AnyCType checked_arg, + AnyCType options) { + AnyCType ret; + ret.double_value = ClampCompare<uint32_t>( + receiver.object_value, in_range.bool_value, real_arg.double_value, + checked_arg.uint32_value, *options.options_value); + return ret; + } + static AnyCType ClampCompareI64Patch(AnyCType receiver, AnyCType in_range, + AnyCType real_arg, AnyCType checked_arg, + AnyCType options) { + AnyCType ret; + ret.double_value = ClampCompare<int64_t>( + receiver.object_value, in_range.bool_value, real_arg.double_value, + checked_arg.int64_value, *options.options_value); + return ret; + } + static AnyCType ClampCompareU64Patch(AnyCType receiver, AnyCType in_range, + AnyCType real_arg, AnyCType checked_arg, + AnyCType options) { + AnyCType ret; + ret.double_value = ClampCompare<uint64_t>( + receiver.object_value, in_range.bool_value, real_arg.double_value, + checked_arg.uint64_value, *options.options_value); + return ret; + } +#endif // V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + + template <typename IntegerT> + static double ClampCompareCompute(bool in_range, double real_arg, + IntegerT checked_arg) { + if (!in_range) { + IntegerT lower_bound = std::numeric_limits<IntegerT>::min(); + IntegerT upper_bound = std::numeric_limits<IntegerT>::max(); + if (lower_bound < internal::kMinSafeInteger) { + lower_bound = static_cast<IntegerT>(internal::kMinSafeInteger); + } + if (upper_bound > internal::kMaxSafeInteger) { + upper_bound = static_cast<IntegerT>(internal::kMaxSafeInteger); + } + CHECK(!std::isnan(real_arg)); + if (real_arg < static_cast<double>(lower_bound)) { + CHECK_EQ(lower_bound, checked_arg); + } else if (real_arg > static_cast<double>(upper_bound)) { + CHECK_EQ(upper_bound, checked_arg); + } else { + FATAL("Expected value to be out of range."); + } + } else if (!std::isnan(real_arg)) { + if (real_arg != checked_arg) { + // Check if rounding towards nearest even number happened. + double diff = std::fabs(real_arg - checked_arg); + CHECK_LE(diff, 0.5); + if (diff == 0) { + // Check if rounding towards nearest even number happened. + CHECK_EQ(0, checked_arg % 2); + } else if (checked_arg % 2 == 1) { + // Behave as if rounding towards nearest even number *has* + // happened (as it does on the fast path). 
+ checked_arg += 1; + } + } else { + CHECK_EQ(static_cast<IntegerT>(real_arg), checked_arg); + } + } + return checked_arg; + } + + template <typename IntegerT> + static double ClampCompare(Local<Object> receiver, bool in_range, + double real_arg, IntegerT checked_arg, + FastApiCallbackOptions& options) { + FastCApiObject* self = UnwrapObject(receiver); + CHECK_NOT_NULL(self); + self->fast_call_count_++; + + double result = ClampCompareCompute(in_range, real_arg, checked_arg); + return static_cast<double>(result); + } + + template <typename IntegerT> + static bool IsInRange(double arg) { + return !std::isnan(arg) && + arg <= static_cast<double>(std::numeric_limits<IntegerT>::max()) && + arg >= static_cast<double>(std::numeric_limits<IntegerT>::min()); + } + + template <typename IntegerT> + static void ClampCompareSlowCallback( + const FunctionCallbackInfo<Value>& args) { + Isolate* isolate = args.GetIsolate(); + + FastCApiObject* self = UnwrapObject(args.This()); + CHECK_SELF_OR_THROW(); + self->slow_call_count_++; + + double real_arg = 0; + if (args.Length() > 1 && args[1]->IsNumber()) { + real_arg = args[1]->NumberValue(isolate->GetCurrentContext()).FromJust(); + } + double checked_arg_dbl = std::numeric_limits<double>::max(); + if (args.Length() > 2 && args[2]->IsNumber()) { + checked_arg_dbl = args[2].As<Number>()->Value(); + } + bool in_range = args[0]->IsBoolean() && args[0]->BooleanValue(isolate) && + IsInRange<IntegerT>(real_arg) && + IsInRange<IntegerT>(checked_arg_dbl); + + IntegerT checked_arg = std::numeric_limits<IntegerT>::max(); + if (in_range) { + if (checked_arg_dbl != std::numeric_limits<double>::max()) { + checked_arg = static_cast<IntegerT>(checked_arg_dbl); + } + double result = ClampCompareCompute(in_range, real_arg, checked_arg); + args.GetReturnValue().Set(Number::New(isolate, result)); + } else { + IntegerT clamped = std::numeric_limits<IntegerT>::max(); + if (std::isnan(checked_arg_dbl)) { + clamped = 0; + } else { + clamped = std::clamp(checked_arg, std::numeric_limits<IntegerT>::min(), + std::numeric_limits<IntegerT>::max()); + } + args.GetReturnValue().Set(Number::New(isolate, clamped)); + } + } + static bool IsFastCApiObjectFastCallback(v8::Local<v8::Object> receiver, bool should_fallback, v8::Local<v8::Value> arg, @@ -739,6 +883,7 @@ class FastCApiObject { ->GetTestApiObjectCtor() ->IsLeafTemplateForApiObject(object); } + static void IsFastCApiObjectSlowCallback( const FunctionCallbackInfo<Value>& args) { Isolate* isolate = args.GetIsolate(); @@ -776,6 +921,10 @@ class FastCApiObject { CHECK_SELF_OR_FALLBACK(false); self->fast_call_count_++; + if (i::v8_flags.fuzzing) { + return true; + } + CHECK_NOT_NULL(options.wasm_memory); uint8_t* memory = nullptr; CHECK(options.wasm_memory->getStorageIfAligned(&memory)); @@ -1066,6 +1215,8 @@ Local<FunctionTemplate> Shell::CreateTestFastCApiTemplate(Isolate* isolate) { signature, 1, ConstructorBehavior::kThrow, SideEffectType::kHasSideEffect, &add_all_annotate_c_func)); + // Testing enforce range annotation. + CFunction enforce_range_compare_i32_c_func = CFunctionBuilder() .Fn(FastCApiObject::EnforceRangeCompare<int32_t>) @@ -1126,6 +1277,68 @@ Local<FunctionTemplate> Shell::CreateTestFastCApiTemplate(Isolate* isolate) { Local<Value>(), signature, 1, ConstructorBehavior::kThrow, SideEffectType::kHasSideEffect, &enforce_range_compare_u64_c_func)); + // Testing clamp annotation. 
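Before the template registrations below, a hedged sketch of the clamping semantics these clamp_compare_* tests exercise, mirroring what ClampCompareCompute above verifies: out-of-range values saturate to the integer limits, NaN maps to 0, and in-range values round to the nearest integer with ties to even. This illustrates the expected behavior only; it is not V8's implementation of the kClampBit annotation.

    #include <algorithm>
    #include <cmath>
    #include <limits>

    template <typename IntegerT>
    IntegerT ClampToInteger(double x) {
      if (std::isnan(x)) return 0;  // NaN clamps to 0, as the slow path does.
      double lo = static_cast<double>(std::numeric_limits<IntegerT>::min());
      double hi = static_cast<double>(std::numeric_limits<IntegerT>::max());
      // nearbyint() rounds to nearest with ties to even under the default
      // floating-point environment.
      return static_cast<IntegerT>(std::nearbyint(std::clamp(x, lo, hi)));
    }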
+ + CFunction clamp_compare_i32_c_func = + CFunctionBuilder() + .Fn(FastCApiObject::ClampCompare<int32_t>) + .Arg<3, v8::CTypeInfo::Flags::kClampBit>() +#ifdef V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + .Patch(FastCApiObject::ClampCompareI32Patch) +#endif // V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + .Build(); + api_obj_ctor->PrototypeTemplate()->Set( + isolate, "clamp_compare_i32", + FunctionTemplate::New( + isolate, FastCApiObject::ClampCompareSlowCallback<int32_t>, + Local<Value>(), signature, 1, ConstructorBehavior::kThrow, + SideEffectType::kHasSideEffect, &clamp_compare_i32_c_func)); + + CFunction clamp_compare_u32_c_func = + CFunctionBuilder() + .Fn(FastCApiObject::ClampCompare<uint32_t>) + .Arg<3, v8::CTypeInfo::Flags::kClampBit>() +#ifdef V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + .Patch(FastCApiObject::ClampCompareU32Patch) +#endif // V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + .Build(); + api_obj_ctor->PrototypeTemplate()->Set( + isolate, "clamp_compare_u32", + FunctionTemplate::New( + isolate, FastCApiObject::ClampCompareSlowCallback<uint32_t>, + Local<Value>(), signature, 1, ConstructorBehavior::kThrow, + SideEffectType::kHasSideEffect, &clamp_compare_u32_c_func)); + + CFunction clamp_compare_i64_c_func = + CFunctionBuilder() + .Fn(FastCApiObject::ClampCompare<int64_t>) + .Arg<3, v8::CTypeInfo::Flags::kClampBit>() +#ifdef V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + .Patch(FastCApiObject::ClampCompareI64Patch) +#endif // V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + .Build(); + api_obj_ctor->PrototypeTemplate()->Set( + isolate, "clamp_compare_i64", + FunctionTemplate::New( + isolate, FastCApiObject::ClampCompareSlowCallback<int64_t>, + Local<Value>(), signature, 1, ConstructorBehavior::kThrow, + SideEffectType::kHasSideEffect, &clamp_compare_i64_c_func)); + + CFunction clamp_compare_u64_c_func = + CFunctionBuilder() + .Fn(FastCApiObject::ClampCompare<uint64_t>) + .Arg<3, v8::CTypeInfo::Flags::kClampBit>() +#ifdef V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + .Patch(FastCApiObject::ClampCompareU64Patch) +#endif // V8_USE_SIMULATOR_WITH_GENERIC_C_CALLS + .Build(); + api_obj_ctor->PrototypeTemplate()->Set( + isolate, "clamp_compare_u64", + FunctionTemplate::New( + isolate, FastCApiObject::ClampCompareSlowCallback<uint64_t>, + Local<Value>(), signature, 1, ConstructorBehavior::kThrow, + SideEffectType::kHasSideEffect, &clamp_compare_u64_c_func)); + CFunction is_valid_api_object_c_func = CFunction::Make(FastCApiObject::IsFastCApiObjectFastCallback); api_obj_ctor->PrototypeTemplate()->Set( diff --git a/deps/v8/src/d8/d8.cc b/deps/v8/src/d8/d8.cc index 8c46cc6a9f941c..37f7de888070ee 100644 --- a/deps/v8/src/d8/d8.cc +++ b/deps/v8/src/d8/d8.cc @@ -786,7 +786,7 @@ bool Shell::ExecuteString(Isolate* isolate, Local<String> source, ReportExceptions report_exceptions, ProcessMessageQueue process_message_queue) { i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); - if (i::FLAG_parse_only) { + if (i::v8_flags.parse_only) { i::VMState<PARSER> state(i_isolate); i::Handle<i::String> str = Utils::OpenHandle(*(source)); @@ -796,8 +796,8 @@ bool Shell::ExecuteString(Isolate* isolate, Local<String> source, i::UnoptimizedCompileFlags flags = i::UnoptimizedCompileFlags::ForToplevelCompile( - i_isolate, true, i::construct_language_mode(i::FLAG_use_strict), - i::REPLMode::kNo, ScriptType::kClassic, i::FLAG_lazy); + i_isolate, true, i::construct_language_mode(i::v8_flags.use_strict), + i::REPLMode::kNo, ScriptType::kClassic, i::v8_flags.lazy); if (options.compile_options == v8::ScriptCompiler::kEagerCompile) { 
flags.set_is_eager(true); @@ -1573,7 +1573,7 @@ bool Shell::LoadJSON(Isolate* isolate, const char* file_name) { PerIsolateData::PerIsolateData(Isolate* isolate) : isolate_(isolate), realms_(nullptr) { isolate->SetData(0, this); - if (i::FLAG_expose_async_hooks) { + if (i::v8_flags.expose_async_hooks) { async_hooks_wrapper_ = new AsyncHooks(isolate); } ignore_unhandled_promises_ = false; @@ -1585,7 +1585,7 @@ PerIsolateData::PerIsolateData(Isolate* isolate) PerIsolateData::~PerIsolateData() { isolate_->SetData(0, nullptr); // Not really needed, just to be sure... - if (i::FLAG_expose_async_hooks) { + if (i::v8_flags.expose_async_hooks) { delete async_hooks_wrapper_; // This uses the isolate } #if defined(LEAK_SANITIZER) @@ -1748,16 +1748,148 @@ int PerIsolateData::RealmIndexOrThrow( return index; } -// performance.now() returns a time stamp as double, measured in milliseconds. -// When FLAG_verify_predictable mode is enabled it returns result of +// GetTimestamp() returns a time stamp as double, measured in milliseconds. +// When v8_flags.verify_predictable mode is enabled it returns result of // v8::Platform::MonotonicallyIncreasingTime(). -void Shell::PerformanceNow(const v8::FunctionCallbackInfo<v8::Value>& args) { - if (i::FLAG_verify_predictable) { - args.GetReturnValue().Set(g_platform->MonotonicallyIncreasingTime()); +double Shell::GetTimestamp() { + if (i::v8_flags.verify_predictable) { + return g_platform->MonotonicallyIncreasingTime(); } else { base::TimeDelta delta = base::TimeTicks::Now() - kInitialTicks; - args.GetReturnValue().Set(delta.InMillisecondsF()); + return delta.InMillisecondsF(); + } +} +int64_t Shell::GetTracingTimestampFromPerformanceTimestamp( + double performance_timestamp) { + // Don't use this in --verify-predictable mode, predictable timestamps don't + // work well with tracing. + DCHECK(!i::v8_flags.verify_predictable); + base::TimeDelta delta = + base::TimeDelta::FromMillisecondsD(performance_timestamp); + // See TracingController::CurrentTimestampMicroseconds(). + return (delta + kInitialTicks).ToInternalValue(); +} + +// performance.now() returns GetTimestamp(). +void Shell::PerformanceNow(const v8::FunctionCallbackInfo<v8::Value>& args) { + args.GetReturnValue().Set(GetTimestamp()); +} + +// performance.mark() records and returns a PerformanceEntry with the current +// timestamp. 
+void Shell::PerformanceMark(const v8::FunctionCallbackInfo<v8::Value>& args) { + Isolate* isolate = args.GetIsolate(); + Local<Context> context = isolate->GetCurrentContext(); + + if (args.Length() < 1 || !args[0]->IsString()) { + args.GetIsolate()->ThrowError("Invalid 'name' argument"); + return; + } + Local<String> name = args[0].As<String>(); + + double timestamp = GetTimestamp(); + + Local<Object> performance_entry = Object::New(isolate); + performance_entry + ->DefineOwnProperty(context, + String::NewFromUtf8Literal(isolate, "entryType"), + String::NewFromUtf8Literal(isolate, "mark"), ReadOnly) + .Check(); + performance_entry + ->DefineOwnProperty(context, String::NewFromUtf8Literal(isolate, "name"), + name, ReadOnly) + .Check(); + performance_entry + ->DefineOwnProperty(context, + String::NewFromUtf8Literal(isolate, "startTime"), + Number::New(isolate, timestamp), ReadOnly) + .Check(); + performance_entry + ->DefineOwnProperty(context, + String::NewFromUtf8Literal(isolate, "duration"), + Integer::New(isolate, 0), ReadOnly) + .Check(); + + args.GetReturnValue().Set(performance_entry); +} + +// performance.measure() records and returns a PerformanceEntry with a duration +// since a given mark, or since zero. +void Shell::PerformanceMeasure( + const v8::FunctionCallbackInfo<v8::Value>& args) { + Isolate* isolate = args.GetIsolate(); + Local<Context> context = isolate->GetCurrentContext(); + + if (args.Length() < 1 || !args[0]->IsString()) { + args.GetIsolate()->ThrowError("Invalid 'name' argument"); + return; } + v8::Local<String> name = args[0].As<String>(); + + double start_timestamp = 0; + if (args.Length() >= 2) { + Local<Value> start_mark = args[1].As<Value>(); + if (!start_mark->IsObject()) { + args.GetIsolate()->ThrowError( + "Invalid 'startMark' argument: Not an Object"); + return; + } + Local<Value> start_time_field; + if (!start_mark.As<Object>() + ->Get(context, String::NewFromUtf8Literal(isolate, "startTime")) + .ToLocal(&start_time_field)) { + return; + } + if (!start_time_field->IsNumber()) { + args.GetIsolate()->ThrowError( + "Invalid 'startMark' argument: No numeric 'startTime' field"); + return; + } + start_timestamp = start_time_field.As<Number>()->Value(); + } + if (args.Length() > 2) { + args.GetIsolate()->ThrowError("Too many arguments"); + return; + } + + double end_timestamp = GetTimestamp(); + + if (options.trace_enabled) { + size_t hash = base::hash_combine(name->GetIdentityHash(), start_timestamp, + end_timestamp); + + String::Utf8Value utf8(isolate, name); + TRACE_EVENT_COPY_NESTABLE_ASYNC_BEGIN_WITH_TIMESTAMP1( + "v8", *utf8, static_cast<uint64_t>(hash), + GetTracingTimestampFromPerformanceTimestamp(start_timestamp), + "startTime", start_timestamp); + TRACE_EVENT_COPY_NESTABLE_ASYNC_END_WITH_TIMESTAMP0( + "v8", *utf8, static_cast<uint64_t>(hash), + GetTracingTimestampFromPerformanceTimestamp(end_timestamp)); + } + + Local<Object> performance_entry = Object::New(isolate); + performance_entry + ->DefineOwnProperty( + context, String::NewFromUtf8Literal(isolate, "entryType"), + String::NewFromUtf8Literal(isolate, "measure"), ReadOnly) + .Check(); + performance_entry + ->DefineOwnProperty(context, String::NewFromUtf8Literal(isolate, "name"), + name, ReadOnly) + .Check(); + performance_entry + ->DefineOwnProperty(context, + String::NewFromUtf8Literal(isolate, "startTime"), + Number::New(isolate, start_timestamp), ReadOnly) + .Check(); + performance_entry + ->DefineOwnProperty( + context, String::NewFromUtf8Literal(isolate, "duration"), + Number::New(isolate, 
end_timestamp - start_timestamp), ReadOnly) + .Check(); + + args.GetReturnValue().Set(performance_entry); } // performance.measureMemory() implements JavaScript Memory API proposal. @@ -2308,7 +2440,7 @@ void Shell::DisableDebugger(const v8::FunctionCallbackInfo<v8::Value>& args) { void Shell::SetPromiseHooks(const v8::FunctionCallbackInfo<v8::Value>& args) { Isolate* isolate = args.GetIsolate(); - if (i::FLAG_correctness_fuzzer_suppressions) { + if (i::v8_flags.correctness_fuzzer_suppressions) { // Setting promise hooks dynamically has unexpected timing side-effects // with certain promise optimizations. We might not get all callbacks for // previously scheduled Promises or optimized code-paths that skip Promise @@ -3046,15 +3178,6 @@ Local<String> Shell::Stringify(Isolate* isolate, Local<Value> value) { void Shell::NodeTypeCallback(const v8::FunctionCallbackInfo<v8::Value>& args) { v8::Isolate* isolate = args.GetIsolate(); - // HasInstance does a slow prototype chain lookup, and this function is used - // for micro benchmarks too. -#ifdef DEBUG - PerIsolateData* data = PerIsolateData::Get(isolate); - if (!data->GetDomNodeCtor()->HasInstance(args.This())) { - isolate->ThrowError("Calling .nodeType on wrong instance type."); - } -#endif - args.GetReturnValue().Set(v8::Number::New(isolate, 1)); } @@ -3177,7 +3300,7 @@ Local<ObjectTemplate> Shell::CreateGlobalTemplate(Isolate* isolate) { global_template->Set(isolate, "Worker", Shell::CreateWorkerTemplate(isolate)); // Prevent fuzzers from creating side effects. - if (!i::FLAG_fuzzing) { + if (!i::v8_flags.fuzzing) { global_template->Set(isolate, "os", Shell::CreateOSTemplate(isolate)); } global_template->Set(isolate, "d8", Shell::CreateD8Template(isolate)); @@ -3189,7 +3312,7 @@ Local<ObjectTemplate> Shell::CreateGlobalTemplate(Isolate* isolate) { FunctionTemplate::New(isolate, Fuzzilli), PropertyAttribute::DontEnum); #endif // V8_FUZZILLI - if (i::FLAG_expose_async_hooks) { + if (i::v8_flags.expose_async_hooks) { global_template->Set(isolate, "async_hooks", Shell::CreateAsyncHookTemplate(isolate)); } @@ -3281,6 +3404,10 @@ Local<ObjectTemplate> Shell::CreatePerformanceTemplate(Isolate* isolate) { Local<ObjectTemplate> performance_template = ObjectTemplate::New(isolate); performance_template->Set(isolate, "now", FunctionTemplate::New(isolate, PerformanceNow)); + performance_template->Set(isolate, "mark", + FunctionTemplate::New(isolate, PerformanceMark)); + performance_template->Set(isolate, "measure", + FunctionTemplate::New(isolate, PerformanceMeasure)); performance_template->Set( isolate, "measureMemory", FunctionTemplate::New(isolate, PerformanceMeasureMemory)); @@ -3357,7 +3484,7 @@ Local<ObjectTemplate> Shell::CreateD8Template(Isolate* isolate) { Local<ObjectTemplate> test_template = ObjectTemplate::New(isolate); // For different runs of correctness fuzzing the bytecode of a function // might get flushed, resulting in spurious errors. - if (!i::FLAG_correctness_fuzzer_suppressions) { + if (!i::v8_flags.correctness_fuzzer_suppressions) { test_template->Set( isolate, "verifySourcePositions", FunctionTemplate::New(isolate, TestVerifySourcePositions)); @@ -3365,8 +3492,8 @@ Local<ObjectTemplate> Shell::CreateD8Template(Isolate* isolate) { // Correctness fuzzing will attempt to compare results of tests with and // without turbo_fast_api_calls, so we don't expose the fast_c_api // constructor when --correctness_fuzzer_suppressions is on. 
- if (options.expose_fast_api && i::FLAG_turbo_fast_api_calls && - !i::FLAG_correctness_fuzzer_suppressions) { + if (options.expose_fast_api && i::v8_flags.turbo_fast_api_calls && + !i::v8_flags.correctness_fuzzer_suppressions) { test_template->Set(isolate, "FastCAPI", Shell::CreateTestFastCApiTemplate(isolate)); test_template->Set(isolate, "LeafInterfaceType", @@ -3496,8 +3623,8 @@ void Shell::Initialize(Isolate* isolate, D8Console* console, isolate->SetPromiseRejectCallback(PromiseRejectCallback); if (isOnMainThread) { // Set up counters - if (i::FLAG_map_counters[0] != '\0') { - MapCounters(isolate, i::FLAG_map_counters); + if (i::v8_flags.map_counters[0] != '\0') { + MapCounters(isolate, i::v8_flags.map_counters); } // Disable default message reporting. isolate->AddMessageListenerWithErrorLevel( @@ -3559,7 +3686,8 @@ Local<Context> Shell::CreateEvaluationContext(Isolate* isolate) { Local<Context> context = Context::New(isolate, nullptr, global_template); DCHECK_IMPLIES(context.IsEmpty(), isolate->IsExecutionTerminating()); if (context.IsEmpty()) return {}; - if (i::FLAG_perf_prof_annotate_wasm || i::FLAG_vtune_prof_annotate_wasm) { + if (i::v8_flags.perf_prof_annotate_wasm || + i::v8_flags.vtune_prof_annotate_wasm) { isolate->SetWasmLoadSourceMapCallback(Shell::WasmLoadSourceMapCallback); } InitializeModuleEmbedderData(context); @@ -3600,7 +3728,7 @@ void Shell::WriteIgnitionDispatchCountersFile(v8::Isolate* isolate) { ->interpreter() ->GetDispatchCountersObject(); std::ofstream dispatch_counters_stream( - i::FLAG_trace_ignition_dispatches_output_file); + i::v8_flags.trace_ignition_dispatches_output_file); dispatch_counters_stream << *String::Utf8Value( isolate, JSON::Stringify(context, Utils::ToLocal(dispatch_counters)) .ToLocalChecked()); @@ -3777,7 +3905,7 @@ void Dummy(char* arg) {} V8_NOINLINE void FuzzerMonitor::SimulateErrors() { // Initialize a fresh RNG to not interfere with JS execution. std::unique_ptr<base::RandomNumberGenerator> rng; - int64_t seed = internal::FLAG_random_seed; + int64_t seed = i::v8_flags.random_seed; if (seed != 0) { rng = std::make_unique<base::RandomNumberGenerator>(seed); } else { @@ -3944,7 +4072,7 @@ MaybeLocal<String> Shell::ReadFile(Isolate* isolate, const char* name, int size = static_cast<int>(file->size()); char* chars = static_cast<char*>(file->memory()); - if (i::FLAG_use_external_strings && i::String::IsAscii(chars, size)) { + if (i::v8_flags.use_external_strings && i::String::IsAscii(chars, size)) { String::ExternalOneByteStringResource* resource = new ExternalOwningOneByteStringResource(std::move(file)); return String::NewExternalOneByte(isolate, resource); @@ -4721,11 +4849,11 @@ bool Shell::SetOptions(int argc, char* argv[]) { options.no_fail = true; argv[i] = nullptr; } else if (strcmp(argv[i], "--dump-counters") == 0) { - i::FLAG_slow_histograms = true; + i::v8_flags.slow_histograms = true; options.dump_counters = true; argv[i] = nullptr; } else if (strcmp(argv[i], "--dump-counters-nvp") == 0) { - i::FLAG_slow_histograms = true; + i::v8_flags.slow_histograms = true; options.dump_counters_nvp = true; argv[i] = nullptr; } else if (strncmp(argv[i], "--icu-data-file=", 16) == 0) { @@ -4846,7 +4974,7 @@ bool Shell::SetOptions(int argc, char* argv[]) { } else if (strcmp(argv[i], "--enable-etw-stack-walking") == 0) { options.enable_etw_stack_walking = true; // This needs to be manually triggered for JIT ETW events to work. 
- i::FLAG_enable_etw_stack_walking = true; + i::v8_flags.enable_etw_stack_walking = true; #if defined(V8_ENABLE_SYSTEM_INSTRUMENTATION) } else if (strcmp(argv[i], "--enable-system-instrumentation") == 0) { options.enable_system_instrumentation = true; @@ -4855,7 +4983,7 @@ #if defined(V8_OS_WIN) // Guard this because the flag has a lot of overhead and is not currently used // by macOS - i::FLAG_interpreted_frames_native_stack = true; + i::v8_flags.interpreted_frames_native_stack = true; #endif argv[i] = nullptr; #endif @@ -4905,17 +5033,17 @@ " --module execute a file as a JavaScript module\n" " --web-snapshot execute a file as a web snapshot\n\n"; using HelpOptions = i::FlagList::HelpOptions; - i::FLAG_abort_on_contradictory_flags = true; + i::v8_flags.abort_on_contradictory_flags = true; i::FlagList::SetFlagsFromCommandLine(&argc, argv, true, HelpOptions(HelpOptions::kExit, usage)); - options.mock_arraybuffer_allocator = i::FLAG_mock_arraybuffer_allocator; + options.mock_arraybuffer_allocator = i::v8_flags.mock_arraybuffer_allocator; options.mock_arraybuffer_allocator_limit = - i::FLAG_mock_arraybuffer_allocator_limit; + i::v8_flags.mock_arraybuffer_allocator_limit; #if MULTI_MAPPED_ALLOCATOR_AVAILABLE - options.multi_mapped_mock_allocator = i::FLAG_multi_mapped_mock_allocator; + options.multi_mapped_mock_allocator = i::v8_flags.multi_mapped_mock_allocator; #endif - if (i::FLAG_stress_snapshot && options.expose_fast_api && + if (i::v8_flags.stress_snapshot && options.expose_fast_api && check_d8_flag_contradictions) { FATAL("Flag --expose-fast-api is incompatible with --stress-snapshot."); } @@ -4935,7 +5063,7 @@ bool Shell::SetOptions(int argc, char* argv[]) { strcmp(str, "--json") == 0) { // Pass on to SourceGroup, which understands these options. } else if (strncmp(str, "--", 2) == 0) { - if (!i::FLAG_correctness_fuzzer_suppressions) { + if (!i::v8_flags.correctness_fuzzer_suppressions) { printf("Warning: unknown flag %s.\nTry --help for options\n", str); } } else if (strcmp(str, "-e") == 0 && i + 1 < argc) { @@ -4980,7 +5108,7 @@ int Shell::RunMain(Isolate* isolate, bool last_run) { if (!CompleteMessageLoop(isolate)) success = false; } WriteLcovData(isolate, options.lcov_file); - if (last_run && i::FLAG_stress_snapshot) { + if (last_run && i::v8_flags.stress_snapshot) { static constexpr bool kClearRecompilableData = true; i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); i::Handle<i::Context> i_context = Utils::OpenHandle(*context); @@ -5089,7 +5217,7 @@ bool ProcessMessages( // task queue of the {kProcessGlobalPredictablePlatformWorkerTaskQueue} // isolate. We execute all background tasks after running one foreground // task. - if (i::FLAG_verify_predictable) { + if (i::v8_flags.verify_predictable) { while (v8::platform::PumpMessageLoop( g_default_platform, kProcessGlobalPredictablePlatformWorkerTaskQueue, @@ -5120,7 +5248,7 @@ bool Shell::CompleteMessageLoop(Isolate* isolate) { return should_wait ? platform::MessageLoopBehavior::kWaitForWork : platform::MessageLoopBehavior::kDoNotWait; }; - if (i::FLAG_verify_predictable) { + if (i::v8_flags.verify_predictable) { bool ran_tasks = ProcessMessages( isolate, [] { return platform::MessageLoopBehavior::kDoNotWait; }); if (get_waiting_behaviour() == @@ -5392,7 +5520,7 @@ class D8Testing { * stress coverage. 
*/ static int GetStressRuns() { - if (internal::FLAG_stress_runs != 0) return internal::FLAG_stress_runs; + if (i::v8_flags.stress_runs != 0) return i::v8_flags.stress_runs; #ifdef DEBUG // In debug mode the code runs much slower so stressing will only make two // runs. @@ -5480,7 +5608,7 @@ void d8_sigterm_handler(int signal, siginfo_t* info, void* context) { void d8_install_sigterm_handler() { #ifdef V8_OS_POSIX - CHECK(!i::FLAG_fuzzing); + CHECK(!i::v8_flags.fuzzing); struct sigaction sa; sa.sa_sigaction = d8_sigterm_handler; sigemptyset(&sa.sa_mask); @@ -5496,7 +5624,7 @@ void d8_install_sigterm_handler() { int Shell::Main(int argc, char* argv[]) { v8::base::EnsureConsoleOutput(); if (!SetOptions(argc, argv)) return 1; - if (!i::FLAG_fuzzing) d8_install_sigterm_handler(); + if (!i::v8_flags.fuzzing) d8_install_sigterm_handler(); v8::V8::InitializeICUDefaultLocation(argv[0], options.icu_data_file); @@ -5515,7 +5643,7 @@ int Shell::Main(int argc, char* argv[]) { std::ofstream trace_file; std::unique_ptr<platform::tracing::TracingController> tracing; - if (options.trace_enabled && !i::FLAG_verify_predictable) { + if (options.trace_enabled && !i::v8_flags.verify_predictable) { tracing = std::make_unique<platform::tracing::TracingController>(); if (!options.enable_etw_stack_walking) { @@ -5564,29 +5692,29 @@ int Shell::Main(int argc, char* argv[]) { options.thread_pool_size, v8::platform::IdleTaskSupport::kEnabled, in_process_stack_dumping, std::move(tracing)); g_default_platform = g_platform.get(); - if (i::FLAG_predictable) { + if (i::v8_flags.predictable) { g_platform = MakePredictablePlatform(std::move(g_platform)); } if (options.stress_delay_tasks) { - int64_t random_seed = i::FLAG_fuzzer_random_seed; - if (!random_seed) random_seed = i::FLAG_random_seed; + int64_t random_seed = i::v8_flags.fuzzer_random_seed; + if (!random_seed) random_seed = i::v8_flags.random_seed; // If random_seed is still 0 here, the {DelayedTasksPlatform} will choose a // random seed. g_platform = MakeDelayedTasksPlatform(std::move(g_platform), random_seed); } - if (i::FLAG_trace_turbo_cfg_file == nullptr) { + if (i::v8_flags.trace_turbo_cfg_file == nullptr) { V8::SetFlagsFromString("--trace-turbo-cfg-file=turbo.cfg"); } - if (i::FLAG_redirect_code_traces_to == nullptr) { + if (i::v8_flags.redirect_code_traces_to == nullptr) { V8::SetFlagsFromString("--redirect-code-traces-to=code.asm"); } v8::V8::InitializePlatform(g_platform.get()); // Disable flag freezing if we are producing a code cache, because for that we - // modify FLAG_hash_seed (below). + // modify v8_flags.hash_seed (below). 
if (options.code_cache_options != ShellOptions::kNoProduceCache) { - i::FLAG_freeze_flags_after_init = false; + i::v8_flags.freeze_flags_after_init = false; } v8::V8::Initialize(); @@ -5623,7 +5751,7 @@ int Shell::Main(int argc, char* argv[]) { } create_params.array_buffer_allocator = Shell::array_buffer_allocator; #ifdef ENABLE_VTUNE_JIT_INTERFACE - if (i::FLAG_enable_vtunejit) { + if (i::v8_flags.enable_vtunejit) { create_params.code_event_handler = vTune::GetVtuneCodeEventHandler(); } #endif @@ -5705,8 +5833,8 @@ int Shell::Main(int argc, char* argv[]) { CpuProfilingOptions{}); } - if (i::FLAG_stress_runs > 0) { - options.stress_runs = i::FLAG_stress_runs; + if (i::v8_flags.stress_runs > 0) { + options.stress_runs = i::v8_flags.stress_runs; for (int i = 0; i < options.stress_runs && result == 0; i++) { printf("============ Run %d/%d ============\n", i + 1, options.stress_runs.get()); @@ -5725,10 +5853,10 @@ int Shell::Main(int argc, char* argv[]) { Isolate::CreateParams create_params2; create_params2.array_buffer_allocator = Shell::array_buffer_allocator; // Use a different hash seed. - i::FLAG_hash_seed = i::FLAG_hash_seed ^ 1337; + i::v8_flags.hash_seed = i::v8_flags.hash_seed ^ 1337; Isolate* isolate2 = Isolate::New(create_params2); // Restore old hash seed. - i::FLAG_hash_seed = i::FLAG_hash_seed ^ 1337; + i::v8_flags.hash_seed = i::v8_flags.hash_seed ^ 1337; { D8Console console2(isolate2); Initialize(isolate2, &console2); @@ -5764,7 +5892,7 @@ int Shell::Main(int argc, char* argv[]) { RunShell(isolate); } - if (i::FLAG_trace_ignition_dispatches_output_file != nullptr) { + if (i::v8_flags.trace_ignition_dispatches_output_file != nullptr) { WriteIgnitionDispatchCountersFile(isolate); } diff --git a/deps/v8/src/d8/d8.h b/deps/v8/src/d8/d8.h index 00e71a12e01ad9..3cfa31322dbc02 100644 --- a/deps/v8/src/d8/d8.h +++ b/deps/v8/src/d8/d8.h @@ -540,7 +540,14 @@ class Shell : public i::AllStatic { static void AddHistogramSample(void* histogram, int sample); static void MapCounters(v8::Isolate* isolate, const char* name); + static double GetTimestamp(); + static int64_t GetTracingTimestampFromPerformanceTimestamp( + double performance_timestamp); + static void PerformanceNow(const v8::FunctionCallbackInfo<v8::Value>& args); + static void PerformanceMark(const v8::FunctionCallbackInfo<v8::Value>& args); + static void PerformanceMeasure( + const v8::FunctionCallbackInfo<v8::Value>& args); static void PerformanceMeasureMemory( const v8::FunctionCallbackInfo<v8::Value>& args); diff --git a/deps/v8/src/date/date.cc b/deps/v8/src/date/date.cc index c4742aab30f1d7..8ffeee0d1a2f83 100644 --- a/deps/v8/src/date/date.cc +++ b/deps/v8/src/date/date.cc @@ -53,7 +53,7 @@ void DateCache::ResetDateCache( after_ = &dst_[1]; ymd_valid_ = false; #ifdef V8_INTL_SUPPORT - if (!FLAG_icu_timezone_data) { + if (!v8_flags.icu_timezone_data) { #endif local_offset_ms_ = kInvalidLocalOffsetInMs; #ifdef V8_INTL_SUPPORT @@ -215,7 +215,7 @@ void DateCache::BreakDownTime(int64_t time_ms, int* year, int* month, int* day, int DateCache::GetLocalOffsetFromOS(int64_t time_ms, bool is_utc) { double offset; #ifdef V8_INTL_SUPPORT - if (FLAG_icu_timezone_data) { + if (v8_flags.icu_timezone_data) { offset = tz_cache_->LocalTimeOffset(static_cast<double>(time_ms), is_utc); } else { #endif diff --git a/deps/v8/src/debug/debug-coverage.cc b/deps/v8/src/debug/debug-coverage.cc index 70fdb41f3bbc41..c434cede657989 100644 --- a/deps/v8/src/debug/debug-coverage.cc +++ b/deps/v8/src/debug/debug-coverage.cc @@ -478,7 +478,7 @@ void 
CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info, void PrintBlockCoverage(const CoverageFunction* function, SharedFunctionInfo info, bool has_nonempty_source_range, bool function_is_relevant) { - DCHECK(FLAG_trace_block_coverage); + DCHECK(v8_flags.trace_block_coverage); std::unique_ptr<char[]> function_name = function->name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL); i::PrintF( @@ -543,10 +543,10 @@ void CollectAndMaybeResetCounts(Isolate* isolate, count = static_cast<uint32_t>(func.feedback_vector().invocation_count()); } else if (func.raw_feedback_cell().interrupt_budget() < - FLAG_interrupt_budget_for_feedback_allocation) { + v8_flags.interrupt_budget_for_feedback_allocation) { // TODO(jgruber): The condition above is no longer precise since we // may use either the fixed interrupt_budget or - // FLAG_interrupt_budget_factor_for_feedback_allocation. If the + // v8_flags.interrupt_budget_factor_for_feedback_allocation. If the // latter, we may incorrectly set a count of 1. // // We haven't allocated feedback vector, but executed the function @@ -605,9 +605,8 @@ std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) { DCHECK(!isolate->is_best_effort_code_coverage()); std::unique_ptr<Coverage> result = Collect(isolate, isolate->code_coverage_mode()); - if (!isolate->is_collecting_type_profile() && - (isolate->is_precise_binary_code_coverage() || - isolate->is_block_binary_code_coverage())) { + if (isolate->is_precise_binary_code_coverage() || + isolate->is_block_binary_code_coverage()) { // We do not have to hold onto feedback vectors for invocations we already // reported. So we can reset the list. isolate->SetFeedbackVectorsForProfilingTools( @@ -735,7 +734,7 @@ std::unique_ptr<Coverage> Coverage::Collect( functions->emplace_back(function); } - if (FLAG_trace_block_coverage) { + if (v8_flags.trace_block_coverage) { PrintBlockCoverage(&function, *info, has_nonempty_source_range, function_is_relevant); } @@ -766,10 +765,8 @@ void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) { // following coverage recording (without reloads) will be at function // granularity. isolate->debug()->RemoveAllCoverageInfos(); - if (!isolate->is_collecting_type_profile()) { - isolate->SetFeedbackVectorsForProfilingTools( - ReadOnlyRoots(isolate).undefined_value()); - } + isolate->SetFeedbackVectorsForProfilingTools( + ReadOnlyRoots(isolate).undefined_value()); break; case debug::CoverageMode::kBlockBinary: case debug::CoverageMode::kBlockCount: diff --git a/deps/v8/src/debug/debug-evaluate.cc b/deps/v8/src/debug/debug-evaluate.cc index 65bdfa241a1a0a..2813456b12636a 100644 --- a/deps/v8/src/debug/debug-evaluate.cc +++ b/deps/v8/src/debug/debug-evaluate.cc @@ -19,6 +19,7 @@ #include "src/interpreter/bytecodes.h" #include "src/objects/code-inl.h" #include "src/objects/contexts.h" +#include "src/objects/string-set-inl.h" #if V8_ENABLE_WEBASSEMBLY #include "src/debug/debug-wasm-objects.h" @@ -210,7 +211,9 @@ DebugEvaluate::ContextBuilder::ContextBuilder(Isolate* isolate, : isolate_(isolate), frame_inspector_(frame, inlined_jsframe_index, isolate), scope_iterator_(isolate, &frame_inspector_, - ScopeIterator::ReparseStrategy::kScript) { + v8_flags.experimental_reuse_locals_blocklists + ? 
ScopeIterator::ReparseStrategy::kScriptIfNeeded + : ScopeIterator::ReparseStrategy::kScript) { Handle<Context> outer_context(frame_inspector_.GetFunction()->context(), isolate); evaluation_context_ = outer_context; @@ -246,8 +249,16 @@ DebugEvaluate::ContextBuilder::ContextBuilder(Isolate* isolate, if (scope_iterator_.HasContext()) { context_chain_element.wrapped_context = scope_iterator_.CurrentContext(); } - if (!scope_iterator_.InInnerScope()) { - context_chain_element.blocklist = scope_iterator_.GetLocals(); + if (v8_flags.experimental_reuse_locals_blocklists) { + // With the re-use experiment we only need `DebugEvaluateContexts` up + // to (and including) the paused function scope so the evaluated + // expression can access the materialized stack locals. + if (!scope_iterator_.InInnerScope()) break; + } else { + CHECK(!v8_flags.experimental_reuse_locals_blocklists); + if (!scope_iterator_.InInnerScope()) { + context_chain_element.blocklist = scope_iterator_.GetLocals(); + } } context_chain_.push_back(context_chain_element); } @@ -261,10 +272,28 @@ DebugEvaluate::ContextBuilder::ContextBuilder(Isolate* isolate, ContextChainElement element = *rit; scope_info = ScopeInfo::CreateForWithScope(isolate, scope_info); scope_info->SetIsDebugEvaluateScope(); - if (!element.blocklist.is_null()) { + + if (v8_flags.experimental_reuse_locals_blocklists) { + if (rit == context_chain_.rbegin()) { + // The DebugEvaluateContext we create for the closure scope is the only + // DebugEvaluateContext with a block list. This means we'll retrieve + // the existing block list from the paused function scope + // and also associate the temporary scope_info we create here with that + // blocklist. + Handle<ScopeInfo> function_scope_info = handle( + frame_inspector_.GetFunction()->shared().scope_info(), isolate_); + Handle<Object> block_list = handle( + isolate_->LocalsBlockListCacheGet(function_scope_info), isolate_); + CHECK(block_list->IsStringSet()); + isolate_->LocalsBlockListCacheSet(scope_info, Handle<ScopeInfo>::null(), + Handle<StringSet>::cast(block_list)); + } + } else if (!element.blocklist.is_null()) { + CHECK(!v8_flags.experimental_reuse_locals_blocklists); scope_info = ScopeInfo::RecreateWithBlockList(isolate, scope_info, element.blocklist); } + evaluation_context_ = factory->NewDebugEvaluateContext( evaluation_context_, scope_info, element.materialized_object, element.wrapped_context); @@ -407,7 +436,7 @@ bool DebugEvaluate::IsSideEffectFreeIntrinsic(Runtime::FunctionId id) { INLINE_INTRINSIC_ALLOWLIST(INLINE_CASE) return true; default: - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] intrinsic %s may cause side effect.\n", Runtime::FunctionForId(id)->name); } @@ -573,6 +602,8 @@ DebugInfo::SideEffectState BuiltinGetSideEffectState(Builtin id) { case Builtin::kArrayPrototypeFlat: case Builtin::kArrayPrototypeFlatMap: case Builtin::kArrayPrototypeJoin: + case Builtin::kArrayPrototypeGroup: + case Builtin::kArrayPrototypeGroupToMap: case Builtin::kArrayPrototypeKeys: case Builtin::kArrayPrototypeLastIndexOf: case Builtin::kArrayPrototypeSlice: @@ -975,7 +1006,7 @@ DebugInfo::SideEffectState BuiltinGetSideEffectState(Builtin id) { return DebugInfo::kRequiresRuntimeChecks; default: - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] built-in %s may cause side effect.\n", Builtins::name(id)); } @@ -1003,7 +1034,7 @@ bool 
BytecodeRequiresRuntimeCheck(interpreter::Bytecode bytecode) { // static DebugInfo::SideEffectState DebugEvaluate::FunctionGetSideEffectState( Isolate* isolate, Handle<SharedFunctionInfo> info) { - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] Checking function %s for side effect.\n", info->DebugNameCStr().get()); } @@ -1014,7 +1045,7 @@ DebugInfo::SideEffectState DebugEvaluate::FunctionGetSideEffectState( // Check bytecodes against allowlist. Handle<BytecodeArray> bytecode_array(info->GetBytecodeArray(isolate), isolate); - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { bytecode_array->Print(); } bool requires_runtime_checks = false; @@ -1027,7 +1058,7 @@ DebugInfo::SideEffectState DebugEvaluate::FunctionGetSideEffectState( continue; } - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] bytecode %s may cause side effect.\n", interpreter::Bytecodes::ToString(bytecode)); } diff --git a/deps/v8/src/debug/debug-interface.cc b/deps/v8/src/debug/debug-interface.cc index 015fb4cec5d4f6..510e874b1a02f4 100644 --- a/deps/v8/src/debug/debug-interface.cc +++ b/deps/v8/src/debug/debug-interface.cc @@ -14,7 +14,6 @@ #include "src/debug/debug-evaluate.h" #include "src/debug/debug-property-iterator.h" #include "src/debug/debug-stack-trace-iterator.h" -#include "src/debug/debug-type-profile.h" #include "src/debug/debug.h" #include "src/execution/vm-state-inl.h" #include "src/heap/heap.h" @@ -948,8 +947,8 @@ MaybeLocal<UnboundScript> CompileInspectorScript(Isolate* v8_isolate, isolate, str, i::ScriptDetails(), cached_data, ScriptCompiler::kNoCompileOptions, ScriptCompiler::kNoCacheBecauseInspector, - i::FLAG_expose_inspector_scripts ? i::NOT_NATIVES_CODE - : i::INSPECTOR_CODE); + i::v8_flags.expose_inspector_scripts ? 
i::NOT_NATIVES_CODE + : i::INSPECTOR_CODE); has_pending_exception = !maybe_function_info.ToHandle(&result); RETURN_ON_FAILED_EXECUTION(UnboundScript); } @@ -1317,48 +1316,6 @@ void Coverage::SelectMode(Isolate* isolate, CoverageMode mode) { i::Coverage::SelectMode(reinterpret_cast<i::Isolate*>(isolate), mode); } -int TypeProfile::Entry::SourcePosition() const { return entry_->position; } - -std::vector<MaybeLocal<String>> TypeProfile::Entry::Types() const { - std::vector<MaybeLocal<String>> result; - for (const internal::Handle<internal::String>& type : entry_->types) { - result.emplace_back(ToApiHandle<String>(type)); - } - return result; -} - -TypeProfile::ScriptData::ScriptData( - size_t index, std::shared_ptr<i::TypeProfile> type_profile) - : script_(&type_profile->at(index)), - type_profile_(std::move(type_profile)) {} - -Local<Script> TypeProfile::ScriptData::GetScript() const { - return ToApiHandle<Script>(script_->script); -} - -std::vector<TypeProfile::Entry> TypeProfile::ScriptData::Entries() const { - std::vector<TypeProfile::Entry> result; - for (const internal::TypeProfileEntry& entry : script_->entries) { - result.push_back(TypeProfile::Entry(&entry, type_profile_)); - } - return result; -} - -TypeProfile TypeProfile::Collect(Isolate* isolate) { - return TypeProfile( - i::TypeProfile::Collect(reinterpret_cast<i::Isolate*>(isolate))); -} - -void TypeProfile::SelectMode(Isolate* isolate, TypeProfileMode mode) { - i::TypeProfile::SelectMode(reinterpret_cast<i::Isolate*>(isolate), mode); -} - -size_t TypeProfile::ScriptCount() const { return type_profile_->size(); } - -TypeProfile::ScriptData TypeProfile::GetScriptData(size_t i) const { - return ScriptData(i, type_profile_); -} - MaybeLocal<v8::Value> EphemeronTable::Get(v8::Isolate* isolate, v8::Local<v8::Value> key) { i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); @@ -1431,7 +1388,11 @@ MaybeLocal<Message> GetMessageFromPromise(Local<Promise> p) { } bool isExperimentalAsyncStackTaggingApiEnabled() { - return v8::internal::FLAG_experimental_async_stack_tagging_api; + return i::v8_flags.experimental_async_stack_tagging_api; +} + +bool isExperimentalRemoveInternalScopesPropertyEnabled() { + return i::v8_flags.experimental_remove_internal_scopes_property; } void RecordAsyncStackTaggingCreateTaskCall(v8::Isolate* v8_isolate) { diff --git a/deps/v8/src/debug/debug-interface.h b/deps/v8/src/debug/debug-interface.h index 3f85bb60224718..b67d61f8363ae6 100644 --- a/deps/v8/src/debug/debug-interface.h +++ b/deps/v8/src/debug/debug-interface.h @@ -34,13 +34,10 @@ namespace internal { struct CoverageBlock; struct CoverageFunction; struct CoverageScript; -struct TypeProfileEntry; -struct TypeProfileScript; class Coverage; class DisableBreak; class PostponeInterruptsScope; class Script; -class TypeProfile; } // namespace internal namespace debug { @@ -438,64 +435,6 @@ class V8_EXPORT_PRIVATE Coverage { std::shared_ptr<i::Coverage> coverage_; }; -/* - * Provide API layer between inspector and type profile. - */ -class V8_EXPORT_PRIVATE TypeProfile { - public: - MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(TypeProfile); - - class ScriptData; // Forward declaration. 
- - class V8_EXPORT_PRIVATE Entry { - public: - MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(Entry); - - int SourcePosition() const; - std::vector<MaybeLocal<String>> Types() const; - - private: - explicit Entry(const i::TypeProfileEntry* entry, - std::shared_ptr<i::TypeProfile> type_profile) - : entry_(entry), type_profile_(std::move(type_profile)) {} - - const i::TypeProfileEntry* entry_; - std::shared_ptr<i::TypeProfile> type_profile_; - - friend class v8::debug::TypeProfile::ScriptData; - }; - - class V8_EXPORT_PRIVATE ScriptData { - public: - MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(ScriptData); - - Local<debug::Script> GetScript() const; - std::vector<Entry> Entries() const; - - private: - explicit ScriptData(size_t index, - std::shared_ptr<i::TypeProfile> type_profile); - - i::TypeProfileScript* script_; - std::shared_ptr<i::TypeProfile> type_profile_; - - friend class v8::debug::TypeProfile; - }; - - static TypeProfile Collect(Isolate* isolate); - - static void SelectMode(Isolate* isolate, TypeProfileMode mode); - - size_t ScriptCount() const; - ScriptData GetScriptData(size_t i) const; - - private: - explicit TypeProfile(std::shared_ptr<i::TypeProfile> type_profile) - : type_profile_(std::move(type_profile)) {} - - std::shared_ptr<i::TypeProfile> type_profile_; -}; - class V8_EXPORT_PRIVATE ScopeIterator { public: static std::unique_ptr<ScopeIterator> CreateForFunction( @@ -732,6 +671,7 @@ AccessorPair* AccessorPair::Cast(v8::Value* value) { MaybeLocal<Message> GetMessageFromPromise(Local<Promise> promise); bool isExperimentalAsyncStackTaggingApiEnabled(); +bool isExperimentalRemoveInternalScopesPropertyEnabled(); void RecordAsyncStackTaggingCreateTaskCall(v8::Isolate* isolate); diff --git a/deps/v8/src/debug/debug-scopes.cc b/deps/v8/src/debug/debug-scopes.cc index 3bb08e8c2ac490..03caab3900d1bf 100644 --- a/deps/v8/src/debug/debug-scopes.cc +++ b/deps/v8/src/debug/debug-scopes.cc @@ -220,6 +220,14 @@ void ScopeIterator::TryParseAndRetrieveScopes(ReparseStrategy strategy) { ignore_nested_scopes = location.IsReturn(); } + if (strategy == ReparseStrategy::kScriptIfNeeded) { + CHECK(v8_flags.experimental_reuse_locals_blocklists); + Object maybe_block_list = isolate_->LocalsBlockListCacheGet(scope_info); + calculate_blocklists_ = maybe_block_list.IsTheHole(); + strategy = calculate_blocklists_ ? ReparseStrategy::kScript + : ReparseStrategy::kFunctionLiteral; + } + // Reparse the code and analyze the scopes. // Depending on the choosen strategy, the whole script or just // the closure is re-parsed for function scopes. @@ -291,6 +299,7 @@ void ScopeIterator::TryParseAndRetrieveScopes(ReparseStrategy strategy) { } } + MaybeCollectAndStoreLocalBlocklists(); UnwrapEvaluationContext(); } else { // A failed reparse indicates that the preparser has diverged from the @@ -481,9 +490,10 @@ void ScopeIterator::Next() { } } - if (leaving_closure) function_ = Handle<JSFunction>(); - + MaybeCollectAndStoreLocalBlocklists(); UnwrapEvaluationContext(); + + if (leaving_closure) function_ = Handle<JSFunction>(); } // Return the type of the current scope. 
@@ -561,7 +571,7 @@ Handle<JSObject> ScopeIterator::ScopeObject(Mode mode) { auto visitor = [=](Handle<String> name, Handle<Object> value, ScopeType scope_type) { if (value->IsOptimizedOut(isolate_)) { - if (FLAG_experimental_value_unavailable) { + if (v8_flags.experimental_value_unavailable) { JSObject::SetAccessor(scope, name, isolate_->factory()->value_unavailable_accessor(), NONE) @@ -577,7 +587,7 @@ Handle<JSObject> ScopeIterator::ScopeObject(Mode mode) { // REPL mode in a script context. Catch this case. return false; } - if (FLAG_experimental_value_unavailable) { + if (v8_flags.experimental_value_unavailable) { JSObject::SetAccessor(scope, name, isolate_->factory()->value_unavailable_accessor(), NONE) @@ -1123,5 +1133,175 @@ bool ScopeIterator::SetScriptVariableValue(Handle<String> variable_name, return false; } +namespace { + +// Given the scope and context of a paused function, this class calculates +// all the necessary block lists on the scope chain and stores them in the +// global LocalsBlockListCache ephemeron table. +// +// Doc: bit.ly/chrome-devtools-debug-evaluate-design. +// +// The algorithm works in a single walk of the scope chain from the +// paused function scope outwards to the script scope. +// +// When we step from scope "a" to its outer scope "b", we do: +// +// 1. Add all stack-allocated variables from "b" to the blocklists. +// 2. Does "b" need a context? If yes: +// - Store all current blocklists in the global table +// - Start a new blocklist for scope "b" +// 3. Is "b" a function scope without a context? If yes: +// - Start a new blocklist for scope "b" +// +class LocalBlocklistsCollector { + public: + LocalBlocklistsCollector(Isolate* isolate, Handle<Script> script, + Handle<Context> context, + DeclarationScope* closure_scope); + void CollectAndStore(); + + private: + void InitializeWithClosureScope(); + void AdvanceToNextNonHiddenScope(); + void CollectCurrentLocalsIntoBlocklists(); + Handle<ScopeInfo> FindScopeInfoForScope(Scope* scope) const; + void StoreFunctionBlocklists(Handle<ScopeInfo> outer_scope_info); + + Isolate* isolate_; + Handle<Script> script_; + Handle<Context> context_; + Scope* scope_; + DeclarationScope* closure_scope_; + + Handle<StringSet> context_blocklist_; + std::map<Scope*, Handle<StringSet>> function_blocklists_; +}; + +LocalBlocklistsCollector::LocalBlocklistsCollector( + Isolate* isolate, Handle<Script> script, Handle<Context> context, + DeclarationScope* closure_scope) + : isolate_(isolate), + script_(script), + context_(context), + scope_(closure_scope), + closure_scope_(closure_scope) {} + +void LocalBlocklistsCollector::InitializeWithClosureScope() { + CHECK(scope_->is_declaration_scope()); + function_blocklists_.emplace(scope_, StringSet::New(isolate_)); + if (scope_->NeedsContext()) context_blocklist_ = StringSet::New(isolate_); +} + +void LocalBlocklistsCollector::AdvanceToNextNonHiddenScope() { + DCHECK(scope_ && scope_->outer_scope()); + do { + scope_ = scope_->outer_scope(); + CHECK(scope_); + } while (scope_->is_hidden()); +} + +void LocalBlocklistsCollector::CollectCurrentLocalsIntoBlocklists() { + for (Variable* var : *scope_->locals()) { + if (var->location() == VariableLocation::PARAMETER || + var->location() == VariableLocation::LOCAL) { + if (!context_blocklist_.is_null()) { + context_blocklist_ = + StringSet::Add(isolate_, context_blocklist_, var->name()); + } + for (auto& pair : function_blocklists_) { + pair.second = StringSet::Add(isolate_, pair.second, var->name()); + } + } + } +} + +Handle<ScopeInfo> 
LocalBlocklistsCollector::FindScopeInfoForScope( + Scope* scope) const { + DisallowGarbageCollection no_gc; + SharedFunctionInfo::ScriptIterator iterator(isolate_, *script_); + for (SharedFunctionInfo info = iterator.Next(); !info.is_null(); + info = iterator.Next()) { + if (scope->start_position() == info.StartPosition() && + scope->end_position() == info.EndPosition()) { + if (info.is_compiled() && !info.scope_info().is_null()) { + return handle(info.scope_info(), isolate_); + } + return Handle<ScopeInfo>(); + } + } + return Handle<ScopeInfo>(); +} + +void LocalBlocklistsCollector::StoreFunctionBlocklists( + Handle<ScopeInfo> outer_scope_info) { + for (const auto& pair : function_blocklists_) { + Handle<ScopeInfo> scope_info = FindScopeInfoForScope(pair.first); + // If we don't find a ScopeInfo it's not tragic. It means we'll do + // a full-reparse in case we pause in that function in the future. + // The only ScopeInfo that MUST be found is for the closure_scope_. + CHECK_IMPLIES(pair.first == closure_scope_, !scope_info.is_null()); + if (scope_info.is_null()) continue; + isolate_->LocalsBlockListCacheSet(scope_info, outer_scope_info, + pair.second); + } +} + +void LocalBlocklistsCollector::CollectAndStore() { + InitializeWithClosureScope(); + + while (scope_->outer_scope() && !context_->IsNativeContext()) { + AdvanceToNextNonHiddenScope(); + // 1. Add all stack-allocated variables of `scope_` to the various lists. + CollectCurrentLocalsIntoBlocklists(); + + // 2. If the current scope requires a context then all the blocklists "stop" + // here and we store them. Next, advance the current context so + // `context_` and `scope_` match again. + if (scope_->NeedsContext()) { + if (!context_blocklist_.is_null()) { + // Only store the block list and advance the context if the + // context_blocklist is set. This handles the case when we start on + // a closure scope that doesn't require a context. In that case + // `context_` is already the right context for `scope_` so we don't + // need to advance `context_`. + isolate_->LocalsBlockListCacheSet( + handle(context_->scope_info(), isolate_), + handle(context_->previous().scope_info(), isolate_), + context_blocklist_); + context_ = handle(context_->previous(), isolate_); + } + + StoreFunctionBlocklists(handle(context_->scope_info(), isolate_)); + + context_blocklist_ = StringSet::New(isolate_); + function_blocklists_.clear(); + } else if (scope_->is_function_scope()) { + // 3. If `scope` is a function scope with an SFI, start recording + // locals for its ScopeInfo. + CHECK(!scope_->NeedsContext()); + function_blocklists_.emplace(scope_, StringSet::New(isolate_)); + } + } + + // In case we don't have any outer scopes we still need to record the empty + // block list for the paused function to prevent future re-parses. 
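+  // If the loop above ended right after storing at a context boundary,
+  // `function_blocklists_` is empty here and this call is a no-op.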
+  StoreFunctionBlocklists(handle(context_->scope_info(), isolate_));
+}
+
+}  // namespace
+
+void ScopeIterator::MaybeCollectAndStoreLocalBlocklists() const {
+  if (!calculate_blocklists_ || current_scope_ != closure_scope_) return;
+
+  CHECK(v8_flags.experimental_reuse_locals_blocklists);
+  DCHECK(isolate_
+             ->LocalsBlockListCacheGet(
+                 handle(function_->shared().scope_info(), isolate_))
+             .IsTheHole());
+  LocalBlocklistsCollector collector(isolate_, script_, context_,
+                                     closure_scope_);
+  collector.CollectAndStore();
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/deps/v8/src/debug/debug-scopes.h b/deps/v8/src/debug/debug-scopes.h
index ca16dad72623ba..fa2a79bc46ab5d 100644
--- a/deps/v8/src/debug/debug-scopes.h
+++ b/deps/v8/src/debug/debug-scopes.h
@@ -43,6 +43,11 @@ class ScopeIterator {
   enum class ReparseStrategy {
     kScript,
     kFunctionLiteral,
+    // Checks whether the paused function (and its scope chain) already has
+    // its blocklist calculated and re-parses the whole script if not.
+    // Otherwise only the function literal is re-parsed.
+    // Only valid with the "experimental_reuse_locals_blocklists" flag enabled.
+    kScriptIfNeeded,
   };

   ScopeIterator(Isolate* isolate, FrameInspector* frame_inspector,
@@ -125,6 +130,7 @@
   Scope* start_scope_ = nullptr;
   Scope* current_scope_ = nullptr;
   bool seen_script_scope_ = false;
+  bool calculate_blocklists_ = false;

   inline JavaScriptFrame* GetFrame() const {
     return frame_inspector_->javascript_frame();
@@ -136,6 +142,14 @@
   void AdvanceContext();
   void CollectLocalsFromCurrentScope();

+  // Calculates all the block lists starting at the current scope and stores
+  // them in the global "LocalsBlocklistCache".
+  //
+  // Is a no-op unless `calculate_blocklists_` is true and
+  // current_scope_ == closure_scope_. Otherwise `context_` does not match
+  // current_scope_/closure_scope_.
+  void MaybeCollectAndStoreLocalBlocklists() const;
+
   int GetSourcePosition() const;

   void TryParseAndRetrieveScopes(ReparseStrategy strategy);
diff --git a/deps/v8/src/debug/debug-type-profile.cc b/deps/v8/src/debug/debug-type-profile.cc
deleted file mode 100644
index a4cae83e3b710d..00000000000000
--- a/deps/v8/src/debug/debug-type-profile.cc
+++ /dev/null
@@ -1,121 +0,0 @@
-// Copyright 2017 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/debug/debug-type-profile.h"
-
-#include "src/execution/isolate.h"
-#include "src/objects/feedback-vector.h"
-#include "src/objects/objects-inl.h"
-#include "src/objects/objects.h"
-
-namespace v8 {
-namespace internal {
-
-std::unique_ptr<TypeProfile> TypeProfile::Collect(Isolate* isolate) {
-  std::unique_ptr<TypeProfile> result(new TypeProfile());
-
-  // Feedback vectors are already listed to prevent losing them to GC.
- DCHECK(isolate->factory() - ->feedback_vectors_for_profiling_tools() - ->IsArrayList()); - Handle<ArrayList> list = Handle<ArrayList>::cast( - isolate->factory()->feedback_vectors_for_profiling_tools()); - - Script::Iterator scripts(isolate); - - for (Script script = scripts.Next(); !script.is_null(); - script = scripts.Next()) { - if (!script.IsUserJavaScript()) { - continue; - } - - Handle<Script> script_handle(script, isolate); - - TypeProfileScript type_profile_script(script_handle); - std::vector<TypeProfileEntry>* entries = &type_profile_script.entries; - - // TODO(franzih): Sort the vectors by script first instead of iterating - // the list multiple times. - for (int i = 0; i < list->Length(); i++) { - FeedbackVector vector = FeedbackVector::cast(list->Get(i)); - SharedFunctionInfo info = vector.shared_function_info(); - DCHECK(info.IsSubjectToDebugging()); - - // Match vectors with script. - if (script != info.script()) { - continue; - } - if (!info.HasFeedbackMetadata() || info.feedback_metadata().is_empty() || - !info.feedback_metadata().HasTypeProfileSlot()) { - continue; - } - FeedbackSlot slot = vector.GetTypeProfileSlot(); - FeedbackNexus nexus(vector, slot); - std::vector<int> source_positions = nexus.GetSourcePositions(); - for (int position : source_positions) { - DCHECK_GE(position, 0); - entries->emplace_back(position, nexus.GetTypesForSourcePositions( - static_cast<uint32_t>(position))); - } - - // Releases type profile data collected so far. - nexus.ResetTypeProfile(); - } - if (!entries->empty()) { - result->emplace_back(type_profile_script); - } - } - return result; -} - -void TypeProfile::SelectMode(Isolate* isolate, debug::TypeProfileMode mode) { - if (mode != isolate->type_profile_mode()) { - // Changing the type profile mode can change the bytecode that would be - // generated for a function, which can interfere with lazy source positions, - // so just force source position collection whenever there's such a change. - isolate->CollectSourcePositionsForAllBytecodeArrays(); - } - - HandleScope handle_scope(isolate); - - if (mode == debug::TypeProfileMode::kNone) { - if (!isolate->factory() - ->feedback_vectors_for_profiling_tools() - ->IsUndefined(isolate)) { - // Release type profile data collected so far. - - // Feedback vectors are already listed to prevent losing them to GC. - DCHECK(isolate->factory() - ->feedback_vectors_for_profiling_tools() - ->IsArrayList()); - Handle<ArrayList> list = Handle<ArrayList>::cast( - isolate->factory()->feedback_vectors_for_profiling_tools()); - - for (int i = 0; i < list->Length(); i++) { - FeedbackVector vector = FeedbackVector::cast(list->Get(i)); - SharedFunctionInfo info = vector.shared_function_info(); - DCHECK(info.IsSubjectToDebugging()); - if (info.feedback_metadata().HasTypeProfileSlot()) { - FeedbackSlot slot = vector.GetTypeProfileSlot(); - FeedbackNexus nexus(vector, slot); - nexus.ResetTypeProfile(); - } - } - - // Delete the feedback vectors from the list if they're not used by code - // coverage. 
- if (isolate->is_best_effort_code_coverage()) { - isolate->SetFeedbackVectorsForProfilingTools( - ReadOnlyRoots(isolate).undefined_value()); - } - } - } else { - DCHECK_EQ(debug::TypeProfileMode::kCollect, mode); - isolate->MaybeInitializeVectorListFromHeap(); - } - isolate->set_type_profile_mode(mode); -} - -} // namespace internal -} // namespace v8 diff --git a/deps/v8/src/debug/debug-type-profile.h b/deps/v8/src/debug/debug-type-profile.h deleted file mode 100644 index a4da12f1e41c92..00000000000000 --- a/deps/v8/src/debug/debug-type-profile.h +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2017 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef V8_DEBUG_DEBUG_TYPE_PROFILE_H_ -#define V8_DEBUG_DEBUG_TYPE_PROFILE_H_ - -#include <memory> -#include <vector> - -#include "src/debug/debug-interface.h" -#include "src/handles/handles.h" - -namespace v8 { -namespace internal { - -// Forward declaration. -class Isolate; - -struct TypeProfileEntry { - explicit TypeProfileEntry( - int pos, std::vector<v8::internal::Handle<internal::String>> t) - : position(pos), types(std::move(t)) {} - int position; - std::vector<v8::internal::Handle<internal::String>> types; -}; - -struct TypeProfileScript { - explicit TypeProfileScript(Handle<Script> s) : script(s) {} - Handle<Script> script; - std::vector<TypeProfileEntry> entries; -}; - -class TypeProfile : public std::vector<TypeProfileScript> { - public: - static std::unique_ptr<TypeProfile> Collect(Isolate* isolate); - static void SelectMode(Isolate* isolate, debug::TypeProfileMode mode); - - private: - TypeProfile() = default; -}; - -} // namespace internal -} // namespace v8 - -#endif // V8_DEBUG_DEBUG_TYPE_PROFILE_H_ diff --git a/deps/v8/src/debug/debug-wasm-objects.cc b/deps/v8/src/debug/debug-wasm-objects.cc index a7933438abaf69..1a4eca61fc0041 100644 --- a/deps/v8/src/debug/debug-wasm-objects.cc +++ b/deps/v8/src/debug/debug-wasm-objects.cc @@ -729,6 +729,18 @@ Handle<String> GetRefTypeName(Isolate* isolate, wasm::ValueType type, return ToInternalString(name, isolate); } +// Returns the type name for the given value. Uses the module object for +// providing user-defined type names if available, otherwise falls back +// to numbers for indexed types. 
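+// The module object handle may be null (see the callers below); in that case
+// the raw name from wasm::ValueType::name() is internalized and returned.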
+Handle<String> GetRefTypeName(Isolate* isolate, wasm::ValueType type, + Handle<WasmModuleObject> module_object) { + if (!module_object.is_null()) { + return GetRefTypeName(isolate, type, module_object->native_module()); + } + std::string name = type.name(); + return isolate->factory()->InternalizeString({name.data(), name.length()}); +} + } // namespace // static @@ -905,49 +917,54 @@ Handle<WasmValueObject> WasmValueObject::New( } case wasm::kRefNull: case wasm::kRef: { - t = GetRefTypeName(isolate, value.type(), module_object->native_module()); Handle<Object> ref = value.to_ref(); if (ref->IsWasmStruct()) { WasmTypeInfo type_info = ref->GetHeapObject().map().wasm_type_info(); wasm::ValueType type = wasm::ValueType::FromIndex( wasm::ValueKind::kRef, type_info.type_index()); - t = GetRefTypeName( - isolate, type, - type_info.instance().module_object().native_module()); - v = StructProxy::Create(isolate, Handle<WasmStruct>::cast(ref), - module_object); + Handle<WasmModuleObject> module(type_info.instance().module_object(), + isolate); + t = GetRefTypeName(isolate, type, module->native_module()); + v = StructProxy::Create(isolate, Handle<WasmStruct>::cast(ref), module); } else if (ref->IsWasmArray()) { WasmTypeInfo type_info = ref->GetHeapObject().map().wasm_type_info(); wasm::ValueType type = wasm::ValueType::FromIndex( wasm::ValueKind::kRef, type_info.type_index()); - t = GetRefTypeName( - isolate, type, - type_info.instance().module_object().native_module()); - v = ArrayProxy::Create(isolate, Handle<WasmArray>::cast(ref), - module_object); + Handle<WasmModuleObject> module(type_info.instance().module_object(), + isolate); + t = GetRefTypeName(isolate, type, module->native_module()); + v = ArrayProxy::Create(isolate, Handle<WasmArray>::cast(ref), module); } else if (ref->IsWasmInternalFunction()) { - v = handle(Handle<WasmInternalFunction>::cast(ref)->external(), - isolate); + auto internal_fct = Handle<WasmInternalFunction>::cast(ref); + v = handle(internal_fct->external(), isolate); + // If the module is not provided by the caller, retrieve it from the + // instance object. If the function was created in JavaScript using + // `new WebAssembly.Function(...)`, a module for name resolution is not + // available. + if (module_object.is_null() && + internal_fct->ref().IsWasmInstanceObject()) { + module_object = handle( + WasmInstanceObject::cast(internal_fct->ref()).module_object(), + isolate); + } + t = GetRefTypeName(isolate, value.type(), module_object); } else if (ref->IsJSFunction() || ref->IsSmi() || ref->IsNull() || ref->IsString() || - value.type().is_reference_to(wasm::HeapType::kExtern)) { + value.type().is_reference_to(wasm::HeapType::kExtern) || + value.type().is_reference_to(wasm::HeapType::kAny)) { + t = GetRefTypeName(isolate, value.type(), module_object); v = ref; } else { // Fail gracefully. base::EmbeddedVector<char, 64> error; int len = SNPrintF(error, "unimplemented object type: %d", HeapObject::cast(*ref).map().instance_type()); + t = GetRefTypeName(isolate, value.type(), module_object); v = isolate->factory()->InternalizeString(error.SubVector(0, len)); } break; } - case wasm::kRtt: { - // TODO(7748): Expose RTTs to DevTools. 
- t = isolate->factory()->InternalizeString(base::StaticCharVector("rtt")); - v = isolate->factory()->InternalizeString( - base::StaticCharVector("(unimplemented)")); - break; - } + case wasm::kRtt: case wasm::kVoid: case wasm::kBottom: UNREACHABLE(); @@ -1037,8 +1054,11 @@ Handle<ArrayList> AddWasmTableObjectInternalProperties( for (int i = 0; i < length; ++i) { Handle<Object> entry = WasmTableObject::Get(isolate, table, i); wasm::WasmValue wasm_value(entry, table->type()); - Handle<WasmModuleObject> module( - WasmInstanceObject::cast(table->instance()).module_object(), isolate); + Handle<WasmModuleObject> module; + if (table->instance().IsWasmInstanceObject()) { + module = Handle<WasmModuleObject>( + WasmInstanceObject::cast(table->instance()).module_object(), isolate); + } Handle<Object> debug_value = WasmValueObject::New(isolate, wasm_value, module); entries->set(i, *debug_value); diff --git a/deps/v8/src/debug/debug.cc b/deps/v8/src/debug/debug.cc index 674d2b39bd965d..72c8e1aa5fb00d 100644 --- a/deps/v8/src/debug/debug.cc +++ b/deps/v8/src/debug/debug.cc @@ -249,9 +249,11 @@ BreakIterator::BreakIterator(Handle<DebugInfo> debug_info) int BreakIterator::BreakIndexFromPosition(int source_position) { for (; !Done(); Next()) { + if (GetDebugBreakType() == DEBUG_BREAK_SLOT_AT_SUSPEND) continue; if (source_position <= position()) { int first_break = break_index(); for (; !Done(); Next()) { + if (GetDebugBreakType() == DEBUG_BREAK_SLOT_AT_SUSPEND) continue; if (source_position == position()) return break_index(); } return first_break; @@ -297,6 +299,10 @@ DebugBreakType BreakIterator::GetDebugBreakType() { } else if (bytecode == interpreter::Bytecode::kReturn) { return DEBUG_BREAK_SLOT_AT_RETURN; } else if (bytecode == interpreter::Bytecode::kSuspendGenerator) { + // SuspendGenerator should always only carry an expression position that + // is used in stack trace construction, but should never be a breakable + // position reported to the debugger front-end. + DCHECK(!source_position_iterator_.is_statement()); return DEBUG_BREAK_SLOT_AT_SUSPEND; } else if (interpreter::Bytecodes::IsCallOrConstruct(bytecode)) { return DEBUG_BREAK_SLOT_AT_CALL; @@ -574,20 +580,21 @@ void Debug::Break(JavaScriptFrame* frame, Handle<JSFunction> break_target) { if (current_frame_count > target_frame_count) return; V8_FALLTHROUGH; case StepInto: { - // Special case StepInto and StepOver for generators that are about to - // suspend, in which case we go into "generator stepping" mode. The - // exception here is the initial implicit yield in generators (which - // always has a suspend ID of 0), where we return to the caller first, - // instead of triggering "generator stepping" mode straight away. - if (location.IsSuspend() && (!IsGeneratorFunction(shared->kind()) || - location.generator_suspend_id() > 0)) { + // StepInto and StepOver should enter "generator stepping" mode, except + // for the implicit initial yield in generators, where it should simply + // step out of the generator function. 
+ if (location.IsSuspend()) { DCHECK(!has_suspended_generator()); - thread_local_.suspended_generator_ = - location.GetGeneratorObjectForSuspendedFrame(frame); ClearStepping(); + if (!IsGeneratorFunction(shared->kind()) || + location.generator_suspend_id() > 0) { + thread_local_.suspended_generator_ = + location.GetGeneratorObjectForSuspendedFrame(frame); + } else { + PrepareStep(StepOut); + } return; } - FrameSummary summary = FrameSummary::GetTop(frame); step_break = step_break || location.IsReturn() || current_frame_count != last_frame_count || @@ -1220,9 +1227,7 @@ void Debug::PrepareStep(StepAction step_action) { // Any step at a return is a step-out, and a step-out at a suspend behaves // like a return. if (location.IsReturn() || - (location.IsSuspend() && - (step_action == StepOut || (IsGeneratorFunction(shared->kind()) && - location.generator_suspend_id() == 0)))) { + (location.IsSuspend() && step_action == StepOut)) { // On StepOut we'll ignore our further calls to current function in // PrepareStepIn callback. if (last_step_action() == StepOut) { @@ -1637,23 +1642,18 @@ void Debug::InstallDebugBreakTrampoline() { } namespace { -template <typename Iterator> -void GetBreakablePositions(Iterator* it, int start_position, int end_position, - std::vector<BreakLocation>* locations) { - while (!it->Done()) { - if (it->position() >= start_position && it->position() < end_position) { - locations->push_back(it->GetBreakLocation()); - } - it->Next(); - } -} - void FindBreakablePositions(Handle<DebugInfo> debug_info, int start_position, int end_position, std::vector<BreakLocation>* locations) { DCHECK(debug_info->HasInstrumentedBytecodeArray()); BreakIterator it(debug_info); - GetBreakablePositions(&it, start_position, end_position, locations); + while (!it.Done()) { + if (it.GetDebugBreakType() != DEBUG_BREAK_SLOT_AT_SUSPEND && + it.position() >= start_position && it.position() < end_position) { + locations->push_back(it.GetBreakLocation()); + } + it.Next(); + } } bool CompileTopLevel(Isolate* isolate, Handle<Script> script) { @@ -2573,7 +2573,7 @@ void Debug::HandleDebugBreak(IgnoreBreakMode ignore_break_mode, #ifdef DEBUG void Debug::PrintBreakLocation() { - if (!FLAG_print_break_location) return; + if (!v8_flags.print_break_location) return; RCS_SCOPE(isolate_, RuntimeCallCounterId::kDebugger); HandleScope scope(isolate_); StackTraceFrameIterator iterator(isolate_); @@ -2783,7 +2783,7 @@ bool Debug::PerformSideEffectCheck(Handle<JSFunction> function, debug_info->GetSideEffectState(isolate_); switch (side_effect_state) { case DebugInfo::kHasSideEffects: - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] Function %s failed side effect check.\n", function->shared().DebugNameCStr().get()); } @@ -2846,7 +2846,7 @@ bool Debug::PerformSideEffectCheckForCallback( case SideEffectType::kHasSideEffect: break; } - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] API Callback '"); info.name().ShortPrint(); PrintF("' may cause side effect.\n"); @@ -2854,13 +2854,13 @@ bool Debug::PerformSideEffectCheckForCallback( } else if (callback_info->IsInterceptorInfo()) { InterceptorInfo info = InterceptorInfo::cast(*callback_info); if (info.has_no_side_effect()) return true; - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] API Interceptor may cause side effect.\n"); } 
} else if (callback_info->IsCallHandlerInfo()) { CallHandlerInfo info = CallHandlerInfo::cast(*callback_info); if (info.IsSideEffectFreeCallHandlerInfo()) return true; - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] API CallHandlerInfo may cause side effect.\n"); } } @@ -2922,7 +2922,7 @@ bool Debug::PerformSideEffectCheckForObject(Handle<Object> object) { return true; } - if (FLAG_trace_side_effect_free_debug_evaluate) { + if (v8_flags.trace_side_effect_free_debug_evaluate) { PrintF("[debug-evaluate] failed runtime side effect check.\n"); } side_effect_check_failed_ = true; diff --git a/deps/v8/src/debug/debug.h b/deps/v8/src/debug/debug.h index 24a107dd936e69..3c28d0d1aab171 100644 --- a/deps/v8/src/debug/debug.h +++ b/deps/v8/src/debug/debug.h @@ -148,13 +148,13 @@ class V8_EXPORT_PRIVATE BreakIterator { void ClearDebugBreak(); void SetDebugBreak(); + DebugBreakType GetDebugBreakType(); + private: int BreakIndexFromPosition(int position); Isolate* isolate(); - DebugBreakType GetDebugBreakType(); - Handle<DebugInfo> debug_info_; int break_index_; int position_; diff --git a/deps/v8/src/debug/interface-types.h b/deps/v8/src/debug/interface-types.h index 0321a4d9c6457c..71bf7fd187e2cb 100644 --- a/deps/v8/src/debug/interface-types.h +++ b/deps/v8/src/debug/interface-types.h @@ -78,11 +78,6 @@ enum class CoverageMode { kBlockBinary, }; -enum class TypeProfileMode { - kNone, - kCollect, -}; - class V8_EXPORT_PRIVATE BreakLocation : public Location { public: BreakLocation(int line_number, int column_number, BreakLocationType type) diff --git a/deps/v8/src/debug/liveedit-diff.cc b/deps/v8/src/debug/liveedit-diff.cc index 613561590365de..ff2f8aa88a9a9c 100644 --- a/deps/v8/src/debug/liveedit-diff.cc +++ b/deps/v8/src/debug/liveedit-diff.cc @@ -10,216 +10,12 @@ #include "src/base/logging.h" #include "src/base/optional.h" -#include "src/common/globals.h" namespace v8 { namespace internal { namespace { -// A simple implementation of dynamic programming algorithm. It solves -// the problem of finding the difference of 2 arrays. It uses a table of results -// of subproblems. Each cell contains a number together with 2-bit flag -// that helps building the chunk list. -class Differencer { - public: - explicit Differencer(Comparator::Input* input) - : input_(input), len1_(input->GetLength1()), len2_(input->GetLength2()) {} - - void Initialize() {} - - // Makes sure that result for the full problem is calculated and stored - // in the table together with flags showing a path through subproblems. - void FillTable() { - // Determine common prefix to skip. - int minLen = std::min(len1_, len2_); - while (prefixLen_ < minLen && input_->Equals(prefixLen_, prefixLen_)) { - ++prefixLen_; - } - - // Pre-fill common suffix in the table. 
- for (int pos1 = len1_, pos2 = len2_; pos1 > prefixLen_ && - pos2 > prefixLen_ && - input_->Equals(--pos1, --pos2);) { - set_value4_and_dir(pos1, pos2, 0, EQ); - } - - CompareUpToTail(prefixLen_, prefixLen_); - } - - void SaveResult(Comparator::Output* chunk_writer) { - ResultWriter writer(chunk_writer); - - if (prefixLen_) writer.eq(prefixLen_); - for (int pos1 = prefixLen_, pos2 = prefixLen_; true;) { - if (pos1 < len1_) { - if (pos2 < len2_) { - Direction dir = get_direction(pos1, pos2); - switch (dir) { - case EQ: - writer.eq(); - pos1++; - pos2++; - break; - case SKIP1: - writer.skip1(1); - pos1++; - break; - case SKIP2: - case SKIP_ANY: - writer.skip2(1); - pos2++; - break; - default: - UNREACHABLE(); - } - } else { - writer.skip1(len1_ - pos1); - break; - } - } else { - if (len2_ != pos2) { - writer.skip2(len2_ - pos2); - } - break; - } - } - writer.close(); - } - - private: - Comparator::Input* input_; - std::map<std::pair<int, int>, int> buffer_; - int len1_; - int len2_; - int prefixLen_ = 0; - - enum Direction { - EQ = 0, - SKIP1, - SKIP2, - SKIP_ANY, - - MAX_DIRECTION_FLAG_VALUE = SKIP_ANY - }; - - // Computes result for a subtask and optionally caches it in the buffer table. - // All results values are shifted to make space for flags in the lower bits. - int CompareUpToTail(int pos1, int pos2) { - if (pos1 == len1_) { - return (len2_ - pos2) << kDirectionSizeBits; - } - if (pos2 == len2_) { - return (len1_ - pos1) << kDirectionSizeBits; - } - int res = get_value4(pos1, pos2); - if (res != kEmptyCellValue) { - return res; - } - Direction dir; - if (input_->Equals(pos1, pos2)) { - res = CompareUpToTail(pos1 + 1, pos2 + 1); - dir = EQ; - } else { - int res1 = CompareUpToTail(pos1 + 1, pos2) + (1 << kDirectionSizeBits); - int res2 = CompareUpToTail(pos1, pos2 + 1) + (1 << kDirectionSizeBits); - if (res1 == res2) { - res = res1; - dir = SKIP_ANY; - } else if (res1 < res2) { - res = res1; - dir = SKIP1; - } else { - res = res2; - dir = SKIP2; - } - } - set_value4_and_dir(pos1, pos2, res, dir); - return res; - } - - inline int get_cell(int i1, int i2) { - auto it = buffer_.find(std::make_pair(i1, i2)); - return it == buffer_.end() ? kEmptyCellValue : it->second; - } - - inline void set_cell(int i1, int i2, int value) { - buffer_.insert(std::make_pair(std::make_pair(i1, i2), value)); - } - - // Each cell keeps a value plus direction. Value is multiplied by 4. - void set_value4_and_dir(int i1, int i2, int value4, Direction dir) { - DCHECK_EQ(0, value4 & kDirectionMask); - set_cell(i1, i2, value4 | dir); - } - - int get_value4(int i1, int i2) { - return get_cell(i1, i2) & (kMaxUInt32 ^ kDirectionMask); - } - Direction get_direction(int i1, int i2) { - return static_cast<Direction>(get_cell(i1, i2) & kDirectionMask); - } - - static const int kDirectionSizeBits = 2; - static const int kDirectionMask = (1 << kDirectionSizeBits) - 1; - static const int kEmptyCellValue = ~0u << kDirectionSizeBits; - - // This method only holds static assert statement (unfortunately you cannot - // place one in class scope). 
- void StaticAssertHolder() { - static_assert(MAX_DIRECTION_FLAG_VALUE < (1 << kDirectionSizeBits)); - } - - class ResultWriter { - public: - explicit ResultWriter(Comparator::Output* chunk_writer) - : chunk_writer_(chunk_writer), - pos1_(0), - pos2_(0), - pos1_begin_(-1), - pos2_begin_(-1), - has_open_chunk_(false) {} - void eq(int len = 1) { - FlushChunk(); - pos1_ += len; - pos2_ += len; - } - void skip1(int len1) { - StartChunk(); - pos1_ += len1; - } - void skip2(int len2) { - StartChunk(); - pos2_ += len2; - } - void close() { FlushChunk(); } - - private: - Comparator::Output* chunk_writer_; - int pos1_; - int pos2_; - int pos1_begin_; - int pos2_begin_; - bool has_open_chunk_; - - void StartChunk() { - if (!has_open_chunk_) { - pos1_begin_ = pos1_; - pos2_begin_ = pos2_; - has_open_chunk_ = true; - } - } - - void FlushChunk() { - if (has_open_chunk_) { - chunk_writer_->AddChunk(pos1_begin_, pos2_begin_, pos1_ - pos1_begin_, - pos2_ - pos2_begin_); - has_open_chunk_ = false; - } - } - }; -}; - // Implements Myer's Algorithm from // "An O(ND) Difference Algorithm and Its Variations", particularly the // linear space refinement mentioned in section 4b. @@ -608,17 +404,8 @@ class MyersDiffer { } // namespace void Comparator::CalculateDifference(Comparator::Input* input, - Comparator::Output* result_writer, - Comparator::CompareMethod method) { - if (method == CompareMethod::kDynamicProgramming) { - Differencer differencer(input); - differencer.Initialize(); - differencer.FillTable(); - differencer.SaveResult(result_writer); - } else { - CHECK_EQ(method, CompareMethod::kMyers); - MyersDiffer::MyersDiff(input, result_writer); - } + Comparator::Output* result_writer) { + MyersDiffer::MyersDiff(input, result_writer); } } // namespace internal diff --git a/deps/v8/src/debug/liveedit-diff.h b/deps/v8/src/debug/liveedit-diff.h index 9649b19cdb8835..15bedc3eacfb4d 100644 --- a/deps/v8/src/debug/liveedit-diff.h +++ b/deps/v8/src/debug/liveedit-diff.h @@ -34,14 +34,8 @@ class Comparator { virtual ~Output() = default; }; - enum class CompareMethod { - kDynamicProgramming, - kMyers, - }; - // Finds the difference between 2 arrays of elements. - static void CalculateDifference(Input* input, Output* result_writer, - CompareMethod = CompareMethod::kMyers); + static void CalculateDifference(Input* input, Output* result_writer); }; } // namespace internal diff --git a/deps/v8/src/debug/liveedit.cc b/deps/v8/src/debug/liveedit.cc index b086908c9e7196..ebcf80b37fdf58 100644 --- a/deps/v8/src/debug/liveedit.cc +++ b/deps/v8/src/debug/liveedit.cc @@ -511,17 +511,20 @@ class CollectFunctionLiterals final }; bool ParseScript(Isolate* isolate, Handle<Script> script, ParseInfo* parse_info, - bool compile_as_well, std::vector<FunctionLiteral*>* literals, + MaybeHandle<ScopeInfo> outer_scope_info, bool compile_as_well, + std::vector<FunctionLiteral*>* literals, debug::LiveEditResult* result) { v8::TryCatch try_catch(reinterpret_cast<v8::Isolate*>(isolate)); Handle<SharedFunctionInfo> shared; bool success = false; if (compile_as_well) { - success = Compiler::CompileForLiveEdit(parse_info, script, isolate) + success = Compiler::CompileForLiveEdit(parse_info, script, outer_scope_info, + isolate) .ToHandle(&shared); } else { - success = parsing::ParseProgram(parse_info, script, isolate, - parsing::ReportStatisticsMode::kYes); + success = + parsing::ParseProgram(parse_info, script, outer_scope_info, isolate, + parsing::ReportStatisticsMode::kYes); if (!success) { // Throw the parser error. 
parse_info->pending_error_handler()->PrepareErrors( @@ -740,21 +743,80 @@ void TranslateSourcePositionTable(Isolate* isolate, Handle<BytecodeArray> code, } void UpdatePositions(Isolate* isolate, Handle<SharedFunctionInfo> sfi, + FunctionLiteral* new_function, const std::vector<SourceChangeRange>& diffs) { - int old_start_position = sfi->StartPosition(); - int new_start_position = - LiveEdit::TranslatePosition(diffs, old_start_position); - int new_end_position = LiveEdit::TranslatePosition(diffs, sfi->EndPosition()); - int new_function_token_position = - LiveEdit::TranslatePosition(diffs, sfi->function_token_position()); - sfi->SetPosition(new_start_position, new_end_position); - sfi->SetFunctionTokenPosition(new_function_token_position, - new_start_position); + sfi->UpdateFromFunctionLiteralForLiveEdit(new_function); if (sfi->HasBytecodeArray()) { TranslateSourcePositionTable( isolate, handle(sfi->GetBytecodeArray(isolate), isolate), diffs); } } + +#ifdef DEBUG +ScopeInfo FindOuterScopeInfoFromScriptSfi(Isolate* isolate, + Handle<Script> script) { + // We take some SFI from the script and walk outwards until we find the + // EVAL_SCOPE. Then we do the same search as `DetermineOuterScopeInfo` and + // check that we found the same ScopeInfo. + SharedFunctionInfo::ScriptIterator it(isolate, *script); + ScopeInfo other_scope_info; + for (SharedFunctionInfo sfi = it.Next(); !sfi.is_null(); sfi = it.Next()) { + if (!sfi.scope_info().IsEmpty()) { + other_scope_info = sfi.scope_info(); + break; + } + } + if (other_scope_info.is_null()) return other_scope_info; + + while (!other_scope_info.IsEmpty() && + other_scope_info.scope_type() != EVAL_SCOPE && + other_scope_info.HasOuterScopeInfo()) { + other_scope_info = other_scope_info.OuterScopeInfo(); + } + + // This function is only called when we found a ScopeInfo candidate, so + // technically the EVAL_SCOPE must have an outer_scope_info. But, the GC can + // clean up some ScopeInfos it thinks are no longer needed. Abort the check + // in that case. + if (!other_scope_info.HasOuterScopeInfo()) return ScopeInfo(); + + DCHECK_EQ(other_scope_info.scope_type(), EVAL_SCOPE); + other_scope_info = other_scope_info.OuterScopeInfo(); + + while (!other_scope_info.IsEmpty() && !other_scope_info.HasContext() && + other_scope_info.HasOuterScopeInfo()) { + other_scope_info = other_scope_info.OuterScopeInfo(); + } + return other_scope_info; +} +#endif + +// For sloppy eval we need to know the ScopeInfo the eval was compiled in and +// re-use it when we compile the new version of the script. +MaybeHandle<ScopeInfo> DetermineOuterScopeInfo(Isolate* isolate, + Handle<Script> script) { + if (!script->has_eval_from_shared()) return kNullMaybeHandle; + DCHECK_EQ(script->compilation_type(), Script::COMPILATION_TYPE_EVAL); + ScopeInfo scope_info = script->eval_from_shared().scope_info(); + // Sloppy eval compiles use the ScopeInfo of the context. Let's find it. 
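+  // Walk the ScopeInfo chain outwards until we find one that has a context;
+  // that is the scope the new version of the script must be compiled in.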
+ while (!scope_info.IsEmpty()) { + if (scope_info.HasContext()) { +#ifdef DEBUG + ScopeInfo other_scope_info = + FindOuterScopeInfoFromScriptSfi(isolate, script); + DCHECK_IMPLIES(!other_scope_info.is_null(), + scope_info == other_scope_info); +#endif + return handle(scope_info, isolate); + } else if (!scope_info.HasOuterScopeInfo()) { + break; + } + scope_info = scope_info.OuterScopeInfo(); + } + + return kNullMaybeHandle; +} + } // anonymous namespace void LiveEdit::PatchScript(Isolate* isolate, Handle<Script> script, @@ -778,8 +840,11 @@ void LiveEdit::PatchScript(Isolate* isolate, Handle<Script> script, flags.set_is_eager(true); flags.set_is_reparse(true); ParseInfo parse_info(isolate, flags, &compile_state, &reusable_state); + MaybeHandle<ScopeInfo> outer_scope_info = + DetermineOuterScopeInfo(isolate, script); std::vector<FunctionLiteral*> literals; - if (!ParseScript(isolate, script, &parse_info, false, &literals, result)) + if (!ParseScript(isolate, script, &parse_info, outer_scope_info, false, + &literals, result)) return; Handle<Script> new_script = isolate->factory()->CloneScript(script); @@ -791,8 +856,8 @@ void LiveEdit::PatchScript(Isolate* isolate, Handle<Script> script, ParseInfo new_parse_info(isolate, new_flags, &new_compile_state, &reusable_state); std::vector<FunctionLiteral*> new_literals; - if (!ParseScript(isolate, new_script, &new_parse_info, true, &new_literals, - result)) { + if (!ParseScript(isolate, new_script, &new_parse_info, outer_scope_info, true, + &new_literals, result)) { return; } @@ -814,7 +879,7 @@ void LiveEdit::PatchScript(Isolate* isolate, Handle<Script> script, function_data_map.Fill(isolate); const bool allow_top_frame_live_editing = - allow_top_frame_live_editing_param && FLAG_live_edit_top_frame; + allow_top_frame_live_editing_param && v8_flags.live_edit_top_frame; if (!CanPatchScript(changed, script, new_script, function_data_map, allow_top_frame_live_editing, result)) { return; @@ -847,7 +912,7 @@ void LiveEdit::PatchScript(Isolate* isolate, Handle<Script> script, isolate->debug()->RemoveBreakInfoAndMaybeFree(debug_info); } SharedFunctionInfo::EnsureSourcePositionsAvailable(isolate, sfi); - UpdatePositions(isolate, sfi, diffs); + UpdatePositions(isolate, sfi, mapping.second, diffs); sfi->set_script(*new_script); sfi->set_function_literal_id(mapping.second->function_literal_id()); diff --git a/deps/v8/src/debug/wasm/gdb-server/gdb-remote-util.h b/deps/v8/src/debug/wasm/gdb-server/gdb-remote-util.h index d02e0eaae3ac00..a4c8828e1e287c 100644 --- a/deps/v8/src/debug/wasm/gdb-server/gdb-remote-util.h +++ b/deps/v8/src/debug/wasm/gdb-server/gdb-remote-util.h @@ -15,9 +15,9 @@ namespace internal { namespace wasm { namespace gdb_server { -#define TRACE_GDB_REMOTE(...) \ - do { \ - if (FLAG_trace_wasm_gdb_remote) PrintF("[gdb-remote] " __VA_ARGS__); \ +#define TRACE_GDB_REMOTE(...) \ + do { \ + if (v8_flags.trace_wasm_gdb_remote) PrintF("[gdb-remote] " __VA_ARGS__); \ } while (false) // Convert from 0-255 to a pair of ASCII chars (0-9,a-f). diff --git a/deps/v8/src/debug/wasm/gdb-server/gdb-server-thread.cc b/deps/v8/src/debug/wasm/gdb-server/gdb-server-thread.cc index d3c7887a5b9a5e..b681aedc573460 100644 --- a/deps/v8/src/debug/wasm/gdb-server/gdb-server-thread.cc +++ b/deps/v8/src/debug/wasm/gdb-server/gdb-server-thread.cc @@ -62,7 +62,8 @@ void GdbServerThread::Run() { #endif // If the default port is not available, try any port. 
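+  // (Binding to port 0 makes the OS pick any free port.)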
- SocketBinding socket_binding = SocketBinding::Bind(FLAG_wasm_gdb_remote_port); + SocketBinding socket_binding = + SocketBinding::Bind(v8_flags.wasm_gdb_remote_port); if (!socket_binding.IsValid()) { socket_binding = SocketBinding::Bind(0); } diff --git a/deps/v8/src/debug/wasm/gdb-server/gdb-server.cc b/deps/v8/src/debug/wasm/gdb-server/gdb-server.cc index b6cb5a98d20e4a..f3f891c30b76b7 100644 --- a/deps/v8/src/debug/wasm/gdb-server/gdb-server.cc +++ b/deps/v8/src/debug/wasm/gdb-server/gdb-server.cc @@ -119,7 +119,7 @@ auto GdbServer::RunSyncTask(Functor&& callback) const { // static std::unique_ptr<GdbServer> GdbServer::Create() { - DCHECK(FLAG_wasm_gdb_remote); + DCHECK(v8_flags.wasm_gdb_remote); std::unique_ptr<GdbServer> gdb_server(new GdbServer()); @@ -377,7 +377,7 @@ void GdbServer::AddWasmModule(uint32_t module_id, std::make_pair(module_id, WasmModuleDebug(isolate, wasm_script))); has_module_list_changed_ = true; - if (FLAG_wasm_pause_waiting_for_debugger && scripts_.size() == 1) { + if (v8_flags.wasm_pause_waiting_for_debugger && scripts_.size() == 1) { TRACE_GDB_REMOTE("Paused, waiting for a debugger to attach...\n"); Suspend(); } diff --git a/deps/v8/src/deoptimizer/deoptimizer.cc b/deps/v8/src/deoptimizer/deoptimizer.cc index f1379f90ee5f0d..9479740884d0b1 100644 --- a/deps/v8/src/deoptimizer/deoptimizer.cc +++ b/deps/v8/src/deoptimizer/deoptimizer.cc @@ -494,7 +494,7 @@ Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction function, caller_constant_pool_(0), actual_argument_count_(0), stack_fp_(0), - trace_scope_(FLAG_trace_deopt || FLAG_log_deopt + trace_scope_(v8_flags.trace_deopt || v8_flags.log_deopt ? new CodeTracer::Scope(isolate->GetCodeTracer()) : nullptr) { if (isolate->deoptimizer_lazy_throw()) { @@ -678,9 +678,9 @@ void Deoptimizer::TraceDeoptBegin(int optimization_id, MessageFor(deopt_kind_), DeoptimizeReasonToString(info.deopt_reason)); if (function_.IsJSFunction()) { function_.ShortPrint(file); - } else { - PrintF(file, "%s", CodeKindToString(compiled_code_.kind())); + PrintF(file, ", "); } + compiled_code_.ShortPrint(file); PrintF(file, ", opt id %d, " #ifdef DEBUG @@ -711,7 +711,7 @@ void Deoptimizer::TraceDeoptEnd(double deopt_duration) { // static void Deoptimizer::TraceMarkForDeoptimization(Code code, const char* reason) { - if (!FLAG_trace_deopt && !FLAG_log_deopt) return; + if (!v8_flags.trace_deopt && !v8_flags.log_deopt) return; DisallowGarbageCollection no_gc; Isolate* isolate = code.GetIsolate(); @@ -720,14 +720,15 @@ void Deoptimizer::TraceMarkForDeoptimization(Code code, const char* reason) { DeoptimizationData deopt_data = DeoptimizationData::cast(maybe_data); CodeTracer::Scope scope(isolate->GetCodeTracer()); - if (FLAG_trace_deopt) { - PrintF(scope.file(), "[marking dependent code " V8PRIxPTR_FMT " (", - code.ptr()); + if (v8_flags.trace_deopt) { + PrintF(scope.file(), "[marking dependent code "); + code.ShortPrint(scope.file()); + PrintF(scope.file(), " ("); deopt_data.SharedFunctionInfo().ShortPrint(scope.file()); PrintF(") (opt id %d) for deoptimization, reason: %s]\n", deopt_data.OptimizationId().value(), reason); } - if (!FLAG_log_deopt) return; + if (!v8_flags.log_deopt) return; no_gc.Release(); { HandleScope handle_scope(isolate); @@ -744,7 +745,7 @@ void Deoptimizer::TraceMarkForDeoptimization(Code code, const char* reason) { // static void Deoptimizer::TraceEvictFromOptimizedCodeCache(SharedFunctionInfo sfi, const char* reason) { - if (!FLAG_trace_deopt_verbose) return; + if (!v8_flags.trace_deopt_verbose) return; 
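Most of the churn in this file, and across the rest of the patch, is the mechanical rename from the legacy FLAG_foo globals to fields on the v8_flags struct. A simplified sketch of the difference (illustrative names only; the real definitions live under src/flags/): keeping every flag in one contiguous struct makes it possible to write-protect the whole block after startup, which a scattered set of per-flag globals cannot easily guarantee.

    // Sketch only, not V8's actual flag machinery.
    struct FlagValuesSketch {
      bool trace_deopt = false;
      bool trace_deopt_verbose = false;
      int deopt_every_n_times = 0;
    };

    FlagValuesSketch v8_flags_sketch;  // new style: v8_flags_sketch.trace_deopt

    // Legacy style was one alias per flag, e.g.:
    // #define FLAG_trace_deopt v8_flags_sketch.trace_deopt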
DisallowGarbageCollection no_gc; CodeTracer::Scope scope(sfi.GetIsolate()->GetCodeTracer()); @@ -758,7 +759,7 @@ void Deoptimizer::TraceEvictFromOptimizedCodeCache(SharedFunctionInfo sfi, #ifdef DEBUG // static void Deoptimizer::TraceFoundActivation(Isolate* isolate, JSFunction function) { - if (!FLAG_trace_deopt_verbose) return; + if (!v8_flags.trace_deopt_verbose) return; CodeTracer::Scope scope(isolate->GetCodeTracer()); PrintF(scope.file(), "[deoptimizer found activation of function: "); function.PrintName(scope.file()); @@ -768,14 +769,14 @@ void Deoptimizer::TraceFoundActivation(Isolate* isolate, JSFunction function) { // static void Deoptimizer::TraceDeoptAll(Isolate* isolate) { - if (!FLAG_trace_deopt_verbose) return; + if (!v8_flags.trace_deopt_verbose) return; CodeTracer::Scope scope(isolate->GetCodeTracer()); PrintF(scope.file(), "[deoptimize all code in all contexts]\n"); } // static void Deoptimizer::TraceDeoptMarked(Isolate* isolate) { - if (!FLAG_trace_deopt_verbose) return; + if (!v8_flags.trace_deopt_verbose) return; CodeTracer::Scope scope(isolate->GetCodeTracer()); PrintF(scope.file(), "[deoptimize marked code in all contexts]\n"); } @@ -812,7 +813,7 @@ void Deoptimizer::DoComputeOutputFrames() { actual_argument_count_ = static_cast<int>( Memory<intptr_t>(fp_address + StandardFrameConstants::kArgCOffset)); - if (FLAG_enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { caller_constant_pool_ = Memory<intptr_t>( fp_address + CommonFrameConstants::kConstantPoolOffset); } @@ -1018,7 +1019,7 @@ void Deoptimizer::DoComputeUnoptimizedFrame(TranslatedFrame* translated_frame, (!is_topmost || (deopt_kind_ == DeoptimizeKind::kLazy)) && !goto_catch_handler; const bool deopt_to_baseline = - shared.HasBaselineCode() && FLAG_deopt_to_baseline; + shared.HasBaselineCode() && v8_flags.deopt_to_baseline; const bool restart_frame = goto_catch_handler && is_restart_frame(); CodeT dispatch_builtin = builtins->code( DispatchBuiltinFor(deopt_to_baseline, advance_bc, restart_frame)); @@ -1094,7 +1095,7 @@ void Deoptimizer::DoComputeUnoptimizedFrame(TranslatedFrame* translated_frame, output_frame->SetRegister(fp_reg.code(), fp_value); } - if (FLAG_enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { // For the bottommost output frame the constant pool pointer can be gotten // from the input frame. For subsequent output frames, it can be read from // the previous frame. @@ -1242,7 +1243,7 @@ void Deoptimizer::DoComputeUnoptimizedFrame(TranslatedFrame* translated_frame, } // Update constant pool. - if (FLAG_enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { intptr_t constant_pool_value = static_cast<intptr_t>(dispatch_builtin.constant_pool()); output_frame->SetConstantPool(constant_pool_value); @@ -1407,7 +1408,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame, output_frame->SetRegister(fp_reg.code(), fp_value); } - if (FLAG_enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { // Read the caller's constant pool from the previous frame. const intptr_t caller_cp = output_[frame_index - 1]->GetConstantPool(); frame_writer.PushCallerConstantPool(caller_cp); @@ -1472,7 +1473,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame, } // Update constant pool. 
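A second recurring substitution, visible above and in the hunks that follow, replaces the runtime check FLAG_enable_embedded_constant_pool with the build-time constant V8_EMBEDDED_CONSTANT_POOL_BOOL. The payoff is that the compiler can fold the branch away entirely on architectures that never embed a constant pool. A hedged sketch of the shape, with illustrative names:

    #if defined(V8_TARGET_ARCH_PPC64)  // assumption: PPC-style targets only
    constexpr bool kEmbeddedConstantPoolSketch = true;
    #else
    constexpr bool kEmbeddedConstantPoolSketch = false;
    #endif

    void MaintainConstantPool() {
      if (kEmbeddedConstantPoolSketch) {
        // Constant-pool bookkeeping; dead-stripped when the constant is false.
      }
    }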
- if (FLAG_enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { intptr_t constant_pool_value = static_cast<intptr_t>(construct_stub.constant_pool()); output_frame->SetConstantPool(constant_pool_value); @@ -1821,7 +1822,7 @@ void Deoptimizer::DoComputeBuiltinContinuation( DCHECK_EQ(output_frame_size_above_fp, frame_writer.top_offset()); - if (FLAG_enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { // Read the caller's constant pool from the previous frame. const intptr_t caller_cp = is_bottommost ? caller_constant_pool_ @@ -1943,7 +1944,7 @@ void Deoptimizer::DoComputeBuiltinContinuation( void Deoptimizer::MaterializeHeapObjects() { translated_state_.Prepare(static_cast<Address>(stack_fp_)); - if (FLAG_deopt_every_n_times > 0) { + if (v8_flags.deopt_every_n_times > 0) { // Doing a GC here will find problems with the deoptimized frames. isolate_->heap()->CollectAllGarbage(Heap::kNoGCFlags, GarbageCollectionReason::kTesting); diff --git a/deps/v8/src/deoptimizer/deoptimizer.h b/deps/v8/src/deoptimizer/deoptimizer.h index ab15919301e7f3..1de0a4e6e75d4b 100644 --- a/deps/v8/src/deoptimizer/deoptimizer.h +++ b/deps/v8/src/deoptimizer/deoptimizer.h @@ -184,11 +184,11 @@ class Deoptimizer : public Malloced { // Tracing. bool tracing_enabled() const { return trace_scope_ != nullptr; } bool verbose_tracing_enabled() const { - return FLAG_trace_deopt_verbose && tracing_enabled(); + return v8_flags.trace_deopt_verbose && tracing_enabled(); } CodeTracer::Scope* trace_scope() const { return trace_scope_; } CodeTracer::Scope* verbose_trace_scope() const { - return FLAG_trace_deopt_verbose ? trace_scope() : nullptr; + return v8_flags.trace_deopt_verbose ? trace_scope() : nullptr; } void TraceDeoptBegin(int optimization_id, BytecodeOffset bytecode_offset); void TraceDeoptEnd(double deopt_duration); diff --git a/deps/v8/src/deoptimizer/ppc/deoptimizer-ppc.cc b/deps/v8/src/deoptimizer/ppc/deoptimizer-ppc.cc index 63843b326182a3..c4c9bffec69565 100644 --- a/deps/v8/src/deoptimizer/ppc/deoptimizer-ppc.cc +++ b/deps/v8/src/deoptimizer/ppc/deoptimizer-ppc.cc @@ -35,7 +35,7 @@ void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { } void FrameDescription::SetCallerConstantPool(unsigned offset, intptr_t value) { - DCHECK(FLAG_enable_embedded_constant_pool); + DCHECK(V8_EMBEDDED_CONSTANT_POOL_BOOL); SetFrameSlot(offset, value); } diff --git a/deps/v8/src/deoptimizer/translated-state.cc b/deps/v8/src/deoptimizer/translated-state.cc index 511d4ba8ca150d..2bd0f2394ebb82 100644 --- a/deps/v8/src/deoptimizer/translated-state.cc +++ b/deps/v8/src/deoptimizer/translated-state.cc @@ -145,6 +145,22 @@ void TranslationArrayPrintSingleFrame( break; } + case TranslationOpcode::SIGNED_BIGINT64_REGISTER: { + DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); + int reg_code = iterator.NextUnsigned(); + os << "{input=" << converter.NameOfCPURegister(reg_code) + << " (signed bigint64)}"; + break; + } + + case TranslationOpcode::UNSIGNED_BIGINT64_REGISTER: { + DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); + int reg_code = iterator.NextUnsigned(); + os << "{input=" << converter.NameOfCPURegister(reg_code) + << " (unsigned bigint64)}"; + break; + } + case TranslationOpcode::UINT32_REGISTER: { DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); int reg_code = iterator.NextUnsigned(); @@ -195,6 +211,20 @@ void TranslationArrayPrintSingleFrame( break; } + case TranslationOpcode::SIGNED_BIGINT64_STACK_SLOT: { + DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); + int 
input_slot_index = iterator.Next(); + os << "{input=" << input_slot_index << " (signed bigint64)}"; + break; + } + + case TranslationOpcode::UNSIGNED_BIGINT64_STACK_SLOT: { + DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); + int input_slot_index = iterator.Next(); + os << "{input=" << input_slot_index << " (unsigned bigint64)}"; + break; + } + case TranslationOpcode::UINT32_STACK_SLOT: { DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); int input_slot_index = iterator.Next(); @@ -329,6 +359,14 @@ TranslatedValue TranslatedValue::NewInt64ToBigInt(TranslatedState* container, return slot; } +// static +TranslatedValue TranslatedValue::NewUint64ToBigInt(TranslatedState* container, + uint64_t value) { + TranslatedValue slot(container, kUint64ToBigInt); + slot.uint64_value_ = value; + return slot; +} + // static TranslatedValue TranslatedValue::NewUInt32(TranslatedState* container, uint32_t value) { @@ -375,6 +413,11 @@ int64_t TranslatedValue::int64_value() const { return int64_value_; } +uint64_t TranslatedValue::uint64_value() const { + DCHECK(kUint64ToBigInt == kind()); + return uint64_value_; +} + uint32_t TranslatedValue::uint32_value() const { DCHECK(kind() == kUInt32 || kind() == kBoolBit); return uint32_value_; @@ -523,8 +566,8 @@ Handle<Object> TranslatedValue::GetValue() { // headers. // TODO(hpayer): Find a cleaner way to support a group of // non-fully-initialized objects. - isolate()->heap()->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + isolate()->heap()->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); // 2. Initialize the objects. If we have allocated only byte arrays // for some objects, we now overwrite the byte arrays with the @@ -547,6 +590,9 @@ Handle<Object> TranslatedValue::GetValue() { case TranslatedValue::kInt64ToBigInt: heap_object = BigInt::FromInt64(isolate(), int64_value()); break; + case TranslatedValue::kUint64ToBigInt: + heap_object = BigInt::FromUint64(isolate(), uint64_value()); + break; case TranslatedValue::kUInt32: number = uint32_value(); heap_object = isolate()->factory()->NewHeapNumber(number); @@ -562,7 +608,8 @@ Handle<Object> TranslatedValue::GetValue() { default: UNREACHABLE(); } - DCHECK(!IsSmiDouble(number) || kind() == TranslatedValue::kInt64ToBigInt); + DCHECK(!IsSmiDouble(number) || kind() == TranslatedValue::kInt64ToBigInt || + kind() == TranslatedValue::kUint64ToBigInt); set_initialized_storage(heap_object); return storage_; } @@ -868,6 +915,8 @@ TranslatedFrame TranslatedState::CreateNextTranslatedFrame( case TranslationOpcode::REGISTER: case TranslationOpcode::INT32_REGISTER: case TranslationOpcode::INT64_REGISTER: + case TranslationOpcode::SIGNED_BIGINT64_REGISTER: + case TranslationOpcode::UNSIGNED_BIGINT64_REGISTER: case TranslationOpcode::UINT32_REGISTER: case TranslationOpcode::BOOL_REGISTER: case TranslationOpcode::FLOAT_REGISTER: @@ -875,6 +924,8 @@ TranslatedFrame TranslatedState::CreateNextTranslatedFrame( case TranslationOpcode::STACK_SLOT: case TranslationOpcode::INT32_STACK_SLOT: case TranslationOpcode::INT64_STACK_SLOT: + case TranslationOpcode::SIGNED_BIGINT64_STACK_SLOT: + case TranslationOpcode::UNSIGNED_BIGINT64_STACK_SLOT: case TranslationOpcode::UINT32_STACK_SLOT: case TranslationOpcode::BOOL_STACK_SLOT: case TranslationOpcode::FLOAT_STACK_SLOT: @@ -1089,6 +1140,42 @@ int TranslatedState::CreateNextTranslatedValue( return translated_value.GetChildrenCount(); } + case TranslationOpcode::SIGNED_BIGINT64_REGISTER: { + 
int input_reg = iterator->NextUnsigned(); + if (registers == nullptr) { + TranslatedValue translated_value = TranslatedValue::NewInvalid(this); + frame.Add(translated_value); + return translated_value.GetChildrenCount(); + } + intptr_t value = registers->GetRegister(input_reg); + if (trace_file != nullptr) { + PrintF(trace_file, "%" V8PRIdPTR " ; %s (signed bigint64)", value, + converter.NameOfCPURegister(input_reg)); + } + TranslatedValue translated_value = + TranslatedValue::NewInt64ToBigInt(this, value); + frame.Add(translated_value); + return translated_value.GetChildrenCount(); + } + + case TranslationOpcode::UNSIGNED_BIGINT64_REGISTER: { + int input_reg = iterator->NextUnsigned(); + if (registers == nullptr) { + TranslatedValue translated_value = TranslatedValue::NewInvalid(this); + frame.Add(translated_value); + return translated_value.GetChildrenCount(); + } + intptr_t value = registers->GetRegister(input_reg); + if (trace_file != nullptr) { + PrintF(trace_file, "%" V8PRIdPTR " ; %s (unsigned bigint64)", value, + converter.NameOfCPURegister(input_reg)); + } + TranslatedValue translated_value = + TranslatedValue::NewUint64ToBigInt(this, value); + frame.Add(translated_value); + return translated_value.GetChildrenCount(); + } + case TranslationOpcode::UINT32_REGISTER: { int input_reg = iterator->NextUnsigned(); if (registers == nullptr) { @@ -1205,6 +1292,36 @@ int TranslatedState::CreateNextTranslatedValue( return translated_value.GetChildrenCount(); } + case TranslationOpcode::SIGNED_BIGINT64_STACK_SLOT: { + int slot_offset = + OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); + uint64_t value = GetUInt64Slot(fp, slot_offset); + if (trace_file != nullptr) { + PrintF(trace_file, "%" V8PRIdPTR " ; (signed bigint64) [fp %c %3d] ", + static_cast<intptr_t>(value), slot_offset < 0 ? '-' : '+', + std::abs(slot_offset)); + } + TranslatedValue translated_value = + TranslatedValue::NewInt64ToBigInt(this, value); + frame.Add(translated_value); + return translated_value.GetChildrenCount(); + } + + case TranslationOpcode::UNSIGNED_BIGINT64_STACK_SLOT: { + int slot_offset = + OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); + uint64_t value = GetUInt64Slot(fp, slot_offset); + if (trace_file != nullptr) { + PrintF(trace_file, "%" V8PRIdPTR " ; (unsigned bigint64) [fp %c %3d] ", + static_cast<intptr_t>(value), slot_offset < 0 ? 
'-' : '+', + std::abs(slot_offset)); + } + TranslatedValue translated_value = + TranslatedValue::NewUint64ToBigInt(this, value); + frame.Add(translated_value); + return translated_value.GetChildrenCount(); + } + case TranslationOpcode::UINT32_STACK_SLOT: { int slot_offset = OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next()); @@ -1291,7 +1408,8 @@ int TranslatedState::CreateNextTranslatedValue( Address TranslatedState::DecompressIfNeeded(intptr_t value) { if (COMPRESS_POINTERS_BOOL) { - return DecompressTaggedAny(isolate(), static_cast<uint32_t>(value)); + return V8HeapCompressionScheme::DecompressTaggedAny( + isolate(), static_cast<uint32_t>(value)); } else { return value; } diff --git a/deps/v8/src/deoptimizer/translated-state.h b/deps/v8/src/deoptimizer/translated-state.h index 958e99fafde756..cac57ec8324278 100644 --- a/deps/v8/src/deoptimizer/translated-state.h +++ b/deps/v8/src/deoptimizer/translated-state.h @@ -73,6 +73,7 @@ class TranslatedValue { kInt32, kInt64, kInt64ToBigInt, + kUint64ToBigInt, kUInt32, kBoolBit, kFloat, @@ -111,6 +112,8 @@ class TranslatedValue { static TranslatedValue NewInt64(TranslatedState* container, int64_t value); static TranslatedValue NewInt64ToBigInt(TranslatedState* container, int64_t value); + static TranslatedValue NewUint64ToBigInt(TranslatedState* container, + uint64_t value); static TranslatedValue NewUInt32(TranslatedState* container, uint32_t value); static TranslatedValue NewBool(TranslatedState* container, uint32_t value); static TranslatedValue NewTagged(TranslatedState* container, Object literal); @@ -152,7 +155,9 @@ class TranslatedValue { uint32_t uint32_value_; // kind is kInt32. int32_t int32_value_; - // kind is kInt64. + // kind is kUint64ToBigInt. + uint64_t uint64_value_; + // kind is kInt64 or kInt64ToBigInt. 
int64_t int64_value_; // kind is kFloat Float32 float_value_; @@ -167,6 +172,7 @@ class TranslatedValue { int32_t int32_value() const; int64_t int64_value() const; uint32_t uint32_value() const; + uint64_t uint64_value() const; Float32 float_value() const; Float64 double_value() const; int object_length() const; diff --git a/deps/v8/src/deoptimizer/translation-array.cc b/deps/v8/src/deoptimizer/translation-array.cc index b8ab806821e91c..2a8b3dd2bbd096 100644 --- a/deps/v8/src/deoptimizer/translation-array.cc +++ b/deps/v8/src/deoptimizer/translation-array.cc @@ -33,7 +33,7 @@ TranslationArrayIterator::TranslationArrayIterator(TranslationArray buffer, int index) : buffer_(buffer), index_(index) { #ifdef V8_USE_ZLIB - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { const int size = buffer_.get_int(kUncompressedSizeOffset); uncompressed_contents_.insert(uncompressed_contents_.begin(), size, 0); @@ -50,12 +50,12 @@ TranslationArrayIterator::TranslationArrayIterator(TranslationArray buffer, return; } #endif // V8_USE_ZLIB - DCHECK(!FLAG_turbo_compress_translation_arrays); + DCHECK(!v8_flags.turbo_compress_translation_arrays); DCHECK(index >= 0 && index < buffer.length()); } int32_t TranslationArrayIterator::Next() { - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { return uncompressed_contents_[index_++]; } else { int32_t value = base::VLQDecode(buffer_.GetDataStartAddress(), &index_); @@ -65,7 +65,7 @@ int32_t TranslationArrayIterator::Next() { } uint32_t TranslationArrayIterator::NextUnsigned() { - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { return uncompressed_contents_[index_++]; } else { uint32_t value = @@ -76,7 +76,7 @@ uint32_t TranslationArrayIterator::NextUnsigned() { } bool TranslationArrayIterator::HasNext() const { - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { return index_ < static_cast<int>(uncompressed_contents_.size()); } else { return index_ < buffer_.length(); @@ -84,7 +84,7 @@ bool TranslationArrayIterator::HasNext() const { } void TranslationArrayBuilder::Add(int32_t value) { - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { contents_for_compression_.push_back(value); } else { base::VLQEncode(&contents_, value); @@ -93,7 +93,7 @@ void TranslationArrayBuilder::Add(int32_t value) { void TranslationArrayBuilder::AddOpcode(TranslationOpcode opcode) { static_assert(kNumTranslationOpcodes - 1 <= base::kDataMask); - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { contents_for_compression_.push_back(static_cast<byte>(opcode)); } else { contents_.push_back(static_cast<byte>(opcode)); @@ -102,7 +102,7 @@ void TranslationArrayBuilder::AddOpcode(TranslationOpcode opcode) { void TranslationArrayBuilder::AddRegister(Register reg) { static_assert(Register::kNumRegisters - 1 <= base::kDataMask); - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { contents_for_compression_.push_back(static_cast<byte>(reg.code())); } else { contents_.push_back(static_cast<byte>(reg.code())); @@ -111,7 +111,7 @@ void TranslationArrayBuilder::AddRegister(Register reg) { void 
TranslationArrayBuilder::AddFloatRegister(FloatRegister reg) { static_assert(FloatRegister::kNumRegisters - 1 <= base::kDataMask); - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { contents_for_compression_.push_back(static_cast<byte>(reg.code())); } else { contents_.push_back(static_cast<byte>(reg.code())); @@ -120,7 +120,7 @@ void TranslationArrayBuilder::AddFloatRegister(FloatRegister reg) { void TranslationArrayBuilder::AddDoubleRegister(DoubleRegister reg) { static_assert(DoubleRegister::kNumRegisters - 1 <= base::kDataMask); - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { contents_for_compression_.push_back(static_cast<byte>(reg.code())); } else { contents_.push_back(static_cast<byte>(reg.code())); @@ -130,7 +130,7 @@ void TranslationArrayBuilder::AddDoubleRegister(DoubleRegister reg) { Handle<TranslationArray> TranslationArrayBuilder::ToTranslationArray( Factory* factory) { #ifdef V8_USE_ZLIB - if (V8_UNLIKELY(FLAG_turbo_compress_translation_arrays)) { + if (V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays)) { const int input_size = SizeInBytes(); uLongf compressed_data_size = compressBound(input_size); @@ -155,7 +155,7 @@ Handle<TranslationArray> TranslationArrayBuilder::ToTranslationArray( return result; } #endif - DCHECK(!FLAG_turbo_compress_translation_arrays); + DCHECK(!v8_flags.turbo_compress_translation_arrays); Handle<TranslationArray> result = factory->NewByteArray(SizeInBytes(), AllocationType::kOld); memcpy(result->GetDataStartAddress(), contents_.data(), @@ -287,6 +287,20 @@ void TranslationArrayBuilder::StoreInt64Register(Register reg) { DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); } +void TranslationArrayBuilder::StoreSignedBigInt64Register(Register reg) { + auto opcode = TranslationOpcode::SIGNED_BIGINT64_REGISTER; + AddOpcode(opcode); + AddRegister(reg); + DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); +} + +void TranslationArrayBuilder::StoreUnsignedBigInt64Register(Register reg) { + auto opcode = TranslationOpcode::UNSIGNED_BIGINT64_REGISTER; + AddOpcode(opcode); + AddRegister(reg); + DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); +} + void TranslationArrayBuilder::StoreUint32Register(Register reg) { auto opcode = TranslationOpcode::UINT32_REGISTER; AddOpcode(opcode); @@ -334,6 +348,20 @@ void TranslationArrayBuilder::StoreInt64StackSlot(int index) { DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); } +void TranslationArrayBuilder::StoreSignedBigInt64StackSlot(int index) { + auto opcode = TranslationOpcode::SIGNED_BIGINT64_STACK_SLOT; + AddOpcode(opcode); + Add(index); + DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); +} + +void TranslationArrayBuilder::StoreUnsignedBigInt64StackSlot(int index) { + auto opcode = TranslationOpcode::UNSIGNED_BIGINT64_STACK_SLOT; + AddOpcode(opcode); + Add(index); + DCHECK_EQ(TranslationOpcodeOperandCount(opcode), 1); +} + void TranslationArrayBuilder::StoreUint32StackSlot(int index) { auto opcode = TranslationOpcode::UINT32_STACK_SLOT; AddOpcode(opcode); diff --git a/deps/v8/src/deoptimizer/translation-array.h b/deps/v8/src/deoptimizer/translation-array.h index f25f22c7bb5cdd..2dee8e86c225c9 100644 --- a/deps/v8/src/deoptimizer/translation-array.h +++ b/deps/v8/src/deoptimizer/translation-array.h @@ -90,6 +90,8 @@ class TranslationArrayBuilder { void StoreRegister(Register reg); void StoreInt32Register(Register reg); void StoreInt64Register(Register 
reg); + void StoreSignedBigInt64Register(Register reg); + void StoreUnsignedBigInt64Register(Register reg); void StoreUint32Register(Register reg); void StoreBoolRegister(Register reg); void StoreFloatRegister(FloatRegister reg); @@ -97,6 +99,8 @@ class TranslationArrayBuilder { void StoreStackSlot(int index); void StoreInt32StackSlot(int index); void StoreInt64StackSlot(int index); + void StoreSignedBigInt64StackSlot(int index); + void StoreUnsignedBigInt64StackSlot(int index); void StoreUint32StackSlot(int index); void StoreBoolStackSlot(int index); void StoreFloatStackSlot(int index); @@ -113,12 +117,12 @@ class TranslationArrayBuilder { void AddDoubleRegister(DoubleRegister reg); int Size() const { - return V8_UNLIKELY(FLAG_turbo_compress_translation_arrays) + return V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays) ? static_cast<int>(contents_for_compression_.size()) : static_cast<int>(contents_.size()); } int SizeInBytes() const { - return V8_UNLIKELY(FLAG_turbo_compress_translation_arrays) + return V8_UNLIKELY(v8_flags.turbo_compress_translation_arrays) ? Size() * kInt32Size : Size(); } diff --git a/deps/v8/src/deoptimizer/translation-opcode.h b/deps/v8/src/deoptimizer/translation-opcode.h index 32730525abe9dc..1f83738ec4b252 100644 --- a/deps/v8/src/deoptimizer/translation-opcode.h +++ b/deps/v8/src/deoptimizer/translation-opcode.h @@ -30,6 +30,10 @@ namespace internal { V(INT32_STACK_SLOT, 1) \ V(INT64_REGISTER, 1) \ V(INT64_STACK_SLOT, 1) \ + V(SIGNED_BIGINT64_REGISTER, 1) \ + V(SIGNED_BIGINT64_STACK_SLOT, 1) \ + V(UNSIGNED_BIGINT64_REGISTER, 1) \ + V(UNSIGNED_BIGINT64_STACK_SLOT, 1) \ V(INTERPRETED_FRAME, 5) \ V(JAVA_SCRIPT_BUILTIN_CONTINUATION_FRAME, 3) \ V(JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH_FRAME, 3) \ diff --git a/deps/v8/src/diagnostics/code-tracer.h b/deps/v8/src/diagnostics/code-tracer.h index 801e09c8023533..ecd54c41b1685f 100644 --- a/deps/v8/src/diagnostics/code-tracer.h +++ b/deps/v8/src/diagnostics/code-tracer.h @@ -26,8 +26,8 @@ class CodeTracer final : public Malloced { return; } - if (FLAG_redirect_code_traces_to != nullptr) { - base::StrNCpy(filename_, FLAG_redirect_code_traces_to, + if (v8_flags.redirect_code_traces_to != nullptr) { + base::StrNCpy(filename_, v8_flags.redirect_code_traces_to, filename_.length()); } else if (isolate_id >= 0) { base::SNPrintF(filename_, "code-%d-%d.asm", @@ -102,7 +102,7 @@ class CodeTracer final : public Malloced { FILE* file() const { return file_; } private: - static bool ShouldRedirect() { return FLAG_redirect_code_traces; } + static bool ShouldRedirect() { return v8_flags.redirect_code_traces; } base::EmbeddedVector<char, 128> filename_; FILE* file_; diff --git a/deps/v8/src/diagnostics/disassembler.cc b/deps/v8/src/diagnostics/disassembler.cc index b31a02283b9d82..dad22ba046e3a2 100644 --- a/deps/v8/src/diagnostics/disassembler.cc +++ b/deps/v8/src/diagnostics/disassembler.cc @@ -275,16 +275,6 @@ static void PrintRelocInfo(std::ostringstream& out, Isolate* isolate, relocinfo->wasm_stub_call_address())); out << " ;; wasm stub: " << runtime_stub_name; #endif // V8_ENABLE_WEBASSEMBLY - } else if (RelocInfo::IsRuntimeEntry(rmode) && isolate != nullptr) { - // A runtime entry relocinfo might be a deoptimization bailout. 
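Stepping back to the deoptimizer hunks above: the new SIGNED_/UNSIGNED_BIGINT64 opcode pairs exist because the same 64-bit payload must rematerialize as two different BigInts depending on how the optimizing compiler typed it, hence the separate BigInt::FromInt64 and BigInt::FromUint64 calls. A standalone illustration of why the distinction matters:

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t raw = 0xFFFFFFFFFFFFFFFFull;  // one bit pattern...
      int64_t as_signed = static_cast<int64_t>(raw);
      // ...two values: -1 read signed, 2^64 - 1 read unsigned, hence
      // -1n vs 18446744073709551615n after BigInt rematerialization.
      std::printf("signed: %lld, unsigned: %llu\n",
                  static_cast<long long>(as_signed),
                  static_cast<unsigned long long>(raw));
    }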
- Address addr = relocinfo->target_address(); - DeoptimizeKind type; - if (Deoptimizer::IsDeoptimizationEntry(isolate, addr, &type)) { - out << " ;; " << Deoptimizer::MessageFor(type) - << " deoptimization bailout"; - } else { - out << " ;; " << RelocInfo::RelocModeName(rmode); - } } else { out << " ;; " << RelocInfo::RelocModeName(rmode); } @@ -326,8 +316,7 @@ static int DecodeIt(Isolate* isolate, ExternalReferenceEncoder* ref_encoder, } else if (!rit.done() && rit.rinfo()->pc() == reinterpret_cast<Address>(pc) && (rit.rinfo()->rmode() == RelocInfo::INTERNAL_REFERENCE || - rit.rinfo()->rmode() == RelocInfo::LITERAL_CONSTANT || - rit.rinfo()->rmode() == RelocInfo::DATA_EMBEDDED_OBJECT)) { + rit.rinfo()->rmode() == RelocInfo::LITERAL_CONSTANT)) { // raw pointer embedded in code stream, e.g., jump table byte* ptr = base::ReadUnalignedValue<byte*>(reinterpret_cast<Address>(pc)); @@ -376,7 +365,8 @@ static int DecodeIt(Isolate* isolate, ExternalReferenceEncoder* ref_encoder, } // Instruction address and instruction offset. - if (FLAG_log_colour && reinterpret_cast<Address>(prev_pc) == current_pc) { + if (v8_flags.log_colour && + reinterpret_cast<Address>(prev_pc) == current_pc) { // If this is the given "current" pc, make it yellow and bold. out << "\033[33;1m"; } @@ -432,7 +422,8 @@ static int DecodeIt(Isolate* isolate, ExternalReferenceEncoder* ref_encoder, } } - if (FLAG_log_colour && reinterpret_cast<Address>(prev_pc) == current_pc) { + if (v8_flags.log_colour && + reinterpret_cast<Address>(prev_pc) == current_pc) { out << "\033[m"; } @@ -451,7 +442,7 @@ static int DecodeIt(Isolate* isolate, ExternalReferenceEncoder* ref_encoder, int Disassembler::Decode(Isolate* isolate, std::ostream& os, byte* begin, byte* end, CodeReference code, Address current_pc) { - DCHECK_WITH_MSG(FLAG_text_is_readable, + DCHECK_WITH_MSG(v8_flags.text_is_readable, "Builtins disassembly requires a readable .text section"); V8NameConverter v8NameConverter(isolate, code); if (isolate) { diff --git a/deps/v8/src/diagnostics/etw-jit-win.cc b/deps/v8/src/diagnostics/etw-jit-win.cc index 8d9719f3ede480..39395f3ea18a16 100644 --- a/deps/v8/src/diagnostics/etw-jit-win.cc +++ b/deps/v8/src/diagnostics/etw-jit-win.cc @@ -3,8 +3,6 @@ // found in the LICENSE file. 
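The etw-jit-win changes that follow rework how ETW logging is enabled: EnableLog loses its only_if_pending parameter, the JIT event handler is installed through the isolate's v8_file_logger rather than the public SetJitCodeEventHandler API, and the common "ETW disabled" path becomes an inline atomic check (see the new MaybeSetHandlerNow in etw-jit-win.h further down). A minimal sketch of that fast-path pattern, assuming simplified names:

    #include <atomic>

    extern std::atomic<bool> etw_enabled_sketch;  // set from the ETW callback

    void EnableLogSlowPath();  // out-of-line; installs the event handler

    // Inline guard: on the common disabled path this is a single atomic load.
    inline void MaybeSetHandlerSketch() {
      if (etw_enabled_sketch.load()) EnableLogSlowPath();
    }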
#include "src/diagnostics/etw-jit-win.h" -#include <atomic> - #include "include/v8-callbacks.h" #include "include/v8-isolate.h" #include "include/v8-local-handle.h" @@ -36,7 +34,7 @@ namespace { class IsolateLoadScriptData { public: - IsolateLoadScriptData(Isolate* isolate) : isolate_(isolate) {} + explicit IsolateLoadScriptData(Isolate* isolate) : isolate_(isolate) {} virtual ~IsolateLoadScriptData() { // When this is destroyed, it is because Isolate is being destroyed // also in Isolate::Deinit, that while already cancel all cancellable @@ -51,7 +49,7 @@ class IsolateLoadScriptData { static void LogIsolatePendingLogs(Isolate* isolate); static void UpdateAllIsolates(bool etw_enabled); static bool MaybeAddLoadedScript(Isolate* isolate, int script_id); - static void EnableLog(Isolate* isolate, bool only_if_pending); + static void EnableLog(Isolate* isolate); static void DisableLog(Isolate* isolate); private: @@ -73,8 +71,7 @@ class IsolateLoadScriptData { } auto v8_isolate = reinterpret_cast<v8::Isolate*>(isolate_); Isolate* isolate = isolate_; - auto task = - MakeCancelableTask(isolate_, [isolate] { EnableLog(isolate, false); }); + auto task = MakeCancelableTask(isolate_, [isolate] { EnableLog(isolate); }); pending_log_task_id_ = task->id(); auto taskrunner = V8::GetCurrentPlatform()->GetForegroundTaskRunner(v8_isolate); @@ -126,7 +123,7 @@ void IsolateLoadScriptData::RemoveIsolate(Isolate* isolate) { isolate_map.Pointer()->erase(isolate); } -void IsolateLoadScriptData::EnableLog(Isolate* isolate, bool only_if_pending) { +void IsolateLoadScriptData::EnableLog(Isolate* isolate) { bool has_pending_log = false; { base::MutexGuard guard(isolates_mutex.Pointer()); @@ -136,15 +133,11 @@ void IsolateLoadScriptData::EnableLog(Isolate* isolate, bool only_if_pending) { data.CancelPendingLog(); } } - if (only_if_pending && !has_pending_log) { - return; - } - auto v8_isolate = reinterpret_cast<v8::Isolate*>(isolate); + // This cannot be done while isolate_mutex is locked, as it can call // EventHandler while in the call for all the existing code. - v8_isolate->SetJitCodeEventHandler( - has_pending_log ? kJitCodeEventEnumExisting : kJitCodeEventDefault, - EventHandler); + isolate->v8_file_logger()->SetEtwCodeEventHandler( + has_pending_log ? kJitCodeEventEnumExisting : kJitCodeEventDefault); } void IsolateLoadScriptData::DisableLog(Isolate* isolate) { @@ -156,8 +149,7 @@ void IsolateLoadScriptData::DisableLog(Isolate* isolate) { } data.RemoveAllLoadedScripts(); } - auto v8_isolate = reinterpret_cast<v8::Isolate*>(isolate); - v8_isolate->SetJitCodeEventHandler(kJitCodeEventDefault, EventHandler); + isolate->v8_file_logger()->ResetEtwCodeEventHandler(); } void IsolateLoadScriptData::UpdateAllIsolates(bool etw_enabled) { @@ -186,6 +178,10 @@ bool IsolateLoadScriptData::MaybeAddLoadedScript(Isolate* isolate, } // namespace +void EnableETWLog(Isolate* isolate) { + IsolateLoadScriptData::EnableLog(isolate); +} + // TODO(v8/11911): UnboundScript::GetLineNumber should be replaced SharedFunctionInfo GetSharedFunctionInfo(const JitCodeEvent* event) { return event->script.IsEmpty() ? 
SharedFunctionInfo() @@ -237,11 +233,8 @@ std::wstring GetScriptMethodName(const JitCodeEvent* event) { : GetScriptMethodNameFromSharedFunctionInfo(sfi); } -void MaybeSetHandlerNow(Isolate* isolate) { - IsolateLoadScriptData::EnableLog(isolate, true); -} - void UpdateETWEnabled(bool enabled) { + DCHECK(v8_flags.enable_etw_stack_walking); if (enabled == is_etw_enabled) { return; } @@ -258,6 +251,7 @@ void WINAPI ETWEnableCallback(LPCGUID /* source_id */, ULONG is_enabled, ULONGLONG match_all_keyword, PEVENT_FILTER_DESCRIPTOR /* filter_data */, PVOID /* callback_context */) { + DCHECK(v8_flags.enable_etw_stack_walking); bool is_etw_enabled_now = is_enabled && level >= kTraceLevel && (match_any_keyword & kJScriptRuntimeKeyword) && @@ -337,7 +331,6 @@ void EventHandler(const JitCodeEvent* event) { Field("MethodAddressRangeID", TlgInUINT16), Field("SourceID", TlgInUINT64), Field("Line", TlgInUINT32), Field("Column", TlgInUINT32), Field("MethodName", TlgInUNICODESTRING)); - LogEventData(g_v8Provider, &method_load_event_meta, &method_load_event_fields, script_context, event->code_start, (uint64_t)event->code_len, (uint32_t)0, // MethodId diff --git a/deps/v8/src/diagnostics/etw-jit-win.h b/deps/v8/src/diagnostics/etw-jit-win.h index 2d123eb28eb837..324b6ad1c2efa6 100644 --- a/deps/v8/src/diagnostics/etw-jit-win.h +++ b/deps/v8/src/diagnostics/etw-jit-win.h @@ -5,6 +5,10 @@ #ifndef V8_DIAGNOSTICS_ETW_JIT_WIN_H_ #define V8_DIAGNOSTICS_ETW_JIT_WIN_H_ +#include <atomic> + +#include "include/v8config.h" + namespace v8 { class Isolate; @@ -12,12 +16,19 @@ struct JitCodeEvent; namespace internal { namespace ETWJITInterface { +extern std::atomic<bool> is_etw_enabled; + void Register(); void Unregister(); void AddIsolate(Isolate* isolate); void RemoveIsolate(Isolate* isolate); void EventHandler(const v8::JitCodeEvent* event); -void MaybeSetHandlerNow(Isolate* isolate); +void EnableETWLog(Isolate* isolate); +V8_INLINE void MaybeSetHandlerNow(Isolate* isolate) { + if (is_etw_enabled) { + EnableETWLog(isolate); + } +} } // namespace ETWJITInterface } // namespace internal } // namespace v8 diff --git a/deps/v8/src/diagnostics/gdb-jit.cc b/deps/v8/src/diagnostics/gdb-jit.cc index 4f8a027a39381b..ee087fac171b4f 100644 --- a/deps/v8/src/diagnostics/gdb-jit.cc +++ b/deps/v8/src/diagnostics/gdb-jit.cc @@ -1986,7 +1986,7 @@ static void AddJITCodeEntry(CodeMap* map, const base::AddressRegion region, const char* name_hint) { #if defined(DEBUG) && !V8_OS_WIN static int file_num = 0; - if (FLAG_gdbjit_dump && dump_if_enabled) { + if (v8_flags.gdbjit_dump && dump_if_enabled) { static const int kMaxFileNameSize = 64; char file_name[64]; @@ -2014,7 +2014,7 @@ static void AddCode(const char* name, base::AddressRegion region, CodeMap* code_map = GetCodeMap(); RemoveJITCodeEntries(code_map, region); - if (!FLAG_gdbjit_full && !code_desc.IsLineInfoAvailable()) { + if (!v8_flags.gdbjit_full && !code_desc.IsLineInfoAvailable()) { delete lineinfo; return; } @@ -2026,12 +2026,12 @@ static void AddCode(const char* name, base::AddressRegion region, const char* name_hint = nullptr; bool should_dump = false; - if (FLAG_gdbjit_dump) { - if (strlen(FLAG_gdbjit_dump_filter) == 0) { + if (v8_flags.gdbjit_dump) { + if (strlen(v8_flags.gdbjit_dump_filter) == 0) { name_hint = name; should_dump = true; } else if (name != nullptr) { - name_hint = strstr(name, FLAG_gdbjit_dump_filter); + name_hint = strstr(name, v8_flags.gdbjit_dump_filter); should_dump = (name_hint != nullptr); } } @@ -2039,7 +2039,7 @@ static void AddCode(const char* name, 
base::AddressRegion region, } void EventHandler(const v8::JitCodeEvent* event) { - if (!FLAG_gdbjit) return; + if (!v8_flags.gdbjit) return; if ((event->code_type != v8::JitCodeEvent::JIT_CODE) && (event->code_type != v8::JitCodeEvent::WASM_CODE)) { return; diff --git a/deps/v8/src/diagnostics/objects-debug.cc b/deps/v8/src/diagnostics/objects-debug.cc index 3a57608e3061e2..0db2d9e2438122 100644 --- a/deps/v8/src/diagnostics/objects-debug.cc +++ b/deps/v8/src/diagnostics/objects-debug.cc @@ -46,6 +46,7 @@ #ifdef V8_INTL_SUPPORT #include "src/objects/js-date-time-format-inl.h" #include "src/objects/js-display-names-inl.h" +#include "src/objects/js-duration-format-inl.h" #endif // V8_INTL_SUPPORT #include "src/objects/js-generator-inl.h" #ifdef V8_INTL_SUPPORT @@ -63,6 +64,7 @@ #include "src/objects/js-segmenter-inl.h" #include "src/objects/js-segments-inl.h" #endif // V8_INTL_SUPPORT +#include "src/objects/js-raw-json-inl.h" #include "src/objects/js-shared-array-inl.h" #include "src/objects/js-struct-inl.h" #include "src/objects/js-temporal-objects-inl.h" @@ -1098,9 +1100,9 @@ void CodeDataContainer::CodeDataContainerVerify(Isolate* isolate) { CHECK_EQ(code.kind(), kind()); CHECK_EQ(code.builtin_id(), builtin_id()); if (V8_REMOVE_BUILTINS_CODE_OBJECTS) { - // When FLAG_interpreted_frames_native_stack is enabled each interpreted - // function gets its own copy of the InterpreterEntryTrampoline. - // Thus, there could be Code'ful builtins. + // When v8_flags.interpreted_frames_native_stack is enabled each + // interpreted function gets its own copy of the + // InterpreterEntryTrampoline. Thus, there could be Code'ful builtins. CHECK_IMPLIES(isolate->embedded_blob_code() && is_off_heap_trampoline(), builtin_id() == Builtin::kInterpreterEntryTrampoline); } @@ -1299,7 +1301,7 @@ void JSSharedArray::JSSharedArrayVerify(Isolate* isolate) { void WeakCell::WeakCellVerify(Isolate* isolate) { CHECK(IsWeakCell()); - CHECK(target().IsJSReceiver() || target().IsUndefined(isolate)); + CHECK(target().IsUndefined(isolate) || target().CanBeHeldWeakly()); CHECK(prev().IsWeakCell() || prev().IsUndefined(isolate)); if (prev().IsWeakCell()) { @@ -1327,7 +1329,7 @@ void WeakCell::WeakCellVerify(Isolate* isolate) { void JSWeakRef::JSWeakRefVerify(Isolate* isolate) { CHECK(IsJSWeakRef()); JSObjectVerify(isolate); - CHECK(target().IsUndefined(isolate) || target().IsJSReceiver()); + CHECK(target().IsUndefined(isolate) || target().CanBeHeldWeakly()); } void JSFinalizationRegistry::JSFinalizationRegistryVerify(Isolate* isolate) { @@ -1926,7 +1928,7 @@ void Script::ScriptVerify(Isolate* isolate) { void NormalizedMapCache::NormalizedMapCacheVerify(Isolate* isolate) { WeakFixedArray::cast(*this).WeakFixedArrayVerify(isolate); - if (FLAG_enable_slow_asserts) { + if (v8_flags.enable_slow_asserts) { for (int i = 0; i < length(); i++) { MaybeObject e = WeakFixedArray::Get(i); HeapObject heap_object; diff --git a/deps/v8/src/diagnostics/objects-printer.cc b/deps/v8/src/diagnostics/objects-printer.cc index 55270dfe2ba5b7..ce4d15b2c27e14 100644 --- a/deps/v8/src/diagnostics/objects-printer.cc +++ b/deps/v8/src/diagnostics/objects-printer.cc @@ -413,7 +413,7 @@ void PrintTypedArrayElements(std::ostream& os, const ElementType* data_ptr, size_t length, bool is_on_heap) { if (length == 0) return; size_t previous_index = 0; - if (i::FLAG_mock_arraybuffer_allocator && !is_on_heap) { + if (i::v8_flags.mock_arraybuffer_allocator && !is_on_heap) { // Don't try to print data that's not actually allocated. 
os << "\n 0-" << length << ": <mocked array buffer bytes>"; return; @@ -1269,10 +1269,6 @@ void FeedbackNexus::Print(std::ostream& os) { case FeedbackSlotKind::kStoreGlobalSloppy: case FeedbackSlotKind::kStoreGlobalStrict: case FeedbackSlotKind::kStoreInArrayLiteral: - case FeedbackSlotKind::kSetKeyedSloppy: - case FeedbackSlotKind::kSetKeyedStrict: - case FeedbackSlotKind::kSetNamedSloppy: - case FeedbackSlotKind::kSetNamedStrict: case FeedbackSlotKind::kDefineNamedOwn: { os << InlineCacheState2String(ic_state()); break; @@ -1306,6 +1302,24 @@ void FeedbackNexus::Print(std::ostream& os) { } break; } + case FeedbackSlotKind::kSetNamedSloppy: + case FeedbackSlotKind::kSetNamedStrict: + case FeedbackSlotKind::kSetKeyedSloppy: + case FeedbackSlotKind::kSetKeyedStrict: { + os << InlineCacheState2String(ic_state()); + if (ic_state() == InlineCacheState::MONOMORPHIC) { + os << "\n " << Brief(GetFeedback()) << ": "; + StoreHandler::PrintHandler(GetFeedbackExtra().GetHeapObjectOrSmi(), os); + } else if (ic_state() == InlineCacheState::POLYMORPHIC) { + WeakFixedArray array = + WeakFixedArray::cast(GetFeedback().GetHeapObject()); + for (int i = 0; i < array.length(); i += 2) { + os << "\n " << Brief(array.Get(i)) << ": "; + StoreHandler::PrintHandler(array.Get(i + 1).GetHeapObjectOrSmi(), os); + } + } + break; + } case FeedbackSlotKind::kBinaryOp: { os << "BinaryOp:" << GetBinaryOperationFeedback(); break; @@ -1319,7 +1333,6 @@ void FeedbackNexus::Print(std::ostream& os) { break; } case FeedbackSlotKind::kLiteral: - case FeedbackSlotKind::kTypeProfile: break; case FeedbackSlotKind::kJumpLoop: os << "JumpLoop"; @@ -1544,7 +1557,7 @@ void JSArrayBuffer::JSArrayBufferPrint(std::ostream& os) { if (is_detachable()) os << "\n - detachable"; if (was_detached()) os << "\n - detached"; if (is_shared()) os << "\n - shared"; - if (is_resizable()) os << "\n - resizable"; + if (is_resizable_by_js()) os << "\n - resizable_by_js"; JSObjectPrintBody(os, *this, !was_detached()); } @@ -1873,7 +1886,6 @@ void ArrayBoilerplateDescription::ArrayBoilerplateDescriptionPrint( void AsmWasmData::AsmWasmDataPrint(std::ostream& os) { PrintHeader(os, "AsmWasmData"); os << "\n - native module: " << Brief(managed_native_module()); - os << "\n - export_wrappers: " << Brief(export_wrappers()); os << "\n - uses bitset: " << uses_bitset().value(); os << "\n"; } @@ -1881,7 +1893,6 @@ void AsmWasmData::AsmWasmDataPrint(std::ostream& os) { void WasmTypeInfo::WasmTypeInfoPrint(std::ostream& os) { PrintHeader(os, "WasmTypeInfo"); os << "\n - type address: " << reinterpret_cast<void*>(native_type()); - // TODO(manoskouk): Print supertype info. 
os << "\n - supertypes: "; for (int i = 0; i < supertypes_length(); i++) { os << "\n - " << Brief(supertypes(i)); @@ -1923,7 +1934,8 @@ void WasmStruct::WasmStructPrint(std::ostream& os) { case wasm::kRtt: { Tagged_t raw = base::ReadUnalignedValue<Tagged_t>(field_address); #if V8_COMPRESS_POINTERS - Address obj = DecompressTaggedPointer(address(), raw); + Address obj = + V8HeapCompressionScheme::DecompressTaggedPointer(address(), raw); #else Address obj = raw; #endif @@ -2156,7 +2168,6 @@ void WasmModuleObject::WasmModuleObjectPrint(std::ostream& os) { PrintHeader(os, "WasmModuleObject"); os << "\n - module: " << module(); os << "\n - native module: " << native_module(); - os << "\n - export wrappers: " << Brief(export_wrappers()); os << "\n - script: " << Brief(script()); os << "\n"; } @@ -2400,6 +2411,12 @@ void JSTemporalCalendar::JSTemporalCalendarPrint(std::ostream& os) { JSObjectPrintHeader(os, *this, "JSTemporalCalendar"); JSObjectPrintBody(os, *this); } + +void JSRawJson::JSRawJsonPrint(std::ostream& os) { + JSObjectPrintHeader(os, *this, "JSRawJson"); + JSObjectPrintBody(os, *this); +} + #ifdef V8_INTL_SUPPORT void JSV8BreakIterator::JSV8BreakIteratorPrint(std::ostream& os) { JSObjectPrintHeader(os, *this, "JSV8BreakIterator"); @@ -2440,6 +2457,15 @@ void JSDisplayNames::JSDisplayNamesPrint(std::ostream& os) { JSObjectPrintBody(os, *this); } +void JSDurationFormat::JSDurationFormatPrint(std::ostream& os) { + JSObjectPrintHeader(os, *this, "JSDurationFormat"); + os << "\n - style_flags: " << style_flags(); + os << "\n - display_flags: " << display_flags(); + os << "\n - icu locale: " << Brief(icu_locale()); + os << "\n - icu number formatter: " << Brief(icu_number_formatter()); + JSObjectPrintBody(os, *this); +} + void JSListFormat::JSListFormatPrint(std::ostream& os) { JSObjectPrintHeader(os, *this, "JSListFormat"); os << "\n - locale: " << Brief(locale()); @@ -2931,8 +2957,8 @@ inline i::Object GetObjectFromRaw(void* object) { if (RoundDown<i::kPtrComprCageBaseAlignment>(object_ptr) == i::kNullAddress) { // Try to decompress pointer. i::Isolate* isolate = i::Isolate::Current(); - object_ptr = - i::DecompressTaggedAny(isolate, static_cast<i::Tagged_t>(object_ptr)); + object_ptr = i::V8HeapCompressionScheme::DecompressTaggedAny( + isolate, static_cast<i::Tagged_t>(object_ptr)); } #endif return i::Object(object_ptr); @@ -2943,14 +2969,17 @@ inline i::Object GetObjectFromRaw(void* object) { // // The following functions are used by our gdb macros. 
// +V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE extern i::Object _v8_internal_Get_Object(void* object) { return GetObjectFromRaw(object); } +V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE extern void _v8_internal_Print_Object(void* object) { GetObjectFromRaw(object).Print(); } +V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE extern void _v8_internal_Print_LoadHandler(void* object) { #ifdef OBJECT_PRINT i::StdoutStream os; @@ -2959,6 +2988,7 @@ V8_EXPORT_PRIVATE extern void _v8_internal_Print_LoadHandler(void* object) { #endif } +V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE extern void _v8_internal_Print_StoreHandler(void* object) { #ifdef OBJECT_PRINT i::StdoutStream os; @@ -2967,6 +2997,7 @@ V8_EXPORT_PRIVATE extern void _v8_internal_Print_StoreHandler(void* object) { #endif } +V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE extern void _v8_internal_Print_Code(void* object) { i::Address address = reinterpret_cast<i::Address>(object); i::Isolate* isolate = i::Isolate::Current(); @@ -3009,11 +3040,13 @@ V8_EXPORT_PRIVATE extern void _v8_internal_Print_Code(void* object) { #endif // ENABLE_DISASSEMBLER } +V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE extern void _v8_internal_Print_StackTrace() { i::Isolate* isolate = i::Isolate::Current(); isolate->PrintStack(stdout); } +V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE extern void _v8_internal_Print_TransitionTree(void* object) { i::Object o(GetObjectFromRaw(object)); if (!o.IsMap()) { diff --git a/deps/v8/src/diagnostics/perf-jit.cc b/deps/v8/src/diagnostics/perf-jit.cc index 3fde07b76ff8b6..ca5fc54a4b1895 100644 --- a/deps/v8/src/diagnostics/perf-jit.cc +++ b/deps/v8/src/diagnostics/perf-jit.cc @@ -143,7 +143,8 @@ void LinuxPerfJitLogger::OpenJitDumpFile() { // If --perf-prof-delete-file is given, unlink the file right after opening // it. This keeps the file handle to the file valid. This only works on Linux, // which is the only platform supported for --perf-prof anyway. - if (FLAG_perf_prof_delete_file) CHECK_EQ(0, unlink(perf_dump_name.begin())); + if (v8_flags.perf_prof_delete_file) + CHECK_EQ(0, unlink(perf_dump_name.begin())); marker_address_ = OpenMarkerFile(fd); if (marker_address_ == nullptr) return; @@ -216,7 +217,7 @@ void LinuxPerfJitLogger::LogRecordedBuffer( Handle<AbstractCode> abstract_code, MaybeHandle<SharedFunctionInfo> maybe_shared, const char* name, int length) { - if (FLAG_perf_basic_prof_only_functions) { + if (v8_flags.perf_basic_prof_only_functions) { CodeKind code_kind = abstract_code->kind(isolate_); if (code_kind != CodeKind::INTERPRETED_FUNCTION && code_kind != CodeKind::TURBOFAN && code_kind != CodeKind::MAGLEV && @@ -236,7 +237,7 @@ void LinuxPerfJitLogger::LogRecordedBuffer( // Debug info has to be emitted first. Handle<SharedFunctionInfo> shared; - if (FLAG_perf_prof && maybe_shared.ToHandle(&shared)) { + if (v8_flags.perf_prof && maybe_shared.ToHandle(&shared)) { // TODO(herhut): This currently breaks for js2wasm/wasm2js functions. if (code->kind() != CodeKind::JS_TO_WASM_FUNCTION && code->kind() != CodeKind::WASM_TO_JS_FUNCTION) { @@ -248,7 +249,7 @@ void LinuxPerfJitLogger::LogRecordedBuffer( uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code->InstructionStart()); // Unwinding info comes right after debug info. 
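The --perf-prof-delete-file behavior noted above relies on a standard Unix idiom: unlinking an open file removes its directory entry, but the open descriptor keeps the inode alive until the last reference is closed. A self-contained sketch:

    #include <fcntl.h>
    #include <unistd.h>

    int OpenScratchFile(const char* path) {
      int fd = open(path, O_CREAT | O_RDWR, 0600);
      if (fd >= 0) unlink(path);  // name disappears; data lives until close(fd)
      return fd;                  // reads and writes through fd keep working
    }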
- if (FLAG_perf_prof_unwinding_info) LogWriteUnwindingInfo(*code); + if (v8_flags.perf_prof_unwinding_info) LogWriteUnwindingInfo(*code); WriteJitCodeLoadEntry(code_pointer, code->InstructionSize(), code_name, length); @@ -261,7 +262,7 @@ void LinuxPerfJitLogger::LogRecordedBuffer(const wasm::WasmCode* code, if (perf_output_handle_ == nullptr) return; - if (FLAG_perf_prof_annotate_wasm) LogWriteDebugInfo(code); + if (v8_flags.perf_prof_annotate_wasm) LogWriteDebugInfo(code); WriteJitCodeLoadEntry(code->instructions().begin(), code->instructions().length(), name, length); diff --git a/deps/v8/src/diagnostics/unwinding-info-win64.cc b/deps/v8/src/diagnostics/unwinding-info-win64.cc index 37f02cbf1d2f59..767eb015ab1087 100644 --- a/deps/v8/src/diagnostics/unwinding-info-win64.cc +++ b/deps/v8/src/diagnostics/unwinding-info-win64.cc @@ -25,18 +25,18 @@ namespace v8 { namespace internal { namespace win64_unwindinfo { -bool CanEmitUnwindInfoForBuiltins() { return FLAG_win64_unwinding_info; } +bool CanEmitUnwindInfoForBuiltins() { return v8_flags.win64_unwinding_info; } bool CanRegisterUnwindInfoForNonABICompliantCodeRange() { - return !FLAG_jitless; + return !v8_flags.jitless; } bool RegisterUnwindInfoForExceptionHandlingOnly() { DCHECK(CanRegisterUnwindInfoForNonABICompliantCodeRange()); #if defined(V8_OS_WIN_ARM64) - return !FLAG_win64_unwinding_info; + return !v8_flags.win64_unwinding_info; #else - return !IsWindows8OrGreater() || !FLAG_win64_unwinding_info; + return !IsWindows8OrGreater() || !v8_flags.win64_unwinding_info; #endif } diff --git a/deps/v8/src/execution/arm64/simulator-arm64.cc b/deps/v8/src/execution/arm64/simulator-arm64.cc index de3c992ac9278e..adb1ef10410ad5 100644 --- a/deps/v8/src/execution/arm64/simulator-arm64.cc +++ b/deps/v8/src/execution/arm64/simulator-arm64.cc @@ -2693,27 +2693,6 @@ void Simulator::VisitDataProcessing2Source(Instruction* instr) { } } -// The algorithm used is described in section 8.2 of -// Hacker's Delight, by Henry S. Warren, Jr. -// It assumes that a right shift on a signed integer is an arithmetic shift. -static int64_t MultiplyHighSigned(int64_t u, int64_t v) { - uint64_t u0, v0, w0; - int64_t u1, v1, w1, w2, t; - - u0 = u & 0xFFFFFFFFLL; - u1 = u >> 32; - v0 = v & 0xFFFFFFFFLL; - v1 = v >> 32; - - w0 = u0 * v0; - t = u1 * v0 + (w0 >> 32); - w1 = t & 0xFFFFFFFFLL; - w2 = t >> 32; - w1 = u0 * v1 + w1; - - return u1 * v1 + w2 + (w1 >> 32); -} - void Simulator::VisitDataProcessing3Source(Instruction* instr) { int64_t result = 0; // Extract and sign- or zero-extend 32-bit arguments for widening operations. 
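The hand-rolled MultiplyHighSigned helper deleted above (the Hacker's Delight section 8.2 decomposition) is replaced in the next hunk by base::bits::SignedMulHigh64, and the UMULH_x case gains the matching UnsignedMulHigh64 call. The operation is simply the upper 64 bits of a 128-bit product; on compilers with __int128 it reduces to a sketch like this:

    #include <cstdint>

    // Assumes a GCC/Clang-style __int128; portable fallbacks decompose into
    // 32-bit halves exactly like the deleted routine did.
    int64_t MulHighSignedSketch(int64_t u, int64_t v) {
      return static_cast<int64_t>(
          (static_cast<__int128>(u) * static_cast<__int128>(v)) >> 64);
    }

    uint64_t MulHighUnsignedSketch(uint64_t u, uint64_t v) {
      return static_cast<uint64_t>(
          (static_cast<unsigned __int128>(u) * v) >> 64);
    }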
@@ -2748,7 +2727,13 @@ void Simulator::VisitDataProcessing3Source(Instruction* instr) { break; case SMULH_x: DCHECK_EQ(instr->Ra(), kZeroRegCode); - result = MultiplyHighSigned(xreg(instr->Rn()), xreg(instr->Rm())); + result = + base::bits::SignedMulHigh64(xreg(instr->Rn()), xreg(instr->Rm())); + break; + case UMULH_x: + DCHECK_EQ(instr->Ra(), kZeroRegCode); + result = + base::bits::UnsignedMulHigh64(xreg(instr->Rn()), xreg(instr->Rm())); break; default: UNIMPLEMENTED(); diff --git a/deps/v8/src/execution/frame-constants.h b/deps/v8/src/execution/frame-constants.h index 423f41bcdd10a6..34291940b8d9b1 100644 --- a/deps/v8/src/execution/frame-constants.h +++ b/deps/v8/src/execution/frame-constants.h @@ -55,14 +55,14 @@ class CommonFrameConstants : public AllStatic { static constexpr int kCallerSPOffset = kCallerPCOffset + 1 * kPCOnStackSize; // Fixed part of the frame consists of return address, caller fp, - // constant pool (if v8_flags.enable_embedded_constant_pool), context, and + // constant pool (if V8_EMBEDDED_CONSTANT_POOL_BOOL), context, and // function. CommonFrame::IterateExpressions assumes that kLastObjectOffset // is the last object pointer. static constexpr int kFixedFrameSizeAboveFp = kPCOnStackSize + kFPOnStackSize; static constexpr int kFixedSlotCountAboveFp = kFixedFrameSizeAboveFp / kSystemPointerSize; static constexpr int kCPSlotSize = - v8_flags.enable_embedded_constant_pool.value() ? kSystemPointerSize : 0; + V8_EMBEDDED_CONSTANT_POOL_BOOL ? kSystemPointerSize : 0; static constexpr int kCPSlotCount = kCPSlotSize / kSystemPointerSize; static constexpr int kConstantPoolOffset = kCPSlotSize ? -1 * kSystemPointerSize : 0; diff --git a/deps/v8/src/execution/frames.cc b/deps/v8/src/execution/frames.cc index 8e982f83628556..0ca10a21e190f5 100644 --- a/deps/v8/src/execution/frames.cc +++ b/deps/v8/src/execution/frames.cc @@ -607,7 +607,7 @@ void StackFrame::IteratePc(RootVisitor* v, Address* pc_address, Address pc = holder.InstructionStart(isolate_, old_pc) + pc_offset; // TODO(v8:10026): avoid replacing a signed pointer. PointerAuthentication::ReplacePC(pc_address, pc, kSystemPointerSize); - if (v8_flags.enable_embedded_constant_pool && constant_pool_address) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL && constant_pool_address) { *constant_pool_address = holder.constant_pool(); } } @@ -842,7 +842,7 @@ void ExitFrame::ComputeCallerState(State* state) const { state->pc_address = ResolveReturnAddressLocation( reinterpret_cast<Address*>(fp() + ExitFrameConstants::kCallerPCOffset)); state->callee_pc_address = nullptr; - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { state->constant_pool_address = reinterpret_cast<Address*>( fp() + ExitFrameConstants::kConstantPoolOffset); } @@ -1109,8 +1109,8 @@ void VisitSpillSlot(Isolate* isolate, RootVisitor* v, if (!HAS_SMI_TAG(value) && value <= 0xffffffff) { // We don't need to update smi values or full pointers. was_compressed = true; - *spill_slot.location() = - DecompressTaggedPointer(cage_base, static_cast<Tagged_t>(value)); + *spill_slot.location() = V8HeapCompressionScheme::DecompressTaggedPointer( + cage_base, static_cast<Tagged_t>(value)); if (DEBUG_BOOL) { // Ensure that the spill slot contains correct heap object. HeapObject raw = HeapObject::cast(Object(*spill_slot.location())); @@ -1144,8 +1144,8 @@ void VisitSpillSlot(Isolate* isolate, RootVisitor* v, if (!HAS_SMI_TAG(compressed_value)) { was_compressed = slot_contents <= 0xFFFFFFFF; // We don't need to update smi values. 
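The spill-slot visitation below decompresses a possibly-compressed slot value before updating it and then re-compresses it, and the patch routes both steps through the V8HeapCompressionScheme class instead of the old free functions. The arithmetic behind it, in a simplified sketch (assumption: compression keeps the low 32 bits and decompression rejoins them with a 4 GB-aligned cage base, which is the general shape of V8's scheme):

    #include <cstdint>

    using TaggedSketch_t = uint32_t;

    TaggedSketch_t CompressSketch(uintptr_t full_ptr) {
      return static_cast<TaggedSketch_t>(full_ptr);  // keep the low 32 bits
    }

    uintptr_t DecompressSketch(uintptr_t cage_base, TaggedSketch_t value) {
      // The cage is 4 GB aligned, so OR-ing is equivalent to adding the offset.
      return (cage_base & ~uintptr_t{0xFFFFFFFF}) | value;
    }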
- *spill_slot.location() = - DecompressTaggedPointer(cage_base, compressed_value); + *spill_slot.location() = V8HeapCompressionScheme::DecompressTaggedPointer( + cage_base, compressed_value); } } #endif @@ -1154,7 +1154,8 @@ void VisitSpillSlot(Isolate* isolate, RootVisitor* v, if (was_compressed) { // Restore compression. Generated code should be able to trust that // compressed spill slots remain compressed. - *spill_slot.location() = CompressTagged(*spill_slot.location()); + *spill_slot.location() = + V8HeapCompressionScheme::CompressTagged(*spill_slot.location()); } #endif } @@ -1469,14 +1470,27 @@ void MaglevFrame::Iterate(RootVisitor* v) const { // the stack guard in the prologue of the maglev function. This means that // we've set up the frame header, but not the spill slots yet. - // DCHECK the frame setup under the above assumption. Include one extra slot - // for the single argument into StackGuardWithGap, and another for the saved - // new.target register. - DCHECK_EQ(actual_frame_size, StandardFrameConstants::kFixedFrameSizeFromFp + - 2 * kSystemPointerSize); - DCHECK_EQ(isolate()->c_function(), - Runtime::FunctionForId(Runtime::kStackGuardWithGap)->entry); - DCHECK_EQ(maglev_safepoint_entry.num_pushed_registers(), 0); + if (v8_flags.maglev_ool_prologue) { + // DCHECK the frame setup under the above assumption. The + // MaglevOutOfLinePrologue builtin creates an INTERNAL frame for the + // StackGuardWithGap call (where extra slots and args are), so the MAGLEV + // frame itself is exactly kFixedFrameSizeFromFp. + DCHECK_EQ(actual_frame_size, + StandardFrameConstants::kFixedFrameSizeFromFp); + DCHECK_EQ(isolate()->c_function(), + Runtime::FunctionForId(Runtime::kStackGuardWithGap)->entry); + DCHECK_EQ(maglev_safepoint_entry.num_pushed_registers(), 0); + } else { + // DCHECK the frame setup under the above assumption. Include one extra + // slot for the single argument into StackGuardWithGap, and another for + // the saved new.target register. + DCHECK_EQ(actual_frame_size, + StandardFrameConstants::kFixedFrameSizeFromFp + + 2 * kSystemPointerSize); + DCHECK_EQ(isolate()->c_function(), + Runtime::FunctionForId(Runtime::kStackGuardWithGap)->entry); + DCHECK_EQ(maglev_safepoint_entry.num_pushed_registers(), 0); + } spill_slot_count = 0; tagged_slot_count = 0; } @@ -2705,14 +2719,6 @@ void WasmCompileLazyFrame::Iterate(RootVisitor* v) const { int func_index = GetFunctionIndex(); wasm::NativeModule* native_module = GetNativeModule(); - if (!native_module) { - // This GC was triggered by lazy compilation, because otherwise this frame - // would not be on the stack. The native module gets set on the stack after - // a successful compilation. The native module being nullptr means that - // compilation failed, and we don't have to preserve any references because - // the stack will get unwound immediately after the GC. - return; - } // Scan the spill slots of the parameter registers. Parameters in WebAssembly // get reordered such that first all value parameters get put into registers. @@ -2752,22 +2758,27 @@ void WasmCompileLazyFrame::Iterate(RootVisitor* v) const { } // Next we scan the slots of stack parameters. - wasm::WasmCode* wasm_code = native_module->GetCode(func_index); - uint32_t first_tagged_stack_slot = wasm_code->first_tagged_parameter_slot(); - uint32_t num_tagged_stack_slots = wasm_code->num_tagged_parameter_slots(); - - // Visit tagged parameters that have been passed to the function of this - // frame. Conceptionally these parameters belong to the parent frame. 
However, - // the exact count is only known by this frame (in the presence of tail calls, - // this information cannot be derived from the call site). - if (num_tagged_stack_slots > 0) { - FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp())); - tagged_parameter_base += first_tagged_stack_slot; - FullObjectSlot tagged_parameter_limit = - tagged_parameter_base + num_tagged_stack_slots; - - v->VisitRootPointers(Root::kStackRoots, "stack parameter", - tagged_parameter_base, tagged_parameter_limit); + // If there is no code, then lazy compilation failed (which can happen with + // lazy validation). In that case, just do not scan parameters, which will + // never be used anyway because the stack will get unwound when returning to + // the CEntry stub. + if (wasm::WasmCode* wasm_code = native_module->GetCode(func_index)) { + uint32_t first_tagged_stack_slot = wasm_code->first_tagged_parameter_slot(); + uint32_t num_tagged_stack_slots = wasm_code->num_tagged_parameter_slots(); + + // Visit tagged parameters that have been passed to the function of this + // frame. Conceptually these parameters belong to the parent frame. + // However, the exact count is only known by this frame (in the presence of + // tail calls, this information cannot be derived from the call site). + if (num_tagged_stack_slots > 0) { + FullObjectSlot tagged_parameter_base(&Memory<Address>(caller_sp())); + tagged_parameter_base += first_tagged_stack_slot; + FullObjectSlot tagged_parameter_limit = + tagged_parameter_base + num_tagged_stack_slots; + + v->VisitRootPointers(Root::kStackRoots, "stack parameter", + tagged_parameter_base, tagged_parameter_limit); + } } } #endif // V8_ENABLE_WEBASSEMBLY diff --git a/deps/v8/src/execution/isolate-inl.h b/deps/v8/src/execution/isolate-inl.h index 14dc36529097ce..00b5f82f606752 100644 --- a/deps/v8/src/execution/isolate-inl.h +++ b/deps/v8/src/execution/isolate-inl.h @@ -115,7 +115,7 @@ Object Isolate::VerifyBuiltinsResult(Object result) { // because that's the assumption in generated code (which might call this // builtin). if (!result.IsSmi()) { - DCHECK_EQ(result.ptr(), DecompressTaggedPointer( + DCHECK_EQ(result.ptr(), V8HeapCompressionScheme::DecompressTaggedPointer( this, static_cast<Tagged_t>(result.ptr()))); } #endif @@ -131,12 +131,12 @@ ObjectPair Isolate::VerifyBuiltinsResult(ObjectPair pair) { // because that's the assumption in generated code (which might call this // builtin). if (!HAS_SMI_TAG(pair.x)) { - DCHECK_EQ(pair.x, - DecompressTaggedPointer(this, static_cast<Tagged_t>(pair.x))); + DCHECK_EQ(pair.x, V8HeapCompressionScheme::DecompressTaggedPointer( + this, static_cast<Tagged_t>(pair.x))); } if (!HAS_SMI_TAG(pair.y)) { - DCHECK_EQ(pair.y, - DecompressTaggedPointer(this, static_cast<Tagged_t>(pair.y))); + DCHECK_EQ(pair.y, V8HeapCompressionScheme::DecompressTaggedPointer( + this, static_cast<Tagged_t>(pair.y))); } #endif // V8_COMPRESS_POINTERS #endif // V8_HOST_ARCH_64_BIT diff --git a/deps/v8/src/execution/isolate-utils-inl.h b/deps/v8/src/execution/isolate-utils-inl.h index 66337c27dfe3e9..161edbe2aaf91b 100644 --- a/deps/v8/src/execution/isolate-utils-inl.h +++ b/deps/v8/src/execution/isolate-utils-inl.h @@ -18,8 +18,8 @@ namespace internal { // Aliases for GetPtrComprCageBase when // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE. Each Isolate has its own cage, whose // base address is also the Isolate root.
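// (A minimal sketch of the computation, assuming the usual power-of-two
// cage alignment; the authoritative version is
// V8HeapCompressionScheme::GetPtrComprCageBaseAddress:
//   return on_heap_addr & ~(kPtrComprCageBaseAlignment - 1);
// i.e. the on-heap address rounded down to the cage base.)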
-V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) { - return GetPtrComprCageBaseAddress(on_heap_addr); +V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { + return V8HeapCompressionScheme::GetPtrComprCageBaseAddress(on_heap_addr); } V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) { diff --git a/deps/v8/src/execution/isolate.cc b/deps/v8/src/execution/isolate.cc index 2e4f402c66e31c..3d6717a8778433 100644 --- a/deps/v8/src/execution/isolate.cc +++ b/deps/v8/src/execution/isolate.cc @@ -94,6 +94,7 @@ #include "src/objects/slots.h" #include "src/objects/smi.h" #include "src/objects/source-text-module-inl.h" +#include "src/objects/string-set-inl.h" #include "src/objects/visitors.h" #include "src/profiler/heap-profiler.h" #include "src/profiler/tracing-cpu-profiler.h" @@ -983,8 +984,9 @@ void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise, Builtin::kAsyncFunctionAwaitResolveClosure) || IsBuiltinFunction(isolate, reaction->fulfill_handler(), Builtin::kAsyncGeneratorAwaitResolveClosure) || - IsBuiltinFunction(isolate, reaction->fulfill_handler(), - Builtin::kAsyncGeneratorYieldResolveClosure)) { + IsBuiltinFunction( + isolate, reaction->fulfill_handler(), + Builtin::kAsyncGeneratorYieldWithAwaitResolveClosure)) { // Now peek into the handlers' AwaitContext to get to // the JSGeneratorObject for the async function. Handle<Context> context( @@ -1106,8 +1108,9 @@ void CaptureAsyncStackTrace(Isolate* isolate, CallSiteBuilder* builder) { Builtin::kAsyncFunctionAwaitResolveClosure) || IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), Builtin::kAsyncGeneratorAwaitResolveClosure) || - IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), - Builtin::kAsyncGeneratorYieldResolveClosure) || + IsBuiltinFunction( + isolate, promise_reaction_job_task->handler(), + Builtin::kAsyncGeneratorYieldWithAwaitResolveClosure) || IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), Builtin::kAsyncFunctionAwaitRejectClosure) || IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), @@ -3278,13 +3281,15 @@ Isolate* Isolate::GetProcessWideSharedIsolate(bool* created_shared_isolate) { DCHECK(HasFlagThatRequiresSharedHeap()); FATAL( "Build configuration does not support creating shared heap. The RO " - "heap must be shared, and pointer compression must either be off or " - "use a shared cage. V8 is compiled with RO heap %s and pointers %s.", + "heap must be shared, pointer compression must either be off or " + "use a shared cage, and write barriers must not be disabled. V8 is " + "compiled with RO heap %s, pointers %s and write barriers %s.", V8_SHARED_RO_HEAP_BOOL ? "SHARED" : "NOT SHARED", !COMPRESS_POINTERS_BOOL ? "NOT COMPRESSED" : (COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL ? "COMPRESSED IN SHARED CAGE" - : "COMPRESSED IN PER-ISOLATE CAGE")); + : "COMPRESSED IN PER-ISOLATE CAGE"), + V8_DISABLE_WRITE_BARRIERS_BOOL ? "DISABLED" : "ENABLED"); } base::MutexGuard guard(process_wide_shared_isolate_mutex_.Pointer()); @@ -3582,7 +3587,7 @@ void Isolate::Deinit() { } // All client isolates should already be detached. - if (is_shared() || is_shared_space_isolate()) { + if (is_shared()) { global_safepoint()->AssertNoClientsOnTearDown(); } @@ -3626,6 +3631,9 @@ void Isolate::Deinit() { // At this point there are no more background threads left in this isolate. heap_.safepoint()->AssertMainThreadIsOnlyThread(); + // Tear down data using the shared heap before detaching. 
+ heap_.TearDownWithSharedHeap(); + { // This isolate might have to park for a shared GC initiated by another // client isolate before it can actually detach from the shared isolate. @@ -3634,6 +3642,11 @@ void Isolate::Deinit() { DetachFromSharedSpaceIsolate(); } + // All client isolates should already be detached. + if (is_shared_space_isolate()) { + global_safepoint()->AssertNoClientsOnTearDown(); + } + // Since there are no other threads left, we can lock this mutex without any // ceremony. This signals to the tear down code that we are in a safepoint. base::RecursiveMutexGuard safepoint(&heap_.safepoint()->local_heaps_mutex_); @@ -4143,11 +4156,14 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data, if (HasFlagThatRequiresSharedHeap() && v8_flags.shared_space) { if (process_wide_shared_space_isolate_) { - attach_to_shared_space_isolate = process_wide_shared_space_isolate_; + owns_shareable_data_ = false; } else { process_wide_shared_space_isolate_ = this; is_shared_space_isolate_ = true; + DCHECK(owns_shareable_data_); } + + attach_to_shared_space_isolate = process_wide_shared_space_isolate_; } CHECK_IMPLIES(is_shared_space_isolate_, V8_CAN_CREATE_SHARED_HEAP_BOOL); @@ -4230,8 +4246,9 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data, // during deserialization. base::Optional<base::MutexGuard> clients_guard; - if (shared_isolate_) { - clients_guard.emplace(&shared_isolate_->global_safepoint()->clients_mutex_); + if (Isolate* isolate = + shared_isolate_ ? shared_isolate_ : attach_to_shared_space_isolate) { + clients_guard.emplace(&isolate->global_safepoint()->clients_mutex_); } // The main thread LocalHeap needs to be set up when attaching to the shared @@ -4240,6 +4257,11 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data, AttachToSharedIsolate(); AttachToSharedSpaceIsolate(attach_to_shared_space_isolate); + // Ensure that we use at most one of shared_isolate() and + // shared_space_isolate(). + DCHECK_IMPLIES(shared_isolate(), !shared_space_isolate()); + DCHECK_IMPLIES(shared_space_isolate(), !shared_isolate()); + // SetUp the object heap. DCHECK(!heap_.HasBeenSetUp()); heap_.SetUp(main_thread_local_heap()); @@ -4252,9 +4274,11 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data, string_forwarding_table_ = std::make_shared<StringForwardingTable>(this); } else { // Only refer to shared string table after attaching to the shared isolate. - DCHECK_NOT_NULL(shared_isolate()); - string_table_ = shared_isolate()->string_table_; - string_forwarding_table_ = shared_isolate()->string_forwarding_table_; + DCHECK(has_shared_heap()); + DCHECK(!is_shared()); + DCHECK(!is_shared_space_isolate()); + string_table_ = shared_heap_isolate()->string_table_; + string_forwarding_table_ = shared_heap_isolate()->string_forwarding_table_; } if (V8_SHORT_BUILTIN_CALLS_BOOL && v8_flags.short_builtin_calls) { @@ -4287,9 +4311,14 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data, } #ifdef V8_EXTERNAL_CODE_SPACE if (heap_.code_range()) { - code_cage_base_ = GetPtrComprCageBaseAddress(heap_.code_range()->base()); + code_cage_base_ = ExternalCodeCompressionScheme::GetPtrComprCageBaseAddress( + heap_.code_range()->base()); } else { - code_cage_base_ = cage_base(); + CHECK(jitless_); + // In jitless mode the code space pages will be allocated in the main + // pointer compression cage. 
+ code_cage_base_ = + ExternalCodeCompressionScheme::GetPtrComprCageBaseAddress(cage_base()); } #endif // V8_EXTERNAL_CODE_SPACE @@ -4301,9 +4330,9 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data, isolate_data_.shared_external_pointer_table_ = new ExternalPointerTable(); shared_external_pointer_table().Init(this); } else { - DCHECK_NOT_NULL(shared_isolate()); + DCHECK(has_shared_heap()); isolate_data_.shared_external_pointer_table_ = - shared_isolate()->isolate_data_.shared_external_pointer_table_; + shared_heap_isolate()->isolate_data_.shared_external_pointer_table_; } #endif // V8_COMPRESS_POINTERS @@ -5946,6 +5975,7 @@ void Isolate::AttachToSharedIsolate() { if (shared_isolate_) { DCHECK(shared_isolate_->is_shared()); + DCHECK(!v8_flags.shared_space); shared_isolate_->global_safepoint()->AppendClient(this); } @@ -5958,6 +5988,7 @@ void Isolate::DetachFromSharedIsolate() { DCHECK(attached_to_shared_isolate_); if (shared_isolate_) { + DCHECK(!v8_flags.shared_space); shared_isolate_->global_safepoint()->RemoveClient(this); shared_isolate_ = nullptr; } @@ -5971,6 +6002,7 @@ void Isolate::AttachToSharedSpaceIsolate(Isolate* shared_space_isolate) { DCHECK(!shared_space_isolate_.has_value()); shared_space_isolate_ = shared_space_isolate; if (shared_space_isolate) { + DCHECK(v8_flags.shared_space); shared_space_isolate->global_safepoint()->AppendClient(this); } } @@ -5979,6 +6011,7 @@ void Isolate::DetachFromSharedSpaceIsolate() { DCHECK(shared_space_isolate_.has_value()); Isolate* shared_space_isolate = shared_space_isolate_.value(); if (shared_space_isolate) { + DCHECK(v8_flags.shared_space); shared_space_isolate->global_safepoint()->RemoveClient(this); } shared_space_isolate_.reset(); @@ -6000,6 +6033,49 @@ ExternalPointerHandle Isolate::GetOrCreateWaiterQueueNodeExternalPointer() { } #endif // V8_COMPRESS_POINTERS +void Isolate::LocalsBlockListCacheSet(Handle<ScopeInfo> scope_info, + Handle<ScopeInfo> outer_scope_info, + Handle<StringSet> locals_blocklist) { + Handle<EphemeronHashTable> cache; + if (heap()->locals_block_list_cache().IsEphemeronHashTable()) { + cache = handle(EphemeronHashTable::cast(heap()->locals_block_list_cache()), + this); + } else { + CHECK(heap()->locals_block_list_cache().IsUndefined()); + constexpr int kInitialCapacity = 8; + cache = EphemeronHashTable::New(this, kInitialCapacity); + } + DCHECK(cache->IsEphemeronHashTable()); + + Handle<Object> value; + if (!outer_scope_info.is_null()) { + value = factory()->NewTuple2(outer_scope_info, locals_blocklist, + AllocationType::kYoung); + } else { + value = locals_blocklist; + } + + CHECK(!value.is_null()); + cache = EphemeronHashTable::Put(cache, scope_info, value); + heap()->set_locals_block_list_cache(*cache); +} + +Object Isolate::LocalsBlockListCacheGet(Handle<ScopeInfo> scope_info) { + DisallowGarbageCollection no_gc; + + if (!heap()->locals_block_list_cache().IsEphemeronHashTable()) { + return ReadOnlyRoots(this).the_hole_value(); + } + + Object maybe_value = + EphemeronHashTable::cast(heap()->locals_block_list_cache()) + .Lookup(scope_info); + if (maybe_value.IsTuple2()) return Tuple2::cast(maybe_value).value2(); + + CHECK(maybe_value.IsStringSet() || maybe_value.IsTheHole()); + return maybe_value; +} + namespace { class DefaultWasmAsyncResolvePromiseTask : public v8::Task { public: diff --git a/deps/v8/src/execution/isolate.h b/deps/v8/src/execution/isolate.h index 0fd3597f0cb17d..c25ad662bf3579 100644 --- a/deps/v8/src/execution/isolate.h +++ b/deps/v8/src/execution/isolate.h @@ -529,7 +529,6 @@ using 
DebugObjectCache = std::vector<Handle<HeapObject>>; V(bool, formatting_stack_trace, false) \ /* Perform side effect checks on function call and API callbacks. */ \ V(DebugInfo::ExecutionMode, debug_execution_mode, DebugInfo::kBreakpoints) \ - V(debug::TypeProfileMode, type_profile_mode, debug::TypeProfileMode::kNone) \ V(bool, disable_bytecode_flushing, false) \ V(int, last_console_context_id, 0) \ V(v8_inspector::V8Inspector*, inspector, nullptr) \ @@ -1431,10 +1430,6 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { return is_precise_count_code_coverage() || is_block_count_code_coverage(); } - bool is_collecting_type_profile() const { - return type_profile_mode() == debug::TypeProfileMode::kCollect; - } - // Collect feedback vectors with data for code coverage or type profile. // Reset the list, when both code coverage and type profile are not // needed anymore. This keeps many feedback vectors alive, but code @@ -1712,7 +1707,9 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { // shared heap object cache holds objects in shared among Isolates. Otherwise // this object cache is per-Isolate like the startup object cache. std::vector<Object>* shared_heap_object_cache() { - if (shared_isolate()) return shared_isolate()->shared_heap_object_cache(); + if (has_shared_heap()) { + return &shared_heap_isolate()->shared_heap_object_cache_; + } return &shared_heap_object_cache_; } @@ -1996,10 +1993,29 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { DCHECK(shared_isolate->is_shared()); DCHECK_NULL(shared_isolate_); DCHECK(!attached_to_shared_isolate_); + DCHECK(!v8_flags.shared_space); shared_isolate_ = shared_isolate; owns_shareable_data_ = false; } + // Returns true when this isolate supports allocation in shared spaces. + bool has_shared_heap() const { + return v8_flags.shared_space ? shared_space_isolate() : shared_isolate(); + } + + // Returns the isolate that owns the shared spaces. + Isolate* shared_heap_isolate() const { + DCHECK(has_shared_heap()); + Isolate* isolate = + v8_flags.shared_space ? shared_space_isolate() : shared_isolate(); + DCHECK_NOT_NULL(isolate); + return isolate; + } + + bool is_shared_heap_isolate() const { + return is_shared() || is_shared_space_isolate(); + } + GlobalSafepoint* global_safepoint() const { return global_safepoint_.get(); } bool owns_shareable_data() { return owns_shareable_data_; } @@ -2009,7 +2025,8 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { // TODO(pthier): Unify with owns_shareable_data() once the flag // --shared-string-table is removed. bool OwnsStringTables() { - return !v8_flags.shared_string_table || is_shared(); + return !v8_flags.shared_string_table || is_shared() || + is_shared_space_isolate(); } #if USE_SIMULATOR @@ -2020,6 +2037,16 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory { wasm::StackMemory*& wasm_stacks() { return wasm_stacks_; } #endif + // Access to the global "locals block list cache". Caches outer-stack + // allocated variables per ScopeInfo for debug-evaluate. + // We also store a strong reference to the outer ScopeInfo to keep all + // blocklists along a scope chain alive. + void LocalsBlockListCacheSet(Handle<ScopeInfo> scope_info, + Handle<ScopeInfo> outer_scope_info, + Handle<StringSet> locals_blocklist); + // Returns either `TheHole` or `StringSet`. 
+ Object LocalsBlockListCacheGet(Handle<ScopeInfo> scope_info); + private: explicit Isolate(std::unique_ptr<IsolateAllocator> isolate_allocator, bool is_shared); diff --git a/deps/v8/src/execution/local-isolate.cc b/deps/v8/src/execution/local-isolate.cc index aa30289ae6a293..ff3cfc99eff732 100644 --- a/deps/v8/src/execution/local-isolate.cc +++ b/deps/v8/src/execution/local-isolate.cc @@ -60,11 +60,6 @@ int LocalIsolate::GetNextUniqueSharedFunctionInfoId() { } #endif // V8_SFI_HAS_UNIQUE_ID -bool LocalIsolate::is_collecting_type_profile() const { - // TODO(leszeks): Figure out if it makes sense to check this asynchronously. - return isolate_->is_collecting_type_profile(); -} - // Used for lazy initialization, based on an assumption that most // LocalIsolates won't be used to parse any BigInt literals. void LocalIsolate::InitializeBigIntProcessor() { diff --git a/deps/v8/src/execution/local-isolate.h b/deps/v8/src/execution/local-isolate.h index 149b332ca752d6..f67bc7845295cd 100644 --- a/deps/v8/src/execution/local-isolate.h +++ b/deps/v8/src/execution/local-isolate.h @@ -106,8 +106,6 @@ class V8_EXPORT_PRIVATE LocalIsolate final : private HiddenLocalFactory { int GetNextUniqueSharedFunctionInfoId(); #endif // V8_SFI_HAS_UNIQUE_ID - bool is_collecting_type_profile() const; - // TODO(cbruni): rename this back to logger() once the V8FileLogger // refactoring is completed. LocalLogger* v8_file_logger() const { return logger_.get(); } diff --git a/deps/v8/src/execution/loong64/simulator-loong64.cc b/deps/v8/src/execution/loong64/simulator-loong64.cc index f15015fcb68cf3..9e5f13b1e1fdca 100644 --- a/deps/v8/src/execution/loong64/simulator-loong64.cc +++ b/deps/v8/src/execution/loong64/simulator-loong64.cc @@ -45,42 +45,6 @@ uint32_t get_fcsr_condition_bit(uint32_t cc) { } } -static int64_t MultiplyHighSigned(int64_t u, int64_t v) { - uint64_t u0, v0, w0; - int64_t u1, v1, w1, w2, t; - - u0 = u & 0xFFFFFFFFL; - u1 = u >> 32; - v0 = v & 0xFFFFFFFFL; - v1 = v >> 32; - - w0 = u0 * v0; - t = u1 * v0 + (w0 >> 32); - w1 = t & 0xFFFFFFFFL; - w2 = t >> 32; - w1 = u0 * v1 + w1; - - return u1 * v1 + w2 + (w1 >> 32); -} - -static uint64_t MultiplyHighUnsigned(uint64_t u, uint64_t v) { - uint64_t u0, v0, w0; - uint64_t u1, v1, w1, w2, t; - - u0 = u & 0xFFFFFFFFL; - u1 = u >> 32; - v0 = v & 0xFFFFFFFFL; - v1 = v >> 32; - - w0 = u0 * v0; - t = u1 * v0 + (w0 >> 32); - w1 = t & 0xFFFFFFFFL; - w2 = t >> 32; - w1 = u0 * v1 + w1; - - return u1 * v1 + w2 + (w1 >> 32); -} - #ifdef PRINT_SIM_LOG inline void printf_instr(const char* _Format, ...) 
{ va_list varList; @@ -3793,13 +3757,13 @@ void Simulator::DecodeTypeOp17() { printf_instr("MULH_D\t %s: %016lx, %s, %016lx, %s, %016lx\n", Registers::Name(rd_reg()), rd(), Registers::Name(rj_reg()), rj(), Registers::Name(rk_reg()), rk()); - SetResult(rd_reg(), MultiplyHighSigned(rj(), rk())); + SetResult(rd_reg(), base::bits::SignedMulHigh64(rj(), rk())); break; case MULH_DU: printf_instr("MULH_DU\t %s: %016lx, %s, %016lx, %s, %016lx\n", Registers::Name(rd_reg()), rd(), Registers::Name(rj_reg()), rj(), Registers::Name(rk_reg()), rk()); - SetResult(rd_reg(), MultiplyHighUnsigned(rj_u(), rk_u())); + SetResult(rd_reg(), base::bits::UnsignedMulHigh64(rj_u(), rk_u())); break; case MULW_D_W: { printf_instr("MULW_D_W\t %s: %016lx, %s, %016lx, %s, %016lx\n", diff --git a/deps/v8/src/execution/mips64/simulator-mips64.cc b/deps/v8/src/execution/mips64/simulator-mips64.cc index b3149083ac8191..2dce655aede342 100644 --- a/deps/v8/src/execution/mips64/simulator-mips64.cc +++ b/deps/v8/src/execution/mips64/simulator-mips64.cc @@ -43,24 +43,6 @@ uint32_t get_fcsr_condition_bit(uint32_t cc) { } } -static int64_t MultiplyHighSigned(int64_t u, int64_t v) { - uint64_t u0, v0, w0; - int64_t u1, v1, w1, w2, t; - - u0 = u & 0xFFFFFFFFL; - u1 = u >> 32; - v0 = v & 0xFFFFFFFFL; - v1 = v >> 32; - - w0 = u0 * v0; - t = u1 * v0 + (w0 >> 32); - w1 = t & 0xFFFFFFFFL; - w2 = t >> 32; - w1 = u0 * v1 + w1; - - return u1 * v1 + w2 + (w1 >> 32); -} - // This macro provides a platform independent use of sscanf. The reason for // SScanF not being implemented in a platform independent was through // ::v8::internal::OS in the same way as base::SNPrintF is that the Windows C @@ -4090,14 +4072,14 @@ void Simulator::DecodeTypeRegisterSPECIAL() { case DMULT: // DMULT == D_MUL_MUH. if (kArchVariant != kMips64r6) { set_register(LO, rs() * rt()); - set_register(HI, MultiplyHighSigned(rs(), rt())); + set_register(HI, base::bits::SignedMulHigh64(rs(), rt())); } else { switch (sa()) { case MUL_OP: SetResult(rd_reg(), rs() * rt()); break; case MUH_OP: - SetResult(rd_reg(), MultiplyHighSigned(rs(), rt())); + SetResult(rd_reg(), base::bits::SignedMulHigh64(rs(), rt())); break; default: UNIMPLEMENTED_MIPS(); @@ -4106,7 +4088,12 @@ void Simulator::DecodeTypeRegisterSPECIAL() { } break; case DMULTU: - UNIMPLEMENTED_MIPS(); + if (kArchVariant != kMips64r6) { + set_register(LO, rs_u() * rt_u()); + set_register(HI, base::bits::UnsignedMulHigh64(rs_u(), rt_u())); + } else { + UNIMPLEMENTED_MIPS(); + } break; case DIV: case DDIV: { diff --git a/deps/v8/src/execution/ppc/frame-constants-ppc.cc b/deps/v8/src/execution/ppc/frame-constants-ppc.cc index 757d20249e66fa..45a53b07b6d0ae 100644 --- a/deps/v8/src/execution/ppc/frame-constants-ppc.cc +++ b/deps/v8/src/execution/ppc/frame-constants-ppc.cc @@ -16,7 +16,7 @@ namespace internal { Register JavaScriptFrame::fp_register() { return v8::internal::fp; } Register JavaScriptFrame::context_register() { return cp; } Register JavaScriptFrame::constant_pool_pointer_register() { - DCHECK(v8_flags.enable_embedded_constant_pool); + DCHECK(V8_EMBEDDED_CONSTANT_POOL_BOOL); return kConstantPoolRegister; } diff --git a/deps/v8/src/execution/ppc/frame-constants-ppc.h b/deps/v8/src/execution/ppc/frame-constants-ppc.h index 6c82a864fe57c8..352d309379d421 100644 --- a/deps/v8/src/execution/ppc/frame-constants-ppc.h +++ b/deps/v8/src/execution/ppc/frame-constants-ppc.h @@ -16,9 +16,9 @@ namespace internal { class EntryFrameConstants : public AllStatic { public: // Need to take constant pool into account. 
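// (With an embedded constant pool the fixed frame carries one extra pointer
// slot for the pool, which is why the caller-FP offset below is -4 rather
// than -3 system-pointer slots.)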
- static constexpr int kCallerFPOffset = - v8_flags.enable_embedded_constant_pool.value() ? -4 * kSystemPointerSize - : -3 * kSystemPointerSize; + static constexpr int kCallerFPOffset = V8_EMBEDDED_CONSTANT_POOL_BOOL + ? -4 * kSystemPointerSize + : -3 * kSystemPointerSize; }; class WasmCompileLazyFrameConstants : public TypedFrameConstants { diff --git a/deps/v8/src/execution/ppc/simulator-ppc.cc b/deps/v8/src/execution/ppc/simulator-ppc.cc index 0177f2b784dd8b..cf4c11d6cb038f 100644 --- a/deps/v8/src/execution/ppc/simulator-ppc.cc +++ b/deps/v8/src/execution/ppc/simulator-ppc.cc @@ -2584,6 +2584,32 @@ void Simulator::ExecuteGeneric(Instruction* instr) { } break; } + case MULHD: { + int rt = instr->RTValue(); + int ra = instr->RAValue(); + int rb = instr->RBValue(); + int64_t ra_val = get_register(ra); + int64_t rb_val = get_register(rb); + int64_t alu_out = base::bits::SignedMulHigh64(ra_val, rb_val); + set_register(rt, alu_out); + if (instr->Bit(0)) { // RC bit set + SetCR0(static_cast<intptr_t>(alu_out)); + } + break; + } + case MULHDU: { + int rt = instr->RTValue(); + int ra = instr->RAValue(); + int rb = instr->RBValue(); + uint64_t ra_val = get_register(ra); + uint64_t rb_val = get_register(rb); + uint64_t alu_out = base::bits::UnsignedMulHigh64(ra_val, rb_val); + set_register(rt, alu_out); + if (instr->Bit(0)) { // RC bit set + SetCR0(static_cast<intptr_t>(alu_out)); + } + break; + } case NEGX: { int rt = instr->RTValue(); int ra = instr->RAValue(); diff --git a/deps/v8/src/execution/riscv/simulator-riscv.cc b/deps/v8/src/execution/riscv/simulator-riscv.cc index c97f77cc52c68f..334ce9fcf51362 100644 --- a/deps/v8/src/execution/riscv/simulator-riscv.cc +++ b/deps/v8/src/execution/riscv/simulator-riscv.cc @@ -867,7 +867,7 @@ struct type_sew_t<128> { double vs2 = vs2_is_widen \ ? Rvvelt<double>(rvv_vs2_reg(), i) \ : static_cast<double>(Rvvelt<float>(rvv_vs2_reg(), i)); \ - double vs3 = static_cast<double>(Rvvelt<float>(rvv_vd_reg(), i)); \ + double vs3 = Rvvelt<double>(rvv_vd_reg(), i); \ BODY32; \ break; \ } \ @@ -892,7 +892,7 @@ struct type_sew_t<128> { ? 
static_cast<double>(Rvvelt<double>(rvv_vs2_reg(), i)) \ : static_cast<double>(Rvvelt<float>(rvv_vs2_reg(), i)); \ double vs1 = static_cast<double>(Rvvelt<float>(rvv_vs1_reg(), i)); \ - double vs3 = static_cast<double>(Rvvelt<float>(rvv_vd_reg(), i)); \ + double vs3 = Rvvelt<double>(rvv_vd_reg(), i); \ BODY32; \ break; \ } \ @@ -6861,7 +6861,7 @@ void Simulator::DecodeRvvFVV() { } case E32: { double& vd = Rvvelt<double>(rvv_vd_reg(), 0, true); - float vs1 = Rvvelt<float>(rvv_vs1_reg(), 0); + double vs1 = Rvvelt<double>(rvv_vs1_reg(), 0); double alu_out = vs1; for (uint64_t i = rvv_vstart(); i < rvv_vl(); ++i) { double vs2 = static_cast<double>(Rvvelt<float>(rvv_vs2_reg(), i)); @@ -6921,19 +6921,19 @@ void Simulator::DecodeRvvFVV() { break; case RO_V_VFWMACC_VV: RVV_VI_CHECK_DSS(true); - RVV_VI_VFP_VV_LOOP_WIDEN({RVV_VI_VFP_FMA(float, vs2, vs1, vs3)}, false) + RVV_VI_VFP_VV_LOOP_WIDEN({RVV_VI_VFP_FMA(double, vs2, vs1, vs3)}, false) break; case RO_V_VFWNMACC_VV: RVV_VI_CHECK_DSS(true); - RVV_VI_VFP_VV_LOOP_WIDEN({RVV_VI_VFP_FMA(float, -vs2, vs1, -vs3)}, false) + RVV_VI_VFP_VV_LOOP_WIDEN({RVV_VI_VFP_FMA(double, -vs2, vs1, -vs3)}, false) break; case RO_V_VFWMSAC_VV: RVV_VI_CHECK_DSS(true); - RVV_VI_VFP_VV_LOOP_WIDEN({RVV_VI_VFP_FMA(float, vs2, vs1, -vs3)}, false) + RVV_VI_VFP_VV_LOOP_WIDEN({RVV_VI_VFP_FMA(double, vs2, vs1, -vs3)}, false) break; case RO_V_VFWNMSAC_VV: RVV_VI_CHECK_DSS(true); - RVV_VI_VFP_VV_LOOP_WIDEN({RVV_VI_VFP_FMA(float, -vs2, vs1, +vs3)}, false) + RVV_VI_VFP_VV_LOOP_WIDEN({RVV_VI_VFP_FMA(double, -vs2, vs1, +vs3)}, false) break; case RO_V_VFMV_FS: switch (rvv_vsew()) { @@ -7071,19 +7071,19 @@ void Simulator::DecodeRvvFVF() { break; case RO_V_VFWMACC_VF: RVV_VI_CHECK_DSS(true); - RVV_VI_VFP_VF_LOOP_WIDEN({RVV_VI_VFP_FMA(float, vs2, fs1, vs3)}, false) + RVV_VI_VFP_VF_LOOP_WIDEN({RVV_VI_VFP_FMA(double, vs2, fs1, vs3)}, false) break; case RO_V_VFWNMACC_VF: RVV_VI_CHECK_DSS(true); - RVV_VI_VFP_VF_LOOP_WIDEN({RVV_VI_VFP_FMA(float, -vs2, fs1, -vs3)}, false) + RVV_VI_VFP_VF_LOOP_WIDEN({RVV_VI_VFP_FMA(double, -vs2, fs1, -vs3)}, false) break; case RO_V_VFWMSAC_VF: RVV_VI_CHECK_DSS(true); - RVV_VI_VFP_VF_LOOP_WIDEN({RVV_VI_VFP_FMA(float, vs2, fs1, -vs3)}, false) + RVV_VI_VFP_VF_LOOP_WIDEN({RVV_VI_VFP_FMA(double, vs2, fs1, -vs3)}, false) break; case RO_V_VFWNMSAC_VF: RVV_VI_CHECK_DSS(true); - RVV_VI_VFP_VF_LOOP_WIDEN({RVV_VI_VFP_FMA(float, -vs2, fs1, vs3)}, false) + RVV_VI_VFP_VF_LOOP_WIDEN({RVV_VI_VFP_FMA(double, -vs2, fs1, vs3)}, false) break; default: UNSUPPORTED_RISCV(); diff --git a/deps/v8/src/execution/s390/simulator-s390.cc b/deps/v8/src/execution/s390/simulator-s390.cc index 66fe039cb3e0f6..4d4a0bbf395872 100644 --- a/deps/v8/src/execution/s390/simulator-s390.cc +++ b/deps/v8/src/execution/s390/simulator-s390.cc @@ -1291,6 +1291,8 @@ void Simulator::EvalTableInit() { EvalTable[LLGCR] = &Simulator::Evaluate_LLGCR; EvalTable[LLGHR] = &Simulator::Evaluate_LLGHR; EvalTable[MLGR] = &Simulator::Evaluate_MLGR; + EvalTable[MGRK] = &Simulator::Evaluate_MGRK; + EvalTable[MG] = &Simulator::Evaluate_MG; EvalTable[DLGR] = &Simulator::Evaluate_DLGR; EvalTable[ALCGR] = &Simulator::Evaluate_ALCGR; EvalTable[SLBGR] = &Simulator::Evaluate_SLBGR; @@ -8744,7 +8746,37 @@ EVALUATE(LLGHR) { return length; } +EVALUATE(MG) { + UNIMPLEMENTED(); + USE(instr); + return 0; +} + +EVALUATE(MGRK) { + DCHECK_OPCODE(MGRK); + DECODE_RRF_A_INSTRUCTION(r1, r2, r3); + // 64-bit Non-clobbering arithmetics / bitwise ops. 
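+  // (Semantics sketch: MGRK forms the signed 128-bit product of r2 and r3;
+  // the high 64 bits land in r1 via base::bits::SignedMulHigh64 and the low
+  // 64 bits in r1 + 1 via the ordinary wrapping 64-bit multiply below.)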
+ int64_t r2_val = get_register(r2); + int64_t r3_val = get_register(r3); + set_register(r1, base::bits::SignedMulHigh64(r2_val, r3_val)); + set_register(r1 + 1, r2_val * r3_val); + return length; +} + EVALUATE(MLGR) { + DCHECK_OPCODE(MLGR); + DECODE_RRE_INSTRUCTION(r1, r2); + // 64-bit Non-clobbering unsigned arithmetics + CHECK_EQ(r1 % 2, 0); + uint64_t r1_plus_1_val = get_register(r1 + 1); + uint64_t r2_val = get_register(r2); + + set_register(r1, base::bits::UnsignedMulHigh64(r2_val, r1_plus_1_val)); + set_register(r1 + 1, r2_val * r1_plus_1_val); + return length; +} + +EVALUATE(MLG) { UNIMPLEMENTED(); USE(instr); return 0; @@ -9957,12 +9989,6 @@ EVALUATE(LGAT) { return 0; } -EVALUATE(MLG) { - UNIMPLEMENTED(); - USE(instr); - return 0; -} - EVALUATE(DLG) { DCHECK_OPCODE(DLG); #ifdef V8_TARGET_ARCH_S390X diff --git a/deps/v8/src/execution/s390/simulator-s390.h b/deps/v8/src/execution/s390/simulator-s390.h index 399207725f69c0..b7ccf1007cd7ab 100644 --- a/deps/v8/src/execution/s390/simulator-s390.h +++ b/deps/v8/src/execution/s390/simulator-s390.h @@ -1205,6 +1205,9 @@ class Simulator : public SimulatorBase { EVALUATE(CZXT); EVALUATE(CDZT); EVALUATE(CXZT); + EVALUATE(MG); + EVALUATE(MGRK); + #undef EVALUATE }; diff --git a/deps/v8/src/execution/tiering-manager.cc b/deps/v8/src/execution/tiering-manager.cc index 7da8359550b4c6..a18463e7ec4a0d 100644 --- a/deps/v8/src/execution/tiering-manager.cc +++ b/deps/v8/src/execution/tiering-manager.cc @@ -11,12 +11,14 @@ #include "src/codegen/compilation-cache.h" #include "src/codegen/compiler.h" #include "src/codegen/pending-optimization-table.h" +#include "src/common/globals.h" #include "src/diagnostics/code-tracer.h" #include "src/execution/execution.h" #include "src/execution/frames-inl.h" #include "src/handles/global-handles.h" #include "src/init/bootstrapper.h" #include "src/interpreter/interpreter.h" +#include "src/objects/code-kind.h" #include "src/objects/code.h" #include "src/tracing/trace-event.h" @@ -261,7 +263,7 @@ void TieringManager::RequestOsrAtNextOpportunity(JSFunction function) { } void TieringManager::MaybeOptimizeFrame(JSFunction function, - CodeKind code_kind) { + CodeKind calling_code_kind) { const TieringState tiering_state = function.feedback_vector().tiering_state(); const TieringState osr_tiering_state = function.feedback_vector().osr_tiering_state(); @@ -288,24 +290,15 @@ void TieringManager::MaybeOptimizeFrame(JSFunction function, // Continue below and do a normal optimized compile as well. } - const bool is_marked_for_any_optimization = - (static_cast<uint32_t>(tiering_state) & kNoneOrInProgressMask) != 0; // Baseline OSR uses a separate mechanism and must not be considered here, // therefore we limit to kOptimizedJSFunctionCodeKindsMask. // TODO(v8:7700): Change the condition below for Maglev OSR once it is // implemented. - if (is_marked_for_any_optimization || - function.HasAvailableHigherTierCodeThanWithFilter( - code_kind, kOptimizedJSFunctionCodeKindsMask)) { + if (IsRequestTurbofan(tiering_state) || + function.HasAvailableCodeKind(CodeKind::TURBOFAN)) { // OSR kicks in only once we've previously decided to tier up, but we are - // still in the lower-tier frame (this implies a long-running loop). - // - // TODO(v8:7700): In the presence of Maglev, OSR is triggered much earlier - // than with the old pipeline since we tier up to Maglev earlier which - // affects both conditions above. 
This *seems* fine (when stuck in a loop - // we want to tier up, regardless of the active tier), but we may want to - // think about this again at some point. - if (SmallEnoughForOSR(isolate_, function, code_kind)) { + // still in a lower-tier frame (this implies a long-running loop). + if (SmallEnoughForOSR(isolate_, function, calling_code_kind)) { TryIncrementOsrUrgency(isolate_, function); } @@ -314,20 +307,33 @@ void TieringManager::MaybeOptimizeFrame(JSFunction function, return; } - DCHECK(!is_marked_for_any_optimization && - !function.HasAvailableHigherTierCodeThanWithFilter( - code_kind, kOptimizedJSFunctionCodeKindsMask)); - OptimizationDecision d = ShouldOptimize(function, code_kind); + DCHECK(!IsRequestTurbofan(tiering_state)); + DCHECK(!function.HasAvailableCodeKind(CodeKind::TURBOFAN)); + OptimizationDecision d = ShouldOptimize(function, calling_code_kind); + // We might be stuck in a baseline frame that wants to tier up to Maglev, but + // is in a loop, and can't OSR, because Maglev doesn't have OSR. Allow it to + // skip over Maglev by re-checking ShouldOptimize as if we were in Maglev. + // TODO(v8:7700): Remove this when Maglev can OSR. + static_assert(!CodeKindCanOSR(CodeKind::MAGLEV)); + if (d.should_optimize() && d.code_kind == CodeKind::MAGLEV) { + bool is_marked_for_maglev_optimization = + IsRequestMaglev(tiering_state) || + function.HasAvailableCodeKind(CodeKind::MAGLEV); + if (is_marked_for_maglev_optimization) { + d = ShouldOptimize(function, CodeKind::MAGLEV); + } + } + if (d.should_optimize()) Optimize(function, d); } -OptimizationDecision TieringManager::ShouldOptimize(JSFunction function, - CodeKind code_kind) { - if (TiersUpToMaglev(code_kind) && +OptimizationDecision TieringManager::ShouldOptimize( + JSFunction function, CodeKind calling_code_kind) { + if (TiersUpToMaglev(calling_code_kind) && function.shared().PassesFilter(v8_flags.maglev_filter) && !function.shared(isolate_).maglev_compilation_failed()) { return OptimizationDecision::Maglev(); - } else if (code_kind == CodeKind::TURBOFAN) { + } else if (calling_code_kind == CodeKind::TURBOFAN) { // Already in the top tier. 
return OptimizationDecision::DoNotOptimize(); } diff --git a/deps/v8/src/flags/flag-definitions.h b/deps/v8/src/flags/flag-definitions.h index 16f796d43d0b0a..c31a38930475db 100644 --- a/deps/v8/src/flags/flag-definitions.h +++ b/deps/v8/src/flags/flag-definitions.h @@ -225,26 +225,28 @@ DEFINE_BOOL(harmony_shipping, true, "enable all shipped harmony features") #define HARMONY_INPROGRESS_BASE(V) \ V(harmony_weak_refs_with_cleanup_some, \ "harmony weak references with FinalizationRegistry.prototype.cleanupSome") \ - V(harmony_import_assertions, "harmony import assertions") \ V(harmony_temporal, "Temporal") \ V(harmony_shadow_realm, "harmony ShadowRealm") \ V(harmony_struct, "harmony structs, shared structs, and shared arrays") \ - V(harmony_change_array_by_copy, "harmony change-Array-by-copy") \ - V(harmony_regexp_unicode_sets, "harmony RegExp Unicode Sets") + V(harmony_regexp_unicode_sets, "harmony RegExp Unicode Sets") \ + V(harmony_json_parse_with_source, "harmony json parse with source") #ifdef V8_INTL_SUPPORT -#define HARMONY_INPROGRESS(V) \ - HARMONY_INPROGRESS_BASE(V) \ - V(harmony_intl_best_fit_matcher, "Intl BestFitMatcher") +#define HARMONY_INPROGRESS(V) \ + HARMONY_INPROGRESS_BASE(V) \ + V(harmony_intl_best_fit_matcher, "Intl BestFitMatcher") \ + V(harmony_intl_duration_format, "Intl DurationFormat API") #else #define HARMONY_INPROGRESS(V) HARMONY_INPROGRESS_BASE(V) #endif // Features that are complete (but still behind the --harmony flag). -#define HARMONY_STAGED_BASE(V) \ - V(harmony_array_grouping, "harmony array grouping") \ - V(harmony_rab_gsab, \ - "harmony ResizableArrayBuffer / GrowableSharedArrayBuffer") +#define HARMONY_STAGED_BASE(V) \ + V(harmony_rab_gsab, \ + "harmony ResizableArrayBuffer / GrowableSharedArrayBuffer") \ + V(harmony_array_grouping, "harmony array grouping") \ + V(harmony_change_array_by_copy, "harmony change-Array-by-copy") \ + V(harmony_symbol_as_weakmap_key, "harmony symbols as weakmap keys") #ifdef V8_INTL_SUPPORT #define HARMONY_STAGED(V) HARMONY_STAGED_BASE(V) @@ -257,7 +259,8 @@ DEFINE_BOOL(harmony_shipping, true, "enable all shipped harmony features") V(harmony_sharedarraybuffer, "harmony sharedarraybuffer") \ V(harmony_atomics, "harmony atomics") \ V(harmony_class_static_blocks, "harmony static initializer blocks") \ - V(harmony_array_find_last, "harmony array find last helpers") + V(harmony_array_find_last, "harmony array find last helpers") \ + V(harmony_import_assertions, "harmony import assertions") #ifdef V8_INTL_SUPPORT #define HARMONY_SHIPPING(V) \ @@ -453,7 +456,7 @@ DEFINE_BOOL(future, FUTURE_BOOL, DEFINE_BOOL(maglev, false, "enable the maglev optimizing compiler") DEFINE_BOOL(maglev_inlining, false, "enable inlining in the maglev optimizing compiler") -DEFINE_BOOL(maglev_reuse_stack_slots, false, +DEFINE_BOOL(maglev_reuse_stack_slots, true, "reuse stack slots in the maglev optimizing compiler") // We stress maglev by setting a very low interrupt budget for maglev. This @@ -475,6 +478,11 @@ DEFINE_BOOL(print_maglev_code, false, "print maglev code") DEFINE_BOOL(trace_maglev_graph_building, false, "trace maglev graph building") DEFINE_BOOL(trace_maglev_regalloc, false, "trace maglev register allocation") +// TODO(v8:7700): Remove once stable. 
+DEFINE_BOOL(maglev_function_context_specialization, true, + "enable function context specialization in maglev") +DEFINE_BOOL(maglev_ool_prologue, false, "use the Maglev out of line prologue") + #if ENABLE_SPARKPLUG DEFINE_WEAK_IMPLICATION(future, sparkplug) DEFINE_WEAK_IMPLICATION(future, flush_baseline_code) @@ -958,9 +966,20 @@ DEFINE_FLOAT(script_delay_fraction, 0.0, "busy wait after each Script::Run by the given fraction of the " "run's duration") -DEFINE_BOOL(turboshaft, false, "enable TurboFan's Turboshaft phases") +DEFINE_BOOL(turboshaft, false, "enable TurboFan's Turboshaft phases for JS") +DEFINE_WEAK_IMPLICATION(future, turboshaft) DEFINE_BOOL(turboshaft_trace_reduction, false, "trace individual Turboshaft reduction steps") +DEFINE_BOOL(turboshaft_wasm, false, + "enable TurboFan's Turboshaft phases for wasm") +#ifdef DEBUG +DEFINE_UINT64(turboshaft_opt_bisect_limit, std::numeric_limits<uint64_t>::max(), + "stop applying optional optimizations after a specified number " + "of steps, useful for bisecting optimization bugs") +DEFINE_UINT64(turboshaft_opt_bisect_break, std::numeric_limits<uint64_t>::max(), + "abort after a specified number of steps, useful for bisecting " + "optimization bugs") +#endif // DEBUG // Favor memory over execution speed. DEFINE_BOOL(optimize_for_size, false, @@ -1093,7 +1112,10 @@ FOREACH_WASM_FEATURE_FLAG(DECL_WASM_FLAG) DEFINE_IMPLICATION(experimental_wasm_gc, experimental_wasm_typed_funcref) -DEFINE_BOOL(wasm_gc_js_interop, false, "experimental WasmGC-JS interop") +DEFINE_IMPLICATION(experimental_wasm_stack_switching, + experimental_wasm_type_reflection) + +DEFINE_BOOL(wasm_gc_js_interop, true, "experimental WasmGC-JS interop") DEFINE_BOOL(wasm_staging, false, "enable staged wasm features") @@ -1127,12 +1149,8 @@ DEFINE_BOOL(trace_wasm_inlining, false, "trace wasm inlining") DEFINE_BOOL(trace_wasm_speculative_inlining, false, "trace wasm speculative inlining") DEFINE_BOOL(trace_wasm_typer, false, "trace wasm typer") -DEFINE_BOOL(wasm_type_canonicalization, false, - "apply isorecursive canonicalization on wasm types") DEFINE_IMPLICATION(wasm_speculative_inlining, wasm_inlining) DEFINE_WEAK_IMPLICATION(experimental_wasm_gc, wasm_speculative_inlining) -DEFINE_WEAK_IMPLICATION(experimental_wasm_typed_funcref, - wasm_type_canonicalization) DEFINE_BOOL(wasm_loop_unrolling, true, "enable loop unrolling for wasm functions") @@ -1224,6 +1242,8 @@ DEFINE_BOOL(global_gc_scheduling, true, DEFINE_BOOL(gc_global, false, "always perform global GCs") DEFINE_BOOL(shared_space, false, "Implement shared heap as shared space on a main isolate.") +// Don't use a map space with --shared-space in order to avoid shared map space. +DEFINE_NEG_IMPLICATION(shared_space, use_map_space) // TODO(12950): The next two flags only have an effect if // V8_ENABLE_ALLOCATION_TIMEOUT is set, so we should only define them in that @@ -1399,9 +1419,9 @@ DEFINE_BOOL(compact, true, "Perform compaction on full GCs based on V8's default heuristics") DEFINE_BOOL(compact_code_space, true, "Perform code space compaction on full collections.") -DEFINE_BOOL(compact_maps, false, +DEFINE_BOOL(compact_maps, true, "Perform compaction on maps on full collections.") -DEFINE_BOOL(use_map_space, true, "Use separate space for maps.") +DEFINE_BOOL(use_map_space, false, "Use separate space for maps.") // Without a map space we have to compact maps. 
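// (Reading of the implication below, assuming the usual semantics of the
// flag-implication macros: passing --no-use-map-space forces compact_maps
// back to true, matching the comment above.)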
DEFINE_NEG_VALUE_IMPLICATION(use_map_space, compact_maps, true) DEFINE_BOOL(compact_on_every_full_gc, false, @@ -1685,15 +1705,20 @@ DEFINE_BOOL( trace_side_effect_free_debug_evaluate, false, "print debug messages for side-effect-free debug-evaluate for testing") DEFINE_BOOL(hard_abort, true, "abort by crashing") +DEFINE_NEG_IMPLICATION(fuzzing, hard_abort) DEFINE_BOOL(experimental_async_stack_tagging_api, true, "enable experimental async stacks tagging API") DEFINE_BOOL(experimental_value_unavailable, false, "enable experimental <value unavailable> in scopes") +DEFINE_BOOL(experimental_reuse_locals_blocklists, false, + "enable reuse of local blocklists across multiple debug-evaluates") DEFINE_BOOL( live_edit_top_frame, true, "enable support for live-editing the top-most function on the stack") +DEFINE_BOOL(experimental_remove_internal_scopes_property, true, + "don't report the artificial [[Scopes]] property for functions") // disassembler DEFINE_BOOL(log_colour, ENABLE_LOG_COLOUR, @@ -1973,11 +1998,14 @@ DEFINE_BOOL(trace_minor_mc_parallel_marking, false, "trace parallel marking for the young generation") DEFINE_BOOL(minor_mc, false, "perform young generation mark compact GCs") DEFINE_IMPLICATION(minor_mc, separate_gc_phases) -DEFINE_BOOL(concurrent_minor_mc, false, - "perform young generation mark compact GCs concurrently") -DEFINE_NEG_NEG_IMPLICATION(concurrent_marking, concurrent_minor_mc) -DEFINE_IMPLICATION(concurrent_minor_mc, minor_mc) -DEFINE_IMPLICATION(concurrent_minor_mc, concurrent_marking) + +DEFINE_BOOL(concurrent_minor_mc_marking, false, + "perform young generation marking concurrently") +DEFINE_NEG_NEG_IMPLICATION(concurrent_marking, concurrent_minor_mc_marking) + +DEFINE_BOOL(concurrent_minor_mc_sweeping, false, + "perform young generation sweeping concurrently") +DEFINE_NEG_NEG_IMPLICATION(concurrent_sweeping, concurrent_minor_mc_sweeping) // // Dev shell flags @@ -2216,12 +2244,15 @@ DEFINE_BOOL(interpreted_frames_native_stack, false, "Show interpreted frames on the native stack (useful for external " "profilers).") +#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) DEFINE_BOOL(enable_etw_stack_walking, false, "Enable etw stack walking for windows") +DEFINE_WEAK_IMPLICATION(future, enable_etw_stack_walking) // Don't move code objects. DEFINE_NEG_IMPLICATION(enable_etw_stack_walking, compact_code_space) -#ifndef V8_TARGET_ARCH_ARM -DEFINE_IMPLICATION(enable_etw_stack_walking, interpreted_frames_native_stack) +#else +DEFINE_BOOL_READONLY(enable_etw_stack_walking, false, + "Enable etw stack walking for windows") #endif // @@ -2335,16 +2366,6 @@ DEFINE_IMPLICATION(verify_predictable, predictable) DEFINE_INT(dump_allocations_digest_at_alloc, -1, "dump allocations digest each n-th allocation") -// -// Read-only flags -// -#undef FLAG -#define FLAG FLAG_READONLY - -// assembler.h -DEFINE_BOOL(enable_embedded_constant_pool, V8_EMBEDDED_CONSTANT_POOL, - "enable use of embedded constant pools (PPC only)") - // Cleanup... 
#undef FLAG_FULL #undef FLAG_READONLY diff --git a/deps/v8/src/handles/global-handles.cc b/deps/v8/src/handles/global-handles.cc index 40051df972b3a8..d5958fae0ed740 100644 --- a/deps/v8/src/handles/global-handles.cc +++ b/deps/v8/src/handles/global-handles.cc @@ -292,6 +292,7 @@ void GlobalHandles::NodeSpace<NodeType>::Release(NodeType* node) { template <class NodeType> void GlobalHandles::NodeSpace<NodeType>::Free(NodeType* node) { + CHECK(node->IsInUse()); node->Release(first_free_); first_free_ = node; BlockType* block = BlockType::From(node); @@ -730,14 +731,13 @@ void GlobalHandles::TracedNode::Verify(const Address* const* slot) { const TracedNode* node = FromLocation(*slot); auto* global_handles = GlobalHandles::From(node); DCHECK(node->IsInUse()); - auto* incremental_marking = - global_handles->isolate()->heap()->incremental_marking(); + Heap* heap = global_handles->isolate()->heap(); + auto* incremental_marking = heap->incremental_marking(); if (incremental_marking && incremental_marking->IsMarking()) { Object object = node->object(); if (object.IsHeapObject()) { DCHECK_IMPLIES(node->markbit<AccessMode::ATOMIC>(), - !incremental_marking->marking_state()->IsWhite( - HeapObject::cast(object))); + !heap->marking_state()->IsWhite(HeapObject::cast(object))); } } DCHECK_IMPLIES(ObjectInYoungGeneration(node->object()), @@ -817,7 +817,7 @@ Handle<Object> GlobalHandles::CopyGlobal(Address* location) { GlobalHandles* global_handles = Node::FromLocation(location)->global_handles(); #ifdef VERIFY_HEAP - if (i::FLAG_verify_heap) { + if (v8_flags.verify_heap) { Object(*location).ObjectVerify(global_handles->isolate()); } #endif // VERIFY_HEAP @@ -847,7 +847,7 @@ void GlobalHandles::CopyTracedReference(const Address* const* from, TracedNode::Verify(from); TracedNode::Verify(to); #ifdef VERIFY_HEAP - if (i::FLAG_verify_heap) { + if (v8_flags.verify_heap) { Object(**to).ObjectVerify(global_handles->isolate()); } #endif // VERIFY_HEAP @@ -931,17 +931,26 @@ void GlobalHandles::Destroy(Address* location) { // static void GlobalHandles::DestroyTracedReference(Address* location) { - if (location != nullptr) { - TracedNode* node = TracedNode::FromLocation(location); - auto* global_handles = GlobalHandles::From(node); - // When marking is off the handle may be freed immediately. Note that this - // includes also the case when invoking the first pass callbacks during the - // atomic pause which requires releasing a node fully. - if (!global_handles->is_marking_) { - NodeSpace<TracedNode>::Release(node); - return; - } + if (!location) return; + TracedNode* node = TracedNode::FromLocation(location); + auto* global_handles = GlobalHandles::From(node); + DCHECK_IMPLIES(global_handles->is_marking_, + !global_handles->is_sweeping_on_mutator_thread_); + DCHECK_IMPLIES(global_handles->is_sweeping_on_mutator_thread_, + !global_handles->is_marking_); + + // If sweeping on the mutator thread is running then the handle destruction + // may be a result of a Reset() call from a destructor. The node will be + // reclaimed on the next cycle. + // + // This allows v8::TracedReference::Reset() calls from destructors on + // objects that may be used from stack and heap. + if (global_handles->is_sweeping_on_mutator_thread_) { + return; + } + + if (global_handles->is_marking_) { // Incremental marking is on. This also covers the scavenge case which // prohibits eagerly reclaiming nodes when marking is on during a scavenge. // @@ -950,7 +959,13 @@ void GlobalHandles::DestroyTracedReference(Address* location) { // marked. 
Eagerly clear out the object here to avoid needlessly marking it // from this point on. The node will be reclaimed on the next cycle. node->clear_object(); + return; } + + // In case marking and sweeping are off, the handle may be freed immediately. + // Note that this includes also the case when invoking the first pass + // callbacks during the atomic pause which requires releasing a node fully. + NodeSpace<TracedNode>::Release(node); } using GenericCallback = v8::WeakCallbackInfo<void>::Callback; @@ -1022,7 +1037,7 @@ void GlobalHandles::IterateWeakRootsForPhantomHandles( void GlobalHandles::ComputeWeaknessForYoungObjects( WeakSlotCallback is_unmodified) { - if (!FLAG_reclaim_unmodified_wrappers) return; + if (!v8_flags.reclaim_unmodified_wrappers) return; // Treat all objects as roots during incremental marking to avoid corrupting // marking worklists. @@ -1068,7 +1083,7 @@ void GlobalHandles::ProcessWeakYoungObjects( } } - if (!FLAG_reclaim_unmodified_wrappers) return; + if (!v8_flags.reclaim_unmodified_wrappers) return; auto* const handler = isolate()->heap()->GetEmbedderRootsHandler(); for (TracedNode* node : traced_young_nodes_) { @@ -1220,7 +1235,7 @@ void GlobalHandles::PostGarbageCollectionProcessing( DCHECK_EQ(Heap::NOT_IN_GC, isolate_->heap()->gc_state()); const bool synchronous_second_pass = - FLAG_optimize_for_size || FLAG_predictable || + v8_flags.optimize_for_size || v8_flags.predictable || isolate_->heap()->IsTearingDown() || (gc_callback_flags & (kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage | diff --git a/deps/v8/src/handles/global-handles.h b/deps/v8/src/handles/global-handles.h index 47a921a1e1e6f7..7ab929f9ca567b 100644 --- a/deps/v8/src/handles/global-handles.h +++ b/deps/v8/src/handles/global-handles.h @@ -165,6 +165,13 @@ class V8_EXPORT_PRIVATE GlobalHandles final { void IterateAllRootsForTesting(v8::PersistentHandleVisitor* v); + void NotifyStartSweepingOnMutatorThread() { + is_sweeping_on_mutator_thread_ = true; + } + void NotifyEndSweepingOnMutatorThread() { + is_sweeping_on_mutator_thread_ = false; + } + #ifdef DEBUG void PrintStats(); void Print(); @@ -198,6 +205,7 @@ class V8_EXPORT_PRIVATE GlobalHandles final { Isolate* const isolate_; bool is_marking_ = false; + bool is_sweeping_on_mutator_thread_ = false; std::unique_ptr<NodeSpace<Node>> regular_nodes_; // Contains all nodes holding young objects. Note: when the list diff --git a/deps/v8/src/handles/handles-inl.h b/deps/v8/src/handles/handles-inl.h index 43c2ef807ea58b..e47ee146bff615 100644 --- a/deps/v8/src/handles/handles-inl.h +++ b/deps/v8/src/handles/handles-inl.h @@ -115,7 +115,7 @@ HandleScope& HandleScope::operator=(HandleScope&& other) V8_NOEXCEPT { void HandleScope::CloseScope(Isolate* isolate, Address* prev_next, Address* prev_limit) { #ifdef DEBUG - int before = FLAG_check_handle_count ? NumberOfHandles(isolate) : 0; + int before = v8_flags.check_handle_count ? NumberOfHandles(isolate) : 0; #endif DCHECK_NOT_NULL(isolate); HandleScopeData* current = isolate->handle_scope_data(); @@ -136,7 +136,7 @@ void HandleScope::CloseScope(Isolate* isolate, Address* prev_next, static_cast<size_t>(reinterpret_cast<Address>(limit) - reinterpret_cast<Address>(current->next))); #ifdef DEBUG - int after = FLAG_check_handle_count ? NumberOfHandles(isolate) : 0; + int after = v8_flags.check_handle_count ? 
NumberOfHandles(isolate) : 0; DCHECK_LT(after - before, kCheckHandleThreshold); DCHECK_LT(before, kCheckHandleThreshold); #endif diff --git a/deps/v8/src/handles/handles.cc b/deps/v8/src/handles/handles.cc index 940da8eb95b947..332dc723b063bb 100644 --- a/deps/v8/src/handles/handles.cc +++ b/deps/v8/src/handles/handles.cc @@ -34,6 +34,13 @@ ASSERT_TRIVIALLY_COPYABLE(HandleBase); ASSERT_TRIVIALLY_COPYABLE(Handle<Object>); ASSERT_TRIVIALLY_COPYABLE(MaybeHandle<Object>); +#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING + +ASSERT_TRIVIALLY_COPYABLE(DirectHandle<Object>); +ASSERT_TRIVIALLY_COPYABLE(DirectMaybeHandle<Object>); + +#endif // V8_ENABLE_CONSERVATIVE_STACK_SCANNING + #ifdef DEBUG bool HandleBase::IsDereferenceAllowed() const { DCHECK_NOT_NULL(location_); @@ -51,7 +58,7 @@ bool HandleBase::IsDereferenceAllowed() const { if (!AllowHandleDereference::IsAllowed()) return false; // Allocations in the shared heap may be dereferenced by multiple threads. - if (isolate->is_shared()) return true; + if (heap_object.InSharedWritableHeap()) return true; LocalHeap* local_heap = isolate->CurrentLocalHeap(); @@ -78,7 +85,42 @@ bool HandleBase::IsDereferenceAllowed() const { // TODO(leszeks): Check if the main thread owns this handle. return true; } -#endif + +#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING + +template <typename T> +bool DirectHandle<T>::IsDereferenceAllowed() const { + DCHECK_NE(obj_, kTaggedNullAddress); + Object object(obj_); + if (object.IsSmi()) return true; + HeapObject heap_object = HeapObject::cast(object); + if (IsReadOnlyHeapObject(heap_object)) return true; + Isolate* isolate = GetIsolateFromWritableObject(heap_object); + if (!AllowHandleDereference::IsAllowed()) return false; + + // Allocations in the shared heap may be dereferenced by multiple threads. + if (isolate->is_shared()) return true; + + LocalHeap* local_heap = isolate->CurrentLocalHeap(); + + // Local heap can't access handles when parked + if (!local_heap->IsHandleDereferenceAllowed()) { + StdoutStream{} << "Cannot dereference handle owned by " + << "non-running local heap\n"; + return false; + } + + // If LocalHeap::Current() is null, we're on the main thread -- if we were to + // check main thread HandleScopes here, we should additionally check the + // main-thread LocalHeap. + DCHECK_EQ(ThreadId::Current(), isolate->thread_id()); + + return true; +} + +#endif // V8_ENABLE_CONSERVATIVE_STACK_SCANNING + +#endif // DEBUG int HandleScope::NumberOfHandles(Isolate* isolate) { HandleScopeImplementer* impl = isolate->handle_scope_implementer(); diff --git a/deps/v8/src/handles/handles.h b/deps/v8/src/handles/handles.h index 0d2bdb078fb98d..29da54bf9fa59a 100644 --- a/deps/v8/src/handles/handles.h +++ b/deps/v8/src/handles/handles.h @@ -37,6 +37,8 @@ class SmallOrderedNameDictionary; class SwissNameDictionary; class WasmExportedFunctionData; +constexpr Address kTaggedNullAddress = 0x1; + // ---------------------------------------------------------------------------- // Base class for Handle instantiations. Don't use directly. class HandleBase { @@ -371,6 +373,87 @@ struct HandleScopeData final { } }; +#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING + +// ---------------------------------------------------------------------------- +// A DirectHandle provides a reference to an object without an intermediate +// pointer. +// +// A DirectHandle is a simple wrapper around a tagged pointer to a heap object +// or a SMI. Its methods are symmetrical with Handle, so that Handles can be +// easily migrated. 
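+//
+// A minimal usage sketch (illustrative only, not exercised by this patch;
+// assumes an Isolate* `isolate` in scope):
+//   DirectHandle<String> str(ReadOnlyRoots(isolate).empty_string().ptr());
+//   String contents = *str;  // Direct load; no handle block involved.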
+// +// DirectHandles are intended to be used with conservative stack scanning, as +// they do not provide a mechanism for keeping an object alive across a garbage +// collection. +// +// Further motivation is explained in the design doc: +// https://docs.google.com/document/d/1uRGYQM76vk1fc_aDqDH3pm2qhaJtnK2oyzeVng4cS6I/ +template <typename T> +class DirectHandle final { + public: + V8_INLINE explicit DirectHandle() : obj_(kTaggedNullAddress) { + // Skip static type check in order to allow DirectHandle<XXX>::null() as + // default parameter values in non-inl header files without requiring full + // definition of type XXX. + } + + V8_INLINE bool is_null() const { return obj_ == kTaggedNullAddress; } + + V8_INLINE explicit DirectHandle(Address object) : obj_(object) { + // This static type check also fails for forward class declarations. + static_assert(std::is_convertible<T*, Object*>::value, + "static type violation"); + } + + // Constructor for handling automatic up casting. + // Ex. DirectHandle<JSFunction> can be passed when DirectHandle<Object> is + // expected. + template <typename S, typename = typename std::enable_if< + std::is_convertible<S*, T*>::value>::type> + V8_INLINE DirectHandle(DirectHandle<S> handle) : obj_(handle.obj_) {} + + V8_INLINE T operator->() const { return obj_; } + + V8_INLINE T operator*() const { + SLOW_DCHECK(IsDereferenceAllowed()); + return T::unchecked_cast(Object(obj_)); + } + + template <typename S> + inline static const DirectHandle<T> cast(DirectHandle<S> that); + + // Consider declaring values that contain empty handles as + // MaybeHandle to force validation before being used as handles. + static const DirectHandle<T> null() { return DirectHandle<T>(); } + + protected: +#ifdef DEBUG + bool V8_EXPORT_PRIVATE IsDereferenceAllowed() const; +#else + V8_INLINE + bool V8_EXPORT_PRIVATE IsDereferenceAllowed() const { return true; } +#endif // DEBUG + + private: + // DirectHandles of different classes are allowed to access each other's + // obj_. + template <typename> + friend class DirectHandle; + // MaybeDirectHandle is allowed to access obj_. + template <typename> + friend class MaybeDirectHandle; + + // This is a direct pointer to either a tagged object or SMI. Design overview: + // https://docs.google.com/document/d/1uRGYQM76vk1fc_aDqDH3pm2qhaJtnK2oyzeVng4cS6I/ + T obj_; +}; + +template <typename T> +std::ostream& operator<<(std::ostream& os, DirectHandle<T> handle); + +#endif + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/handles/maybe-handles.h b/deps/v8/src/handles/maybe-handles.h index 15397ef0df5628..07607a71733034 100644 --- a/deps/v8/src/handles/maybe-handles.h +++ b/deps/v8/src/handles/maybe-handles.h @@ -117,6 +117,71 @@ class MaybeObjectHandle { MaybeHandle<Object> handle_; }; +#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING + +template <typename T> +class DirectMaybeHandle final { + public: + V8_INLINE DirectMaybeHandle() = default; + + V8_INLINE DirectMaybeHandle(NullMaybeHandleType) {} + + // Constructor for handling automatic up casting from DirectHandle. + // Ex. DirectHandle<JSArray> can be passed when DirectMaybeHandle<Object> is + // expected. + template <typename S, typename = typename std::enable_if< + std::is_convertible<S*, T*>::value>::type> + V8_INLINE DirectMaybeHandle(DirectHandle<S> handle) + : location_(handle.location_) {} + + // Constructor for handling automatic up casting. + // Ex. DirectMaybeHandle<JSArray> can be passed when DirectHandle<Object> is + // expected. 
+ template <typename S, typename = typename std::enable_if< + std::is_convertible<S*, T*>::value>::type> + V8_INLINE DirectMaybeHandle(DirectMaybeHandle<S> maybe_handle) + : location_(maybe_handle.location_) {} + + V8_INLINE DirectMaybeHandle(T object, Isolate* isolate); + V8_INLINE DirectMaybeHandle(T object, LocalHeap* local_heap); + + V8_INLINE void Assert() const { DCHECK_NE(location_, kTaggedNullAddress); } + V8_INLINE void Check() const { CHECK_NE(location_, kTaggedNullAddress); } + + V8_INLINE DirectHandle<T> ToDirectHandleChecked() const { + Check(); + return DirectHandle<T>(location_); + } + + // Convert to a DirectHandle with a type that can be upcasted to. + template <typename S> + V8_WARN_UNUSED_RESULT V8_INLINE bool ToDirectHandle( + DirectHandle<S>* out) const { + if (location_ == kTaggedNullAddress) { + *out = DirectHandle<T>::null(); + return false; + } else { + *out = DirectHandle<T>(location_); + return true; + } + } + + // Returns the raw address where this direct handle is stored. + V8_INLINE Address address() const { return location_; } + + bool is_null() const { return location_ == kTaggedNullAddress; } + + protected: + Address location_ = kTaggedNullAddress; + + // DirectMaybeHandles of different classes are allowed to access each + // other's location_. + template <typename> + friend class DirectMaybeHandle; +}; + +#endif // V8_ENABLE_CONSERVATIVE_STACK_SCANNING + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/handles/shared-object-conveyor-handles.cc b/deps/v8/src/handles/shared-object-conveyor-handles.cc index 839ce212bd9666..37b4a2672a9d90 100644 --- a/deps/v8/src/handles/shared-object-conveyor-handles.cc +++ b/deps/v8/src/handles/shared-object-conveyor-handles.cc @@ -12,7 +12,8 @@ namespace internal { // TODO(v8:12547): Currently the shared isolate owns all the conveyors. Change // the owner to the main isolate once the shared isolate is removed. 
SharedObjectConveyorHandles::SharedObjectConveyorHandles(Isolate* isolate) - : persistent_handles_(isolate->shared_isolate()->NewPersistentHandles()) {} + : persistent_handles_( + isolate->shared_heap_isolate()->NewPersistentHandles()) {} uint32_t SharedObjectConveyorHandles::Persist(HeapObject shared_object) { DCHECK(shared_object.IsShared()); diff --git a/deps/v8/src/heap/allocation-stats.h b/deps/v8/src/heap/allocation-stats.h index a024b956e0edc0..f02f2594d98666 100644 --- a/deps/v8/src/heap/allocation-stats.h +++ b/deps/v8/src/heap/allocation-stats.h @@ -59,6 +59,8 @@ class AllocationStats { #endif void IncreaseAllocatedBytes(size_t bytes, const BasicMemoryChunk* page) { + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(bytes, kObjectAlignment8GbHeap)); #ifdef DEBUG size_t size = size_; DCHECK_GE(size + bytes, size); diff --git a/deps/v8/src/heap/array-buffer-sweeper.cc b/deps/v8/src/heap/array-buffer-sweeper.cc index fd36cf89c035d3..088e9e4ac565b5 100644 --- a/deps/v8/src/heap/array-buffer-sweeper.cc +++ b/deps/v8/src/heap/array-buffer-sweeper.cc @@ -109,7 +109,6 @@ ArrayBufferSweeper::~ArrayBufferSweeper() { void ArrayBufferSweeper::EnsureFinished() { if (!sweeping_in_progress()) return; - TRACE_GC(heap_->tracer(), GCTracer::Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS); TryAbortResult abort_result = heap_->isolate()->cancelable_task_manager()->TryAbort(job_->id_); diff --git a/deps/v8/src/heap/base-space.cc b/deps/v8/src/heap/base-space.cc index aabbeaebf540a8..5f28afc2406010 100644 --- a/deps/v8/src/heap/base-space.cc +++ b/deps/v8/src/heap/base-space.cc @@ -17,12 +17,16 @@ const char* BaseSpace::GetSpaceName(AllocationSpace space) { return "map_space"; case CODE_SPACE: return "code_space"; + case SHARED_SPACE: + return "shared_space"; case LO_SPACE: return "large_object_space"; case NEW_LO_SPACE: return "new_large_object_space"; case CODE_LO_SPACE: return "code_large_object_space"; + case SHARED_LO_SPACE: + return "shared_large_object_space"; case RO_SPACE: return "read_only_space"; } diff --git a/deps/v8/src/heap/base/basic-slot-set.h b/deps/v8/src/heap/base/basic-slot-set.h new file mode 100644 index 00000000000000..2f0bc1c8727697 --- /dev/null +++ b/deps/v8/src/heap/base/basic-slot-set.h @@ -0,0 +1,464 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_HEAP_BASE_BASIC_SLOT_SET_H_ +#define V8_HEAP_BASE_BASIC_SLOT_SET_H_ + +#include <cstddef> +#include <memory> + +#include "src/base/atomic-utils.h" +#include "src/base/bits.h" +#include "src/base/platform/memory.h" + +namespace heap { +namespace base { + +enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT }; + +// Data structure for maintaining a set of slots in a standard (non-large) +// page. +// The data structure assumes that the slots are pointer size aligned and +// splits the valid slot offset range into buckets. +// Each bucket is a bitmap with a bit corresponding to a single slot offset. +template <size_t SlotGranularity> +class BasicSlotSet { + static constexpr auto kSystemPointerSize = sizeof(void*); + + public: + using Address = uintptr_t; + + enum AccessMode : uint8_t { + ATOMIC, + NON_ATOMIC, + }; + + enum EmptyBucketMode { + FREE_EMPTY_BUCKETS, // An empty bucket will be deallocated immediately. + KEEP_EMPTY_BUCKETS // An empty bucket will be kept. 
+ }; + + BasicSlotSet() = delete; + + static BasicSlotSet* Allocate(size_t buckets) { + // BasicSlotSet* slot_set --+ + // | + // v + // +-----------------+-------------------------+ + // | initial buckets | buckets array | + // +-----------------+-------------------------+ + // pointer-sized pointer-sized * buckets + // + // + // The BasicSlotSet pointer points to the beginning of the buckets array for + // faster access in the write barrier. The number of buckets is needed for + // calculating the size of this data structure. + size_t buckets_size = buckets * sizeof(Bucket*); + size_t size = kInitialBucketsSize + buckets_size; + void* allocation = v8::base::AlignedAlloc(size, kSystemPointerSize); + CHECK(allocation); + BasicSlotSet* slot_set = reinterpret_cast<BasicSlotSet*>( + reinterpret_cast<uint8_t*>(allocation) + kInitialBucketsSize); + DCHECK( + IsAligned(reinterpret_cast<uintptr_t>(slot_set), kSystemPointerSize)); +#ifdef DEBUG + *slot_set->initial_buckets() = buckets; +#endif + for (size_t i = 0; i < buckets; i++) { + *slot_set->bucket(i) = nullptr; + } + return slot_set; + } + + static void Delete(BasicSlotSet* slot_set, size_t buckets) { + if (slot_set == nullptr) return; + + for (size_t i = 0; i < buckets; i++) { + slot_set->ReleaseBucket(i); + } + +#ifdef DEBUG + size_t initial_buckets = *slot_set->initial_buckets(); + + for (size_t i = buckets; i < initial_buckets; i++) { + DCHECK_NULL(*slot_set->bucket(i)); + } +#endif + + v8::base::AlignedFree(reinterpret_cast<uint8_t*>(slot_set) - + kInitialBucketsSize); + } + + constexpr static size_t BucketsForSize(size_t size) { + return (size + (SlotGranularity * kBitsPerBucket) - 1) / + (SlotGranularity * kBitsPerBucket); + } + + // Converts the slot offset into bucket index. + constexpr static size_t BucketForSlot(size_t slot_offset) { + DCHECK(IsAligned(slot_offset, SlotGranularity)); + return slot_offset / (SlotGranularity * kBitsPerBucket); + } + + // The slot offset specifies a slot at address page_start_ + slot_offset. + // AccessMode defines whether there can be concurrent access on the buckets + // or not. + template <AccessMode access_mode> + void Insert(size_t slot_offset) { + size_t bucket_index; + int cell_index, bit_index; + SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index); + Bucket* bucket = LoadBucket<access_mode>(bucket_index); + if (bucket == nullptr) { + bucket = new Bucket; + if (!SwapInNewBucket<access_mode>(bucket_index, bucket)) { + delete bucket; + bucket = LoadBucket<access_mode>(bucket_index); + } + } + // Check that monotonicity is preserved, i.e., once a bucket is set we do + // not free it concurrently. + DCHECK(bucket != nullptr); + DCHECK_EQ(bucket->cells(), LoadBucket<access_mode>(bucket_index)->cells()); + uint32_t mask = 1u << bit_index; + if ((bucket->template LoadCell<access_mode>(cell_index) & mask) == 0) { + bucket->template SetCellBits<access_mode>(cell_index, mask); + } + } + + // The slot offset specifies a slot at address page_start_ + slot_offset. + // Returns true if the set contains the slot. + bool Contains(size_t slot_offset) { + size_t bucket_index; + int cell_index, bit_index; + SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index); + Bucket* bucket = LoadBucket(bucket_index); + if (bucket == nullptr) return false; + return (bucket->LoadCell(cell_index) & (1u << bit_index)) != 0; + } + + // The slot offset specifies a slot at address page_start_ + slot_offset. 
+ void Remove(size_t slot_offset) { + size_t bucket_index; + int cell_index, bit_index; + SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index); + Bucket* bucket = LoadBucket(bucket_index); + if (bucket != nullptr) { + uint32_t cell = bucket->LoadCell(cell_index); + uint32_t bit_mask = 1u << bit_index; + if (cell & bit_mask) { + bucket->ClearCellBits(cell_index, bit_mask); + } + } + } + + // The slot offsets specify a range of slots at addresses: + // [page_start_ + start_offset ... page_start_ + end_offset). + void RemoveRange(size_t start_offset, size_t end_offset, size_t buckets, + EmptyBucketMode mode) { + CHECK_LE(end_offset, buckets * kBitsPerBucket * SlotGranularity); + DCHECK_LE(start_offset, end_offset); + size_t start_bucket; + int start_cell, start_bit; + SlotToIndices(start_offset, &start_bucket, &start_cell, &start_bit); + size_t end_bucket; + int end_cell, end_bit; + SlotToIndices(end_offset, &end_bucket, &end_cell, &end_bit); + uint32_t start_mask = (1u << start_bit) - 1; + uint32_t end_mask = ~((1u << end_bit) - 1); + Bucket* bucket; + if (start_bucket == end_bucket && start_cell == end_cell) { + bucket = LoadBucket(start_bucket); + if (bucket != nullptr) { + bucket->ClearCellBits(start_cell, ~(start_mask | end_mask)); + } + return; + } + size_t current_bucket = start_bucket; + int current_cell = start_cell; + bucket = LoadBucket(current_bucket); + if (bucket != nullptr) { + bucket->ClearCellBits(current_cell, ~start_mask); + } + current_cell++; + if (current_bucket < end_bucket) { + if (bucket != nullptr) { + ClearBucket(bucket, current_cell, kCellsPerBucket); + } + // The rest of the current bucket is cleared. + // Move on to the next bucket. + current_bucket++; + current_cell = 0; + } + DCHECK(current_bucket == end_bucket || + (current_bucket < end_bucket && current_cell == 0)); + while (current_bucket < end_bucket) { + if (mode == FREE_EMPTY_BUCKETS) { + ReleaseBucket(current_bucket); + } else { + DCHECK(mode == KEEP_EMPTY_BUCKETS); + bucket = LoadBucket(current_bucket); + if (bucket != nullptr) { + ClearBucket(bucket, 0, kCellsPerBucket); + } + } + current_bucket++; + } + // All buckets between start_bucket and end_bucket are cleared. + DCHECK(current_bucket == end_bucket); + if (current_bucket == buckets) return; + bucket = LoadBucket(current_bucket); + DCHECK(current_cell <= end_cell); + if (bucket == nullptr) return; + while (current_cell < end_cell) { + bucket->StoreCell(current_cell, 0); + current_cell++; + } + // All cells between start_cell and end_cell are cleared. + DCHECK(current_bucket == end_bucket && current_cell == end_cell); + bucket->ClearCellBits(end_cell, ~end_mask); + } + + // The slot offset specifies a slot at address page_start_ + slot_offset. + bool Lookup(size_t slot_offset) { + size_t bucket_index; + int cell_index, bit_index; + SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index); + Bucket* bucket = LoadBucket(bucket_index); + if (bucket == nullptr) return false; + return (bucket->LoadCell(cell_index) & (1u << bit_index)) != 0; + } + + // Iterate over all slots in the set and for each slot invoke the callback. + // If the callback returns REMOVE_SLOT then the slot is removed from the set. + // Returns the new number of slots. + // + // Iteration can be performed concurrently with other operations that use + // atomic access mode such as insertion and removal. However there is no + // guarantee about ordering and linearizability. 
+ // + // Sample usage: + // Iterate([](Address slot) { + // if (good(slot)) return KEEP_SLOT; + // else return REMOVE_SLOT; + // }); + // + // Releases memory for empty buckets with FREE_EMPTY_BUCKETS. + template <typename Callback> + size_t Iterate(Address chunk_start, size_t start_bucket, size_t end_bucket, + Callback callback, EmptyBucketMode mode) { + return Iterate(chunk_start, start_bucket, end_bucket, callback, + [this, mode](size_t bucket_index) { + if (mode == EmptyBucketMode::FREE_EMPTY_BUCKETS) { + ReleaseBucket(bucket_index); + } + }); + } + + bool FreeEmptyBuckets(size_t buckets) { + bool empty = true; + for (size_t bucket_index = 0; bucket_index < buckets; bucket_index++) { + if (!FreeBucketIfEmpty(bucket_index)) { + empty = false; + } + } + + return empty; + } + + static const int kCellsPerBucket = 32; + static const int kCellsPerBucketLog2 = 5; + static const int kCellSizeBytesLog2 = 2; + static const int kCellSizeBytes = 1 << kCellSizeBytesLog2; + static const int kBitsPerCell = 32; + static const int kBitsPerCellLog2 = 5; + static const int kBitsPerBucket = kCellsPerBucket * kBitsPerCell; + static const int kBitsPerBucketLog2 = kCellsPerBucketLog2 + kBitsPerCellLog2; + + class Bucket final { + uint32_t cells_[kCellsPerBucket]; + + public: + Bucket() { + for (int i = 0; i < kCellsPerBucket; i++) { + cells_[i] = 0; + } + } + + uint32_t* cells() { return cells_; } + uint32_t* cell(int cell_index) { return cells() + cell_index; } + + template <AccessMode access_mode = AccessMode::ATOMIC> + uint32_t LoadCell(int cell_index) { + DCHECK_LT(cell_index, kCellsPerBucket); + if (access_mode == AccessMode::ATOMIC) + return v8::base::AsAtomic32::Acquire_Load(cells() + cell_index); + return *(cells() + cell_index); + } + + template <AccessMode access_mode = AccessMode::ATOMIC> + void SetCellBits(int cell_index, uint32_t mask) { + if (access_mode == AccessMode::ATOMIC) { + v8::base::AsAtomic32::SetBits(cell(cell_index), mask, mask); + } else { + uint32_t* c = cell(cell_index); + *c = (*c & ~mask) | mask; + } + } + + void ClearCellBits(int cell_index, uint32_t mask) { + v8::base::AsAtomic32::SetBits(cell(cell_index), 0u, mask); + } + + void StoreCell(int cell_index, uint32_t value) { + v8::base::AsAtomic32::Release_Store(cell(cell_index), value); + } + + bool IsEmpty() { + for (int i = 0; i < kCellsPerBucket; i++) { + if (cells_[i] != 0) { + return false; + } + } + return true; + } + }; + + protected: + template <typename Callback, typename EmptyBucketCallback> + size_t Iterate(Address chunk_start, size_t start_bucket, size_t end_bucket, + Callback callback, EmptyBucketCallback empty_bucket_callback) { + size_t new_count = 0; + for (size_t bucket_index = start_bucket; bucket_index < end_bucket; + bucket_index++) { + Bucket* bucket = LoadBucket(bucket_index); + if (bucket != nullptr) { + size_t in_bucket_count = 0; + size_t cell_offset = bucket_index << kBitsPerBucketLog2; + for (int i = 0; i < kCellsPerBucket; i++, cell_offset += kBitsPerCell) { + uint32_t cell = bucket->LoadCell(i); + if (cell) { + uint32_t old_cell = cell; + uint32_t mask = 0; + while (cell) { + int bit_offset = v8::base::bits::CountTrailingZeros(cell); + uint32_t bit_mask = 1u << bit_offset; + Address slot = (cell_offset + bit_offset) * SlotGranularity; + if (callback(chunk_start + slot) == KEEP_SLOT) { + ++in_bucket_count; + } else { + mask |= bit_mask; + } + cell ^= bit_mask; + } + uint32_t new_cell = old_cell & ~mask; + if (old_cell != new_cell) { + bucket->ClearCellBits(i, mask); + } + } + } + if (in_bucket_count 
== 0) { + empty_bucket_callback(bucket_index); + } + new_count += in_bucket_count; + } + } + return new_count; + } + + bool FreeBucketIfEmpty(size_t bucket_index) { + Bucket* bucket = LoadBucket<AccessMode::NON_ATOMIC>(bucket_index); + if (bucket != nullptr) { + if (bucket->IsEmpty()) { + ReleaseBucket<AccessMode::NON_ATOMIC>(bucket_index); + } else { + return false; + } + } + + return true; + } + + void ClearBucket(Bucket* bucket, int start_cell, int end_cell) { + DCHECK_GE(start_cell, 0); + DCHECK_LE(end_cell, kCellsPerBucket); + int current_cell = start_cell; + while (current_cell < kCellsPerBucket) { + bucket->StoreCell(current_cell, 0); + current_cell++; + } + } + + template <AccessMode access_mode = AccessMode::ATOMIC> + void ReleaseBucket(size_t bucket_index) { + Bucket* bucket = LoadBucket<access_mode>(bucket_index); + StoreBucket<access_mode>(bucket_index, nullptr); + delete bucket; + } + + template <AccessMode access_mode = AccessMode::ATOMIC> + Bucket* LoadBucket(Bucket** bucket) { + if (access_mode == AccessMode::ATOMIC) + return v8::base::AsAtomicPointer::Acquire_Load(bucket); + return *bucket; + } + + template <AccessMode access_mode = AccessMode::ATOMIC> + Bucket* LoadBucket(size_t bucket_index) { + return LoadBucket(bucket(bucket_index)); + } + + template <AccessMode access_mode = AccessMode::ATOMIC> + void StoreBucket(Bucket** bucket, Bucket* value) { + if (access_mode == AccessMode::ATOMIC) { + v8::base::AsAtomicPointer::Release_Store(bucket, value); + } else { + *bucket = value; + } + } + + template <AccessMode access_mode = AccessMode::ATOMIC> + void StoreBucket(size_t bucket_index, Bucket* value) { + StoreBucket(bucket(bucket_index), value); + } + + template <AccessMode access_mode = AccessMode::ATOMIC> + bool SwapInNewBucket(size_t bucket_index, Bucket* value) { + Bucket** b = bucket(bucket_index); + if (access_mode == AccessMode::ATOMIC) { + return v8::base::AsAtomicPointer::Release_CompareAndSwap( + b, nullptr, value) == nullptr; + } else { + DCHECK_NULL(*b); + *b = value; + return true; + } + } + + // Converts the slot offset into bucket/cell/bit index. + static void SlotToIndices(size_t slot_offset, size_t* bucket_index, + int* cell_index, int* bit_index) { + DCHECK(IsAligned(slot_offset, SlotGranularity)); + size_t slot = slot_offset / SlotGranularity; + *bucket_index = slot >> kBitsPerBucketLog2; + *cell_index = + static_cast<int>((slot >> kBitsPerCellLog2) & (kCellsPerBucket - 1)); + *bit_index = static_cast<int>(slot & (kBitsPerCell - 1)); + } + + Bucket** buckets() { return reinterpret_cast<Bucket**>(this); } + Bucket** bucket(size_t bucket_index) { return buckets() + bucket_index; } + +#ifdef DEBUG + size_t* initial_buckets() { return reinterpret_cast<size_t*>(this) - 1; } + static const int kInitialBucketsSize = sizeof(size_t); +#else + static const int kInitialBucketsSize = 0; +#endif +}; + +} // namespace base +} // namespace heap + +#endif // V8_HEAP_BASE_BASIC_SLOT_SET_H_ diff --git a/deps/v8/src/heap/basic-memory-chunk.h b/deps/v8/src/heap/basic-memory-chunk.h index db80da75c995ce..60a711b622e614 100644 --- a/deps/v8/src/heap/basic-memory-chunk.h +++ b/deps/v8/src/heap/basic-memory-chunk.h @@ -353,7 +353,7 @@ class BasicMemoryChunk { size_t size_; // Flags that are only mutable from the main thread when no concurrent - // component (e.g. marker, sweeper) is running. + // component (e.g. marker, sweeper, compilation, allocation) is running. MainThreadFlags main_thread_flags_{NO_FLAGS}; // TODO(v8:7464): Find a way to remove this. 
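The new basic-slot-set.h above records each slot as a (bucket, cell, bit) triple: the page-relative offset is divided by the slot granularity, the high bits select a bucket, the middle five bits select a 32-bit cell within the bucket, and the low five bits select a bit within that cell. A standalone sketch of that decomposition (not part of the patch; the constants are copied from the header, and pointer-sized slot granularity is assumed):

#include <cassert>
#include <cstddef>
#include <cstdio>

// Constants mirrored from BasicSlotSet in basic-slot-set.h.
constexpr int kCellsPerBucket = 32;
constexpr int kBitsPerCell = 32;
constexpr int kBitsPerCellLog2 = 5;
constexpr int kBitsPerBucketLog2 = 10;  // kCellsPerBucketLog2 + kBitsPerCellLog2
constexpr size_t kSlotGranularity = sizeof(void*);  // assumed template argument

// Mirrors BasicSlotSet::SlotToIndices: offset -> (bucket, cell, bit).
void SlotToIndices(size_t slot_offset, size_t* bucket, int* cell, int* bit) {
  assert(slot_offset % kSlotGranularity == 0);
  const size_t slot = slot_offset / kSlotGranularity;
  *bucket = slot >> kBitsPerBucketLog2;
  *cell = static_cast<int>((slot >> kBitsPerCellLog2) & (kCellsPerBucket - 1));
  *bit = static_cast<int>(slot & (kBitsPerCell - 1));
}

int main() {
  size_t bucket;
  int cell, bit;
  // One bucket covers 32 cells * 32 bits = 1024 slots, i.e. 8 KiB of payload
  // with 8-byte slots, so this offset lands on bucket 1, cell 0, bit 0.
  SlotToIndices(1024 * kSlotGranularity, &bucket, &cell, &bit);
  std::printf("bucket=%zu cell=%d bit=%d\n", bucket, cell, bit);
  return 0;
}

Keeping each slot inside a single 32-bit cell is what lets Insert set a bit with one atomic operation, while RemoveRange can clear whole cells and release whole buckets in bulk.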
diff --git a/deps/v8/src/heap/code-range.cc b/deps/v8/src/heap/code-range.cc index dfe730730cd41e..ae240d0f1c1188 100644 --- a/deps/v8/src/heap/code-range.cc +++ b/deps/v8/src/heap/code-range.cc @@ -147,15 +147,14 @@ bool CodeRange::InitReservation(v8::PageAllocator* page_allocator, if (!VirtualMemoryCage::InitReservation(params)) return false; - if (V8_EXTERNAL_CODE_SPACE_BOOL) { - // Ensure that the code range does not cross the 4Gb boundary and thus - // default compression scheme of truncating the Code pointers to 32-bits - // still works. - Address base = page_allocator_->begin(); - Address last = base + page_allocator_->size() - 1; - CHECK_EQ(GetPtrComprCageBaseAddress(base), - GetPtrComprCageBaseAddress(last)); - } +#ifdef V8_EXTERNAL_CODE_SPACE + // Ensure that ExternalCodeCompressionScheme is applicable to all objects + // stored in the code range. + Address base = page_allocator_->begin(); + Address last = base + page_allocator_->size() - 1; + CHECK_EQ(ExternalCodeCompressionScheme::GetPtrComprCageBaseAddress(base), + ExternalCodeCompressionScheme::GetPtrComprCageBaseAddress(last)); +#endif // V8_EXTERNAL_CODE_SPACE // On some platforms, specifically Win64, we need to reserve some pages at // the beginning of an executable space. See diff --git a/deps/v8/src/heap/concurrent-allocator-inl.h b/deps/v8/src/heap/concurrent-allocator-inl.h index f6eed1b696ffb4..efce44d36398a7 100644 --- a/deps/v8/src/heap/concurrent-allocator-inl.h +++ b/deps/v8/src/heap/concurrent-allocator-inl.h @@ -13,7 +13,6 @@ #include "src/heap/incremental-marking.h" #include "src/heap/local-heap.h" #include "src/heap/spaces-inl.h" -#include "src/heap/spaces.h" #include "src/objects/heap-object.h" namespace v8 { @@ -22,6 +21,7 @@ namespace internal { AllocationResult ConcurrentAllocator::AllocateRaw(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); DCHECK(!v8_flags.enable_third_party_heap); // TODO(dinfuehr): Add support for allocation observers #ifdef DEBUG @@ -34,15 +34,47 @@ AllocationResult ConcurrentAllocator::AllocateRaw(int size_in_bytes, AllocationResult result; if (USE_ALLOCATION_ALIGNMENT_BOOL && alignment != kTaggedAligned) { - result = lab_.AllocateRawAligned(size_in_bytes, alignment); + result = AllocateInLabFastAligned(size_in_bytes, alignment); } else { - result = lab_.AllocateRawUnaligned(size_in_bytes); + result = AllocateInLabFastUnaligned(size_in_bytes); } return result.IsFailure() ? 
AllocateInLabSlow(size_in_bytes, alignment, origin) : result; } +AllocationResult ConcurrentAllocator::AllocateInLabFastUnaligned( + int size_in_bytes) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); + + if (!lab_.CanIncrementTop(size_in_bytes)) { + return AllocationResult::Failure(); + } + + HeapObject object = HeapObject::FromAddress(lab_.IncrementTop(size_in_bytes)); + return AllocationResult::FromObject(object); +} + +AllocationResult ConcurrentAllocator::AllocateInLabFastAligned( + int size_in_bytes, AllocationAlignment alignment) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); + Address current_top = lab_.top(); + int filler_size = Heap::GetFillToAlign(current_top, alignment); + int aligned_size = filler_size + size_in_bytes; + + if (!lab_.CanIncrementTop(aligned_size)) { + return AllocationResult::Failure(); + } + + HeapObject object = HeapObject::FromAddress(lab_.IncrementTop(aligned_size)); + + if (filler_size > 0) { + object = owning_heap()->PrecedeWithFiller(object, filler_size); + } + + return AllocationResult::FromObject(object); +} + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/heap/concurrent-allocator.cc b/deps/v8/src/heap/concurrent-allocator.cc index 4b02b141709d12..b4494d9a6311eb 100644 --- a/deps/v8/src/heap/concurrent-allocator.cc +++ b/deps/v8/src/heap/concurrent-allocator.cc @@ -8,7 +8,9 @@ #include "src/execution/isolate.h" #include "src/handles/persistent-handles.h" #include "src/heap/concurrent-allocator-inl.h" +#include "src/heap/gc-tracer-inl.h" #include "src/heap/heap.h" +#include "src/heap/linear-allocation-area.h" #include "src/heap/local-heap-inl.h" #include "src/heap/local-heap.h" #include "src/heap/marking.h" @@ -41,7 +43,7 @@ void StressConcurrentAllocatorTask::RunInternal() { heap->CreateFillerObjectAtBackground(result.ToAddress(), kSmallObjectSize); } else { - local_heap.TryPerformCollection(); + heap->CollectGarbageFromAnyThread(&local_heap); } result = local_heap.AllocateRaw(kMediumObjectSize, AllocationType::kOld, @@ -51,7 +53,7 @@ void StressConcurrentAllocatorTask::RunInternal() { heap->CreateFillerObjectAtBackground(result.ToAddress(), kMediumObjectSize); } else { - local_heap.TryPerformCollection(); + heap->CollectGarbageFromAnyThread(&local_heap); } result = local_heap.AllocateRaw(kLargeObjectSize, AllocationType::kOld, @@ -61,7 +63,7 @@ void StressConcurrentAllocatorTask::RunInternal() { heap->CreateFillerObjectAtBackground(result.ToAddress(), kLargeObjectSize); } else { - local_heap.TryPerformCollection(); + heap->CollectGarbageFromAnyThread(&local_heap); } local_heap.Safepoint(); } @@ -77,24 +79,35 @@ void StressConcurrentAllocatorTask::Schedule(Isolate* isolate) { kDelayInSeconds); } +ConcurrentAllocator::ConcurrentAllocator(LocalHeap* local_heap, + PagedSpace* space) + : local_heap_(local_heap), space_(space), owning_heap_(space_->heap()) {} + void ConcurrentAllocator::FreeLinearAllocationArea() { // The code page of the linear allocation area needs to be unprotected // because we are going to write a filler into that memory area below. 
base::Optional<CodePageMemoryModificationScope> optional_scope; - if (lab_.IsValid() && space_->identity() == CODE_SPACE) { + if (IsLabValid() && space_->identity() == CODE_SPACE) { optional_scope.emplace(MemoryChunk::FromAddress(lab_.top())); } - lab_.CloseAndMakeIterable(); + if (lab_.top() != lab_.limit() && + owning_heap()->incremental_marking()->black_allocation()) { + Page::FromAddress(lab_.top()) + ->DestroyBlackAreaBackground(lab_.top(), lab_.limit()); + } + + MakeLabIterable(); + ResetLab(); } void ConcurrentAllocator::MakeLinearAllocationAreaIterable() { // The code page of the linear allocation area needs to be unprotected // because we are going to write a filler into that memory area below. base::Optional<CodePageMemoryModificationScope> optional_scope; - if (lab_.IsValid() && space_->identity() == CODE_SPACE) { + if (IsLabValid() && space_->identity() == CODE_SPACE) { optional_scope.emplace(MemoryChunk::FromAddress(lab_.top())); } - lab_.MakeIterable(); + MakeLabIterable(); } void ConcurrentAllocator::MarkLinearAllocationAreaBlack() { @@ -128,54 +141,134 @@ void ConcurrentAllocator::UnmarkLinearAllocationArea() { AllocationResult ConcurrentAllocator::AllocateInLabSlow( int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) { - if (!EnsureLab(origin)) { + if (!AllocateLab(origin)) { return AllocationResult::Failure(); } AllocationResult allocation = - lab_.AllocateRawAligned(size_in_bytes, alignment); + AllocateInLabFastAligned(size_in_bytes, alignment); DCHECK(!allocation.IsFailure()); return allocation; } -bool ConcurrentAllocator::EnsureLab(AllocationOrigin origin) { - auto result = space_->RawAllocateBackground(local_heap_, kMinLabSize, - kMaxLabSize, origin); +bool ConcurrentAllocator::AllocateLab(AllocationOrigin origin) { + auto result = AllocateFromSpaceFreeList(kMinLabSize, kMaxLabSize, origin); if (!result) return false; + owning_heap()->StartIncrementalMarkingIfAllocationLimitIsReachedBackground(); + + FreeLinearAllocationArea(); + + Address lab_start = result->first; + Address lab_end = lab_start + result->second; + lab_ = LinearAllocationArea(lab_start, lab_end); + DCHECK(IsLabValid()); + if (IsBlackAllocationEnabled()) { - Address top = result->first; - Address limit = top + result->second; + Address top = lab_.top(); + Address limit = lab_.limit(); Page::FromAllocationAreaAddress(top)->CreateBlackAreaBackground(top, limit); } - HeapObject object = HeapObject::FromAddress(result->first); - LocalAllocationBuffer saved_lab = std::move(lab_); - lab_ = LocalAllocationBuffer::FromResult( - space_->heap(), AllocationResult::FromObject(object), result->second); - DCHECK(lab_.IsValid()); - if (!lab_.TryMerge(&saved_lab)) { - saved_lab.CloseAndMakeIterable(); - } return true; } +base::Optional<std::pair<Address, size_t>> +ConcurrentAllocator::AllocateFromSpaceFreeList(size_t min_size_in_bytes, + size_t max_size_in_bytes, + AllocationOrigin origin) { + DCHECK(!space_->is_compaction_space()); + DCHECK(space_->identity() == OLD_SPACE || space_->identity() == CODE_SPACE || + space_->identity() == MAP_SPACE || space_->identity() == SHARED_SPACE); + DCHECK(origin == AllocationOrigin::kRuntime || + origin == AllocationOrigin::kGC); + DCHECK_IMPLIES(!local_heap_, origin == AllocationOrigin::kGC); + + base::Optional<std::pair<Address, size_t>> result = + space_->TryAllocationFromFreeListBackground(min_size_in_bytes, + max_size_in_bytes, origin); + if (result) return result; + + // Sweeping is still in progress. 
+ if (owning_heap()->sweeping_in_progress()) { + // First try to refill the free-list, concurrent sweeper threads + // may have freed some objects in the meantime. + { + TRACE_GC_EPOCH(owning_heap()->tracer(), + GCTracer::Scope::MC_BACKGROUND_SWEEPING, + ThreadKind::kBackground); + space_->RefillFreeList(); + } + + // Retry the free list allocation. + result = space_->TryAllocationFromFreeListBackground( + min_size_in_bytes, max_size_in_bytes, origin); + if (result) return result; + + // Now contribute to sweeping from background thread and then try to + // reallocate. + int max_freed; + { + TRACE_GC_EPOCH(owning_heap()->tracer(), + GCTracer::Scope::MC_BACKGROUND_SWEEPING, + ThreadKind::kBackground); + const int kMaxPagesToSweep = 1; + max_freed = owning_heap()->sweeper()->ParallelSweepSpace( + space_->identity(), Sweeper::SweepingMode::kLazyOrConcurrent, + static_cast<int>(min_size_in_bytes), kMaxPagesToSweep); + space_->RefillFreeList(); + } + + if (static_cast<size_t>(max_freed) >= min_size_in_bytes) { + result = space_->TryAllocationFromFreeListBackground( + min_size_in_bytes, max_size_in_bytes, origin); + if (result) return result; + } + } + + if (owning_heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap_) && + owning_heap()->CanExpandOldGenerationBackground(local_heap_, + space_->AreaSize())) { + result = space_->TryExpandBackground(max_size_in_bytes); + if (result) return result; + } + + if (owning_heap()->sweeping_in_progress()) { + // Complete sweeping for this space. + TRACE_GC_EPOCH(owning_heap()->tracer(), + GCTracer::Scope::MC_BACKGROUND_SWEEPING, + ThreadKind::kBackground); + owning_heap()->DrainSweepingWorklistForSpace(space_->identity()); + + space_->RefillFreeList(); + + // Last try to acquire memory from free list. + return space_->TryAllocationFromFreeListBackground( + min_size_in_bytes, max_size_in_bytes, origin); + } + + return {}; +} + AllocationResult ConcurrentAllocator::AllocateOutsideLab( int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) { // Conservative estimate as we don't know the alignment of the allocation. const int requested_filler_size = Heap::GetMaximumFillToAlign(alignment); const int aligned_size_in_bytes = size_in_bytes + requested_filler_size; - auto result = space_->RawAllocateBackground( - local_heap_, aligned_size_in_bytes, aligned_size_in_bytes, origin); + auto result = AllocateFromSpaceFreeList(aligned_size_in_bytes, + aligned_size_in_bytes, origin); if (!result) return AllocationResult::Failure(); + + owning_heap()->StartIncrementalMarkingIfAllocationLimitIsReachedBackground(); + DCHECK_GE(result->second, aligned_size_in_bytes); - HeapObject object = - (requested_filler_size) - ? 
owning_heap()->AlignWithFiller( - HeapObject::FromAddress(result->first), size_in_bytes, - static_cast<int>(result->second), alignment) - : HeapObject::FromAddress(result->first); + HeapObject object = HeapObject::FromAddress(result->first); + if (requested_filler_size > 0) { + object = owning_heap()->AlignWithFiller( + object, size_in_bytes, static_cast<int>(result->second), alignment); + } + if (IsBlackAllocationEnabled()) { owning_heap()->incremental_marking()->MarkBlackBackground(object, size_in_bytes); @@ -187,7 +280,12 @@ bool ConcurrentAllocator::IsBlackAllocationEnabled() const { return owning_heap()->incremental_marking()->black_allocation(); } -Heap* ConcurrentAllocator::owning_heap() const { return space_->heap(); } +void ConcurrentAllocator::MakeLabIterable() { + if (IsLabValid()) { + owning_heap()->CreateFillerObjectAtBackground( + lab_.top(), static_cast<int>(lab_.limit() - lab_.top())); + } +} } // namespace internal } // namespace v8 diff --git a/deps/v8/src/heap/concurrent-allocator.h b/deps/v8/src/heap/concurrent-allocator.h index c7c3a28465a40c..b9e003e6cbe0e4 100644 --- a/deps/v8/src/heap/concurrent-allocator.h +++ b/deps/v8/src/heap/concurrent-allocator.h @@ -5,8 +5,10 @@ #ifndef V8_HEAP_CONCURRENT_ALLOCATOR_H_ #define V8_HEAP_CONCURRENT_ALLOCATOR_H_ +#include "src/base/optional.h" #include "src/common/globals.h" #include "src/heap/heap.h" +#include "src/heap/linear-allocation-area.h" #include "src/heap/spaces.h" #include "src/tasks/cancelable-task.h" @@ -37,10 +39,7 @@ class ConcurrentAllocator { static constexpr int kMaxLabSize = 32 * KB; static constexpr int kMaxLabObjectSize = 2 * KB; - ConcurrentAllocator(LocalHeap* local_heap, PagedSpace* space) - : local_heap_(local_heap), - space_(space), - lab_(LocalAllocationBuffer::InvalidBuffer()) {} + ConcurrentAllocator(LocalHeap* local_heap, PagedSpace* space); inline AllocationResult AllocateRaw(int object_size, AllocationAlignment alignment, @@ -59,10 +58,20 @@ class ConcurrentAllocator { "size <= kMaxLabObjectSize will fit into a newly allocated LAB of size " "kLabSize after computing the alignment requirements."); + V8_EXPORT_PRIVATE V8_INLINE AllocationResult + AllocateInLabFastUnaligned(int size_in_bytes); + + V8_EXPORT_PRIVATE V8_INLINE AllocationResult + AllocateInLabFastAligned(int size_in_bytes, AllocationAlignment alignment); + V8_EXPORT_PRIVATE AllocationResult AllocateInLabSlow(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin); - bool EnsureLab(AllocationOrigin origin); + bool AllocateLab(AllocationOrigin origin); + + base::Optional<std::pair<Address, size_t>> AllocateFromSpaceFreeList( + size_t min_size_in_bytes, size_t max_size_in_bytes, + AllocationOrigin origin); V8_EXPORT_PRIVATE AllocationResult AllocateOutsideLab(int size_in_bytes, AllocationAlignment alignment, @@ -70,13 +79,23 @@ class ConcurrentAllocator { bool IsBlackAllocationEnabled() const; + // Checks whether the LAB is currently in use. + V8_INLINE bool IsLabValid() { return lab_.top() != kNullAddress; } + + // Resets the LAB. + void ResetLab() { lab_ = LinearAllocationArea(kNullAddress, kNullAddress); } + + // Installs a filler object between the LAB's top and limit pointers. + void MakeLabIterable(); + // Returns the Heap of space_. This might differ from the LocalHeap's Heap for // shared spaces.
- Heap* owning_heap() const; + Heap* owning_heap() const { return owning_heap_; } LocalHeap* const local_heap_; PagedSpace* const space_; - LocalAllocationBuffer lab_; + Heap* const owning_heap_; + LinearAllocationArea lab_; }; } // namespace internal diff --git a/deps/v8/src/heap/concurrent-marking.cc b/deps/v8/src/heap/concurrent-marking.cc index d15cb26bcd4850..652037e2e40ea9 100644 --- a/deps/v8/src/heap/concurrent-marking.cc +++ b/deps/v8/src/heap/concurrent-marking.cc @@ -16,6 +16,7 @@ #include "src/heap/heap.h" #include "src/heap/mark-compact-inl.h" #include "src/heap/mark-compact.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/marking-visitor-inl.h" #include "src/heap/marking-visitor.h" #include "src/heap/marking.h" @@ -52,6 +53,8 @@ class ConcurrentMarkingState final } void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) { + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(by, kObjectAlignment8GbHeap)); (*memory_chunk_data_)[chunk].live_bytes += by; } @@ -145,9 +148,7 @@ class ConcurrentMarkingVisitorUtility { if (!object.IsHeapObject()) continue; HeapObject heap_object = HeapObject::cast(object); visitor->SynchronizePageAccess(heap_object); - BasicMemoryChunk* target_page = - BasicMemoryChunk::FromHeapObject(heap_object); - if (!visitor->is_shared_heap() && target_page->InSharedHeap()) continue; + if (!visitor->ShouldMarkObject(heap_object)) continue; visitor->MarkObject(host, heap_object); visitor->RecordSlot(host, slot, heap_object); } @@ -262,7 +263,9 @@ class YoungGenerationConcurrentMarkingVisitor final heap->isolate(), worklists_local), marking_state_(heap->isolate(), memory_chunk_data) {} - bool is_shared_heap() { return false; } + bool ShouldMarkObject(HeapObject object) const { + return !object.InSharedHeap(); + } void SynchronizePageAccess(HeapObject heap_object) { #ifdef THREAD_SANITIZER @@ -555,7 +558,7 @@ class ConcurrentMarkingVisitor final DCHECK(length.IsSmi()); int size = T::SizeFor(Smi::ToInt(length)); marking_state_.IncrementLiveBytes(MemoryChunk::FromHeapObject(object), - size); + ALIGN_TO_ALLOCATION_ALIGNMENT(size)); VisitMapPointer(object); T::BodyDescriptor::IterateBody(map, object, size, this); return size; diff --git a/deps/v8/src/heap/conservative-stack-visitor.cc b/deps/v8/src/heap/conservative-stack-visitor.cc index eaf28607875043..a9785fb284f7c9 100644 --- a/deps/v8/src/heap/conservative-stack-visitor.cc +++ b/deps/v8/src/heap/conservative-stack-visitor.cc @@ -4,9 +4,13 @@ #include "src/heap/conservative-stack-visitor.h" -#include "src/heap/large-spaces.h" -#include "src/heap/paged-spaces-inl.h" -#include "src/heap/paged-spaces.h" +#include "src/execution/isolate-inl.h" +#include "src/heap/mark-compact.h" +#include "src/objects/visitors.h" + +#ifdef V8_COMPRESS_POINTERS +#include "src/common/ptr-compr-inl.h" +#endif // V8_COMPRESS_POINTERS namespace v8 { namespace internal { @@ -16,61 +20,30 @@ ConservativeStackVisitor::ConservativeStackVisitor(Isolate* isolate, : isolate_(isolate), delegate_(delegate) {} void ConservativeStackVisitor::VisitPointer(const void* pointer) { - VisitConservativelyIfPointer(pointer); + auto address = reinterpret_cast<Address>(const_cast<void*>(pointer)); + VisitConservativelyIfPointer(address); +#ifdef V8_COMPRESS_POINTERS + V8HeapCompressionScheme::ProcessIntermediatePointers( + isolate_, address, + [this](Address ptr) { VisitConservativelyIfPointer(ptr); }); +#endif // V8_COMPRESS_POINTERS } -bool ConservativeStackVisitor::CheckPage(Address address, MemoryChunk* page) { - if (address < 
page->area_start() || address >= page->area_end()) return false; - +void ConservativeStackVisitor::VisitConservativelyIfPointer(Address address) { Address base_ptr; -#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB - base_ptr = page->object_start_bitmap()->FindBasePtr(address); -#elif V8_ENABLE_INNER_POINTER_RESOLUTION_MB +#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB base_ptr = isolate_->heap()->mark_compact_collector()->FindBasePtrForMarking( address); #else #error "Some inner pointer resolution mechanism is needed" -#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_(OSB|MB) - - if (base_ptr == kNullAddress) return false; - - // At this point, base_ptr *must* refer to the valid object. We check if - // |address| resides inside the object or beyond it in unused memory. +#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_MB + if (base_ptr == kNullAddress) return; HeapObject obj = HeapObject::FromAddress(base_ptr); - Address obj_end = obj.address() + obj.Size(); - - if (address >= obj_end) { - // |address| points to unused memory. - return false; - } - Object root = obj; delegate_->VisitRootPointer(Root::kHandleScope, nullptr, FullObjectSlot(&root)); // Check that the delegate visitor did not modify the root slot. DCHECK_EQ(root, obj); - return true; -} - -void ConservativeStackVisitor::VisitConservativelyIfPointer( - const void* pointer) { - auto address = reinterpret_cast<Address>(pointer); - - for (Page* page : *isolate_->heap()->old_space()) { - if (CheckPage(address, page)) { - return; - } - } - - for (LargePage* page : *isolate_->heap()->lo_space()) { - if (address >= page->area_start() && address < page->area_end()) { - Object ptr = page->GetObject(); - FullObjectSlot root = FullObjectSlot(&ptr); - delegate_->VisitRootPointer(Root::kHandleScope, nullptr, root); - DCHECK(root == FullObjectSlot(&ptr)); - return; - } - } } } // namespace internal diff --git a/deps/v8/src/heap/conservative-stack-visitor.h b/deps/v8/src/heap/conservative-stack-visitor.h index 649608b4235e67..e63ba5b6737802 100644 --- a/deps/v8/src/heap/conservative-stack-visitor.h +++ b/deps/v8/src/heap/conservative-stack-visitor.h @@ -5,22 +5,23 @@ #ifndef V8_HEAP_CONSERVATIVE_STACK_VISITOR_H_ #define V8_HEAP_CONSERVATIVE_STACK_VISITOR_H_ +#include "include/v8-internal.h" #include "src/heap/base/stack.h" -#include "src/heap/memory-chunk.h" namespace v8 { namespace internal { -class ConservativeStackVisitor : public ::heap::base::StackVisitor { +class RootVisitor; + +class V8_EXPORT_PRIVATE ConservativeStackVisitor + : public ::heap::base::StackVisitor { public: ConservativeStackVisitor(Isolate* isolate, RootVisitor* delegate); void VisitPointer(const void* pointer) final; private: - bool CheckPage(Address address, MemoryChunk* page); - - void VisitConservativelyIfPointer(const void* pointer); + void VisitConservativelyIfPointer(Address address); Isolate* isolate_ = nullptr; RootVisitor* delegate_ = nullptr; diff --git a/deps/v8/src/heap/cppgc-js/cpp-heap.cc b/deps/v8/src/heap/cppgc-js/cpp-heap.cc index 2a9742b1ea52b4..28beb83e2760ed 100644 --- a/deps/v8/src/heap/cppgc-js/cpp-heap.cc +++ b/deps/v8/src/heap/cppgc-js/cpp-heap.cc @@ -55,6 +55,49 @@ namespace v8 { +namespace internal { + +class MinorGCHeapGrowing + : public cppgc::internal::StatsCollector::AllocationObserver { + public: + explicit MinorGCHeapGrowing(cppgc::internal::StatsCollector& stats_collector) + : stats_collector_(stats_collector) { + stats_collector.RegisterObserver(this); + } + virtual ~MinorGCHeapGrowing() = default; + + void AllocatedObjectSizeIncreased(size_t) 
final {} + void AllocatedObjectSizeDecreased(size_t) final {} + void ResetAllocatedObjectSize(size_t allocated_object_size) final { + ConfigureLimit(allocated_object_size); + } + + bool LimitReached() const { + return stats_collector_.allocated_object_size() >= limit_for_atomic_gc_; + } + + private: + void ConfigureLimit(size_t allocated_object_size) { + // Constant growing factor for growing the heap limit. + static constexpr double kGrowingFactor = 1.5; + // For smaller heaps, allow allocating at least one LAB in each regular + // space before triggering GC again. + static constexpr size_t kMinLimitIncrease = + cppgc::internal::kPageSize * + cppgc::internal::RawHeap::kNumberOfRegularSpaces; + + const size_t size = std::max(allocated_object_size, initial_heap_size_); + limit_for_atomic_gc_ = std::max(static_cast<size_t>(size * kGrowingFactor), + size + kMinLimitIncrease); + } + + cppgc::internal::StatsCollector& stats_collector_; + size_t initial_heap_size_ = 1 * cppgc::internal::kMB; + size_t limit_for_atomic_gc_ = 0; // See ConfigureLimit(). +}; + +} // namespace internal + namespace { START_ALLOW_USE_DEPRECATED() @@ -286,7 +329,8 @@ class UnifiedHeapConservativeMarkingVisitor final class UnifiedHeapMarker final : public cppgc::internal::MarkerBase { public: UnifiedHeapMarker(Heap* v8_heap, cppgc::internal::HeapBase& cpp_heap, - cppgc::Platform* platform, MarkingConfig config); + cppgc::Platform* platform, + cppgc::internal::MarkingConfig config); ~UnifiedHeapMarker() final = default; @@ -324,7 +368,7 @@ class UnifiedHeapMarker final : public cppgc::internal::MarkerBase { UnifiedHeapMarker::UnifiedHeapMarker(Heap* v8_heap, cppgc::internal::HeapBase& heap, cppgc::Platform* platform, - MarkingConfig config) + cppgc::internal::MarkingConfig config) : cppgc::internal::MarkerBase(heap, platform, config), mutator_unified_heap_marking_state_(v8_heap, nullptr), marking_visitor_(config.collection_type == CppHeap::CollectionType::kMajor @@ -487,6 +531,8 @@ CppHeap::CppHeap( cppgc::internal::HeapBase::StackSupport:: kSupportsConservativeStackScan, marking_support, sweeping_support, *this), + minor_gc_heap_growing_( + std::make_unique<MinorGCHeapGrowing>(*stats_collector())), wrapper_descriptor_(wrapper_descriptor) { CHECK_NE(WrapperDescriptor::kUnknownEmbedderId, wrapper_descriptor_.embedder_id_for_garbage_collected); @@ -509,6 +555,29 @@ void CppHeap::Terminate() { HeapBase::Terminate(); } +namespace { + +class SweepingOnMutatorThreadForGlobalHandlesObserver final + : public cppgc::internal::Sweeper::SweepingOnMutatorThreadObserver { + public: + SweepingOnMutatorThreadForGlobalHandlesObserver(CppHeap& cpp_heap, + GlobalHandles& global_handles) + : cppgc::internal::Sweeper::SweepingOnMutatorThreadObserver( + cpp_heap.sweeper()), + global_handles_(global_handles) {} + + void Start() override { + global_handles_.NotifyStartSweepingOnMutatorThread(); + } + + void End() override { global_handles_.NotifyEndSweepingOnMutatorThread(); } + + private: + GlobalHandles& global_handles_; +}; + +} // namespace + void CppHeap::AttachIsolate(Isolate* isolate) { CHECK(!in_detached_testing_mode_); CHECK_NULL(isolate_); @@ -522,6 +591,9 @@ void CppHeap::AttachIsolate(Isolate* isolate) { SetMetricRecorder(std::make_unique<MetricRecorderAdapter>(*this)); oom_handler().SetCustomHandler(&FatalOutOfMemoryHandlerImpl); ReduceGCCapabilititesFromFlags(); + sweeping_on_mutator_thread_observer_ = + std::make_unique<SweepingOnMutatorThreadForGlobalHandlesObserver>( + *this, *isolate_->global_handles()); no_gc_scope_--; } @@
-538,6 +610,8 @@ void CppHeap::DetachIsolate() { } sweeper_.FinishIfRunning(); + sweeping_on_mutator_thread_observer_.reset(); + auto* heap_profiler = isolate_->heap_profiler(); if (heap_profiler) { heap_profiler->RemoveBuildEmbedderGraphCallback(&CppGraphBuilder::Run, @@ -619,17 +693,20 @@ void CppHeap::InitializeTracing(CollectionType collection_type, #if defined(CPPGC_YOUNG_GENERATION) if (generational_gc_supported() && - *collection_type_ == CollectionType::kMajor) + *collection_type_ == CollectionType::kMajor) { + cppgc::internal::StatsCollector::EnabledScope stats_scope( + stats_collector(), cppgc::internal::StatsCollector::kUnmark); cppgc::internal::SequentialUnmarker unmarker(raw_heap()); + } #endif // defined(CPPGC_YOUNG_GENERATION) current_gc_flags_ = gc_flags; - const UnifiedHeapMarker::MarkingConfig marking_config{ + const cppgc::internal::MarkingConfig marking_config{ *collection_type_, StackState::kNoHeapPointers, SelectMarkingType(), IsForceGC(current_gc_flags_) - ? UnifiedHeapMarker::MarkingConfig::IsForcedGC::kForced - : UnifiedHeapMarker::MarkingConfig::IsForcedGC::kNotForced}; + ? cppgc::internal::MarkingConfig::IsForcedGC::kForced + : cppgc::internal::MarkingConfig::IsForcedGC::kNotForced}; DCHECK_IMPLIES(!isolate_, (MarkingType::kAtomic == marking_config.marking_type) || force_incremental_marking_for_testing_); @@ -695,8 +772,7 @@ void CppHeap::EnterFinalPause(cppgc::EmbedderStackState stack_state) { auto& heap = *isolate()->heap(); marker.conservative_visitor().SetGlobalHandlesMarkingVisitor( std::make_unique<GlobalHandleMarkingVisitor>( - heap, *heap.mark_compact_collector()->marking_state(), - *heap.mark_compact_collector()->local_marking_worklists())); + heap, *heap.mark_compact_collector()->local_marking_worklists())); } marker.EnterAtomicPause(stack_state); if (isolate_ && *collection_type_ == CollectionType::kMinor) { @@ -753,14 +829,14 @@ void CppHeap::TraceEpilogue() { { cppgc::subtle::NoGarbageCollectionScope no_gc(*this); - cppgc::internal::Sweeper::SweepingConfig::CompactableSpaceHandling + cppgc::internal::SweepingConfig::CompactableSpaceHandling compactable_space_handling = compactor_.CompactSpacesIfEnabled(); - const cppgc::internal::Sweeper::SweepingConfig sweeping_config{ + const cppgc::internal::SweepingConfig sweeping_config{ SelectSweepingType(), compactable_space_handling, ShouldReduceMemory(current_gc_flags_) - ? cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling:: + ? cppgc::internal::SweepingConfig::FreeMemoryHandling:: kDiscardWherePossible - : cppgc::internal::Sweeper::SweepingConfig::FreeMemoryHandling:: + : cppgc::internal::SweepingConfig::FreeMemoryHandling:: kDoNotDiscard}; DCHECK_IMPLIES(!isolate_, SweepingType::kAtomic == sweeping_config.sweeping_type); @@ -772,15 +848,17 @@ void CppHeap::TraceEpilogue() { sweeper().NotifyDoneIfNeeded(); } -void CppHeap::RunMinorGC(StackState stack_state) { - DCHECK(!sweeper_.IsSweepingInProgress()); - +void CppHeap::RunMinorGCIfNeeded(StackState stack_state) { if (!generational_gc_supported()) return; if (in_no_gc_scope()) return; // Minor GC does not support nesting in full GCs. if (IsMarking()) return; // Minor GCs with the stack are currently not supported. if (stack_state == StackState::kMayContainHeapPointers) return; + // Run only when the limit is reached. + if (!minor_gc_heap_growing_->LimitReached()) return; + + DCHECK(!sweeper_.IsSweepingInProgress()); // Notify GC tracer that CppGC started young GC cycle. 
isolate_->heap()->tracer()->NotifyYoungCppGCRunning(); @@ -928,8 +1006,8 @@ class CollectCustomSpaceStatisticsAtLastGCTask final : public v8::Task { void Run() final { cppgc::internal::Sweeper& sweeper = heap_.sweeper(); if (sweeper.PerformSweepOnMutatorThread( - heap_.platform()->MonotonicallyIncreasingTime() + - kStepSizeMs.InSecondsF())) { + kStepSizeMs, + cppgc::internal::StatsCollector::kSweepInTaskForStatistics)) { // Sweeping is done. DCHECK(!sweeper.IsSweepingInProgress()); ReportCustomSpaceStatistics(heap_.raw_heap(), std::move(custom_spaces_), @@ -1004,14 +1082,15 @@ CppHeap::PauseConcurrentMarkingScope::PauseConcurrentMarkingScope( } } -void CppHeap::CollectGarbage(Config config) { +void CppHeap::CollectGarbage(cppgc::internal::GCConfig config) { if (in_no_gc_scope() || !isolate_) return; // TODO(mlippautz): Respect full config. - const int flags = (config.free_memory_handling == - Config::FreeMemoryHandling::kDiscardWherePossible) - ? Heap::kReduceMemoryFootprintMask - : Heap::kNoGCFlags; + const int flags = + (config.free_memory_handling == + cppgc::internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible) + ? Heap::kReduceMemoryFootprintMask + : Heap::kNoGCFlags; isolate_->heap()->CollectAllGarbage( flags, GarbageCollectionReason::kCppHeapAllocationFailure); } @@ -1020,7 +1099,9 @@ const cppgc::EmbedderStackState* CppHeap::override_stack_state() const { return HeapBase::override_stack_state(); } -void CppHeap::StartIncrementalGarbageCollection(Config) { UNIMPLEMENTED(); } +void CppHeap::StartIncrementalGarbageCollection(cppgc::internal::GCConfig) { + UNIMPLEMENTED(); +} size_t CppHeap::epoch() const { UNIMPLEMENTED(); } } // namespace internal diff --git a/deps/v8/src/heap/cppgc-js/cpp-heap.h b/deps/v8/src/heap/cppgc-js/cpp-heap.h index c109841ea23626..87473682d091dc 100644 --- a/deps/v8/src/heap/cppgc-js/cpp-heap.h +++ b/deps/v8/src/heap/cppgc-js/cpp-heap.h @@ -28,6 +28,7 @@ class Isolate; namespace internal { class CppMarkingState; +class MinorGCHeapGrowing; // A C++ heap implementation used with V8 to implement unified heap. class V8_EXPORT_PRIVATE CppHeap final @@ -43,9 +44,8 @@ class V8_EXPORT_PRIVATE CppHeap final }; using GarbageCollectionFlags = base::Flags<GarbageCollectionFlagValues>; - using StackState = cppgc::internal::GarbageCollector::Config::StackState; - using CollectionType = - cppgc::internal::GarbageCollector::Config::CollectionType; + using StackState = cppgc::internal::StackState; + using CollectionType = cppgc::internal::CollectionType; class MetricRecorderAdapter final : public cppgc::internal::MetricRecorder { public: @@ -139,9 +139,7 @@ class V8_EXPORT_PRIVATE CppHeap final void FinishSweepingIfRunning(); void FinishSweepingIfOutOfWork(); - void InitializeTracing( - cppgc::internal::GarbageCollector::Config::CollectionType, - GarbageCollectionFlags); + void InitializeTracing(CollectionType, GarbageCollectionFlags); void StartTracing(); bool AdvanceTracing(double max_duration); bool IsTracingDone(); @@ -149,7 +147,7 @@ class V8_EXPORT_PRIVATE CppHeap final void EnterFinalPause(cppgc::EmbedderStackState stack_state); bool FinishConcurrentMarkingIfNeeded(); - void RunMinorGC(StackState); + void RunMinorGCIfNeeded(StackState); // StatsCollector::AllocationObserver interface. void AllocatedObjectSizeIncreased(size_t) final; @@ -168,9 +166,9 @@ class V8_EXPORT_PRIVATE CppHeap final std::unique_ptr<CppMarkingState> CreateCppMarkingStateForMutatorThread(); // cppgc::internal::GarbageCollector interface. 
- void CollectGarbage(Config) override; + void CollectGarbage(cppgc::internal::GCConfig) override; const cppgc::EmbedderStackState* override_stack_state() const override; - void StartIncrementalGarbageCollection(Config) override; + void StartIncrementalGarbageCollection(cppgc::internal::GCConfig) override; size_t epoch() const override; private: @@ -194,10 +192,14 @@ class V8_EXPORT_PRIVATE CppHeap final Isolate* isolate_ = nullptr; bool marking_done_ = false; // |collection_type_| is initialized when marking is in progress. - base::Optional<cppgc::internal::GarbageCollector::Config::CollectionType> - collection_type_; + base::Optional<CollectionType> collection_type_; GarbageCollectionFlags current_gc_flags_; + std::unique_ptr<MinorGCHeapGrowing> minor_gc_heap_growing_; + + std::unique_ptr<cppgc::internal::Sweeper::SweepingOnMutatorThreadObserver> + sweeping_on_mutator_thread_observer_; + // Buffered allocated bytes. Reporting allocated bytes to V8 can trigger a GC // atomic pause. Allocated bytes are buffered in case this is temporarily // prohibited. @@ -207,7 +209,6 @@ class V8_EXPORT_PRIVATE CppHeap final bool in_detached_testing_mode_ = false; bool force_incremental_marking_for_testing_ = false; - bool is_in_v8_marking_step_ = false; friend class MetricRecorderAdapter; diff --git a/deps/v8/src/heap/cppgc-js/unified-heap-marking-state-inl.h b/deps/v8/src/heap/cppgc-js/unified-heap-marking-state-inl.h index afa830b0519e9b..212b41ed1d3ecf 100644 --- a/deps/v8/src/heap/cppgc-js/unified-heap-marking-state-inl.h +++ b/deps/v8/src/heap/cppgc-js/unified-heap-marking-state-inl.h @@ -14,6 +14,7 @@ #include "src/heap/cppgc-js/unified-heap-marking-state.h" #include "src/heap/heap.h" #include "src/heap/mark-compact.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/marking-worklist-inl.h" namespace v8 { diff --git a/deps/v8/src/heap/cppgc-js/unified-heap-marking-state.cc b/deps/v8/src/heap/cppgc-js/unified-heap-marking-state.cc index 10c2c19cac7a5b..116563769f45c1 100644 --- a/deps/v8/src/heap/cppgc-js/unified-heap-marking-state.cc +++ b/deps/v8/src/heap/cppgc-js/unified-heap-marking-state.cc @@ -13,8 +13,7 @@ namespace internal { UnifiedHeapMarkingState::UnifiedHeapMarkingState( Heap* heap, MarkingWorklists::Local* local_marking_worklist) : heap_(heap), - marking_state_(heap_ ? heap_->mark_compact_collector()->marking_state() - : nullptr), + marking_state_(heap_ ?
heap_->marking_state() : nullptr), local_marking_worklist_(local_marking_worklist), track_retaining_path_(v8_flags.track_retaining_path) { DCHECK_IMPLIES(v8_flags.track_retaining_path, diff --git a/deps/v8/src/heap/cppgc-js/unified-heap-marking-verifier.cc b/deps/v8/src/heap/cppgc-js/unified-heap-marking-verifier.cc index b8940d145a387f..3ac2a93dee8f27 100644 --- a/deps/v8/src/heap/cppgc-js/unified-heap-marking-verifier.cc +++ b/deps/v8/src/heap/cppgc-js/unified-heap-marking-verifier.cc @@ -57,7 +57,7 @@ class UnifiedHeapVerificationVisitor final : public JSVisitor { UnifiedHeapMarkingVerifier::UnifiedHeapMarkingVerifier( cppgc::internal::HeapBase& heap_base, - cppgc::internal::Heap::Config::CollectionType collection_type) + cppgc::internal::CollectionType collection_type) : MarkingVerifierBase( heap_base, collection_type, state_, std::make_unique<UnifiedHeapVerificationVisitor>(state_)) {} diff --git a/deps/v8/src/heap/cppgc-js/unified-heap-marking-verifier.h b/deps/v8/src/heap/cppgc-js/unified-heap-marking-verifier.h index 71bed04573541a..78a6ce1e69168f 100644 --- a/deps/v8/src/heap/cppgc-js/unified-heap-marking-verifier.h +++ b/deps/v8/src/heap/cppgc-js/unified-heap-marking-verifier.h @@ -14,7 +14,7 @@ class V8_EXPORT_PRIVATE UnifiedHeapMarkingVerifier final : public cppgc::internal::MarkingVerifierBase { public: UnifiedHeapMarkingVerifier(cppgc::internal::HeapBase&, - cppgc::internal::Heap::Config::CollectionType); + cppgc::internal::CollectionType); ~UnifiedHeapMarkingVerifier() final = default; private: diff --git a/deps/v8/src/heap/cppgc/compactor.cc b/deps/v8/src/heap/cppgc/compactor.cc index e3792a32f83f01..68ee147dda8798 100644 --- a/deps/v8/src/heap/cppgc/compactor.cc +++ b/deps/v8/src/heap/cppgc/compactor.cc @@ -452,13 +452,11 @@ Compactor::Compactor(RawHeap& heap) : heap_(heap) { } } -bool Compactor::ShouldCompact( - GarbageCollector::Config::MarkingType marking_type, - GarbageCollector::Config::StackState stack_state) const { +bool Compactor::ShouldCompact(GCConfig::MarkingType marking_type, + StackState stack_state) const { if (compactable_spaces_.empty() || - (marking_type == GarbageCollector::Config::MarkingType::kAtomic && - stack_state == - GarbageCollector::Config::StackState::kMayContainHeapPointers)) { + (marking_type == GCConfig::MarkingType::kAtomic && + stack_state == StackState::kMayContainHeapPointers)) { // The following check ensures that tests that want to test compaction are // not interrupted by garbage collections that cannot use compaction. 
DCHECK(!enable_for_next_gc_for_testing_); @@ -474,9 +472,8 @@ bool Compactor::ShouldCompact( return free_list_size > kFreeListSizeThreshold; } -void Compactor::InitializeIfShouldCompact( - GarbageCollector::Config::MarkingType marking_type, - GarbageCollector::Config::StackState stack_state) { +void Compactor::InitializeIfShouldCompact(GCConfig::MarkingType marking_type, + StackState stack_state) { DCHECK(!is_enabled_); if (!ShouldCompact(marking_type, stack_state)) return; @@ -487,9 +484,8 @@ void Compactor::InitializeIfShouldCompact( is_cancelled_ = false; } -void Compactor::CancelIfShouldNotCompact( - GarbageCollector::Config::MarkingType marking_type, - GarbageCollector::Config::StackState stack_state) { +void Compactor::CancelIfShouldNotCompact(GCConfig::MarkingType marking_type, + StackState stack_state) { if (!is_enabled_ || ShouldCompact(marking_type, stack_state)) return; is_cancelled_ = true; diff --git a/deps/v8/src/heap/cppgc/compactor.h b/deps/v8/src/heap/cppgc/compactor.h index d79e6a7a65053b..9638996a429002 100644 --- a/deps/v8/src/heap/cppgc/compactor.h +++ b/deps/v8/src/heap/cppgc/compactor.h @@ -12,9 +12,10 @@ namespace cppgc { namespace internal { +class NormalPageSpace; + class V8_EXPORT_PRIVATE Compactor final { - using CompactableSpaceHandling = - Sweeper::SweepingConfig::CompactableSpaceHandling; + using CompactableSpaceHandling = SweepingConfig::CompactableSpaceHandling; public: explicit Compactor(RawHeap&); @@ -23,10 +24,8 @@ class V8_EXPORT_PRIVATE Compactor final { Compactor(const Compactor&) = delete; Compactor& operator=(const Compactor&) = delete; - void InitializeIfShouldCompact(GarbageCollector::Config::MarkingType, - GarbageCollector::Config::StackState); - void CancelIfShouldNotCompact(GarbageCollector::Config::MarkingType, - GarbageCollector::Config::StackState); + void InitializeIfShouldCompact(GCConfig::MarkingType, StackState); + void CancelIfShouldNotCompact(GCConfig::MarkingType, StackState); // Returns whether spaces need to be processed by the Sweeper after // compaction. CompactableSpaceHandling CompactSpacesIfEnabled(); @@ -39,8 +38,7 @@ class V8_EXPORT_PRIVATE Compactor final { bool IsEnabledForTesting() const { return is_enabled_; } private: - bool ShouldCompact(GarbageCollector::Config::MarkingType, - GarbageCollector::Config::StackState) const; + bool ShouldCompact(GCConfig::MarkingType, StackState) const; RawHeap& heap_; // Compactor does not own the compactable spaces. The heap owns all spaces. diff --git a/deps/v8/src/heap/cppgc/explicit-management.cc b/deps/v8/src/heap/cppgc/explicit-management.cc index 3a18bd3369e841..560b18dc588d5d 100644 --- a/deps/v8/src/heap/cppgc/explicit-management.cc +++ b/deps/v8/src/heap/cppgc/explicit-management.cc @@ -11,6 +11,7 @@ #include "src/heap/cppgc/heap-object-header.h" #include "src/heap/cppgc/heap-page.h" #include "src/heap/cppgc/memory.h" +#include "src/heap/cppgc/object-view.h" namespace cppgc { namespace internal { @@ -36,21 +37,30 @@ void ExplicitManagementImpl::FreeUnreferencedObject(HeapHandle& heap_handle, auto& header = HeapObjectHeader::FromObject(object); header.Finalize(); - size_t object_size = 0; - USE(object_size); - // `object` is guaranteed to be of type GarbageCollected, so getting the // BasePage is okay for regular and large objects. 
BasePage* base_page = BasePage::FromPayload(object); + +#if defined(CPPGC_YOUNG_GENERATION) + const size_t object_size = ObjectView<>(header).Size(); + + if (auto& heap_base = HeapBase::From(heap_handle); + heap_base.generational_gc_supported()) { + heap_base.remembered_set().InvalidateRememberedSlotsInRange( + object, reinterpret_cast<uint8_t*>(object) + object_size); + // If this object was registered as remembered, remove it. Do that before + // the page gets destroyed. + heap_base.remembered_set().InvalidateRememberedSourceObject(header); + } +#endif // defined(CPPGC_YOUNG_GENERATION) + if (base_page->is_large()) { // Large object. - object_size = LargePage::From(base_page)->ObjectSize(); base_page->space().RemovePage(base_page); base_page->heap().stats_collector()->NotifyExplicitFree( LargePage::From(base_page)->PayloadSize()); LargePage::Destroy(LargePage::From(base_page)); } else { // Regular object. const size_t header_size = header.AllocatedSize(); - object_size = header.ObjectSize(); auto* normal_page = NormalPage::From(base_page); auto& normal_space = *static_cast<NormalPageSpace*>(&base_page->space()); auto& lab = normal_space.linear_allocation_buffer(); @@ -66,15 +76,6 @@ void ExplicitManagementImpl::FreeUnreferencedObject(HeapHandle& heap_handle, // list entry. } } -#if defined(CPPGC_YOUNG_GENERATION) - auto& heap_base = HeapBase::From(heap_handle); - if (heap_base.generational_gc_supported()) { - heap_base.remembered_set().InvalidateRememberedSlotsInRange( - object, reinterpret_cast<uint8_t*>(object) + object_size); - // If this object was registered as remembered, remove it. - heap_base.remembered_set().InvalidateRememberedSourceObject(header); - } -#endif // defined(CPPGC_YOUNG_GENERATION) } namespace { diff --git a/deps/v8/src/heap/cppgc/garbage-collector.h b/deps/v8/src/heap/cppgc/garbage-collector.h index a49a7a1badbaf4..8a08f56b6b177a 100644 --- a/deps/v8/src/heap/cppgc/garbage-collector.h +++ b/deps/v8/src/heap/cppgc/garbage-collector.h @@ -6,8 +6,7 @@ #define V8_HEAP_CPPGC_GARBAGE_COLLECTOR_H_ #include "include/cppgc/common.h" -#include "src/heap/cppgc/marker.h" -#include "src/heap/cppgc/sweeper.h" +#include "src/heap/cppgc/heap-config.h" namespace cppgc { namespace internal { @@ -16,62 +15,9 @@ namespace internal { // needed to mock/fake GC for testing. 
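For context on the FreeUnreferencedObject() change above: this internal path backs the public cppgc::subtle::FreeUnreferencedObject() helper, which eagerly reclaims an object instead of waiting for the next GC. A hedged usage sketch (the Document type is illustrative):

#include "include/cppgc/explicit-management.h"
#include "include/cppgc/garbage-collected.h"

class Document final : public cppgc::GarbageCollected<Document> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

void ReleaseEagerly(cppgc::HeapHandle& heap_handle, Document& doc) {
  // Frees the object immediately; with CPPGC_YOUNG_GENERATION enabled this is
  // the call that makes the remembered-set invalidation above necessary, as
  // stale slots must not outlive the freed range.
  cppgc::subtle::FreeUnreferencedObject(heap_handle, doc);
}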
class GarbageCollector { public: - struct Config { - using CollectionType = Marker::MarkingConfig::CollectionType; - using StackState = cppgc::Heap::StackState; - using MarkingType = Marker::MarkingConfig::MarkingType; - using SweepingType = Sweeper::SweepingConfig::SweepingType; - using FreeMemoryHandling = Sweeper::SweepingConfig::FreeMemoryHandling; - using IsForcedGC = Marker::MarkingConfig::IsForcedGC; - - static constexpr Config ConservativeAtomicConfig() { - return {CollectionType::kMajor, StackState::kMayContainHeapPointers, - MarkingType::kAtomic, SweepingType::kAtomic}; - } - - static constexpr Config PreciseAtomicConfig() { - return {CollectionType::kMajor, StackState::kNoHeapPointers, - MarkingType::kAtomic, SweepingType::kAtomic}; - } - - static constexpr Config ConservativeIncrementalConfig() { - return {CollectionType::kMajor, StackState::kMayContainHeapPointers, - MarkingType::kIncremental, SweepingType::kAtomic}; - } - - static constexpr Config PreciseIncrementalConfig() { - return {CollectionType::kMajor, StackState::kNoHeapPointers, - MarkingType::kIncremental, SweepingType::kAtomic}; - } - - static constexpr Config - PreciseIncrementalMarkingConcurrentSweepingConfig() { - return {CollectionType::kMajor, StackState::kNoHeapPointers, - MarkingType::kIncremental, - SweepingType::kIncrementalAndConcurrent}; - } - - static constexpr Config MinorPreciseAtomicConfig() { - return {CollectionType::kMinor, StackState::kNoHeapPointers, - MarkingType::kAtomic, SweepingType::kAtomic}; - } - - static constexpr Config MinorConservativeAtomicConfig() { - return {CollectionType::kMinor, StackState::kMayContainHeapPointers, - MarkingType::kAtomic, SweepingType::kAtomic}; - } - - CollectionType collection_type = CollectionType::kMajor; - StackState stack_state = StackState::kMayContainHeapPointers; - MarkingType marking_type = MarkingType::kAtomic; - SweepingType sweeping_type = SweepingType::kAtomic; - FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard; - IsForcedGC is_forced_gc = IsForcedGC::kNotForced; - }; - // Executes a garbage collection specified in config. - virtual void CollectGarbage(Config) = 0; - virtual void StartIncrementalGarbageCollection(Config) = 0; + virtual void CollectGarbage(GCConfig) = 0; + virtual void StartIncrementalGarbageCollection(GCConfig) = 0; // The current epoch that the GC maintains. The epoch is increased on every // GC invocation. 
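With the nested Config struct removed, callers now pass the free-standing GCConfig defined in the new heap-config.h below. A representative call site under the new interface might look like this sketch:

// Sketch: requesting a forced, precise, atomic collection via the interface.
void ForcePreciseGC(cppgc::internal::GarbageCollector& collector) {
  auto config = cppgc::internal::GCConfig::PreciseAtomicConfig();
  config.is_forced_gc = cppgc::internal::GCConfig::IsForcedGC::kForced;
  collector.CollectGarbage(config);
}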
diff --git a/deps/v8/src/heap/cppgc/gc-invoker.cc b/deps/v8/src/heap/cppgc/gc-invoker.cc index 1bddad7a7e0d4f..8561437552eb71 100644 --- a/deps/v8/src/heap/cppgc/gc-invoker.cc +++ b/deps/v8/src/heap/cppgc/gc-invoker.cc @@ -8,7 +8,6 @@ #include "include/cppgc/common.h" #include "include/cppgc/platform.h" -#include "src/heap/cppgc/heap.h" #include "src/heap/cppgc/task-handle.h" namespace cppgc { @@ -22,8 +21,8 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector { GCInvokerImpl(const GCInvokerImpl&) = delete; GCInvokerImpl& operator=(const GCInvokerImpl&) = delete; - void CollectGarbage(GarbageCollector::Config) final; - void StartIncrementalGarbageCollection(GarbageCollector::Config) final; + void CollectGarbage(GCConfig) final; + void StartIncrementalGarbageCollection(GCConfig) final; size_t epoch() const final { return collector_->epoch(); } const EmbedderStackState* override_stack_state() const final { return collector_->override_stack_state(); @@ -35,7 +34,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector { using Handle = SingleThreadedHandle; static Handle Post(GarbageCollector* collector, cppgc::TaskRunner* runner, - GarbageCollector::Config config) { + GCConfig config) { auto task = std::make_unique<GCInvoker::GCInvokerImpl::GCTask>(collector, config); auto handle = task->GetHandle(); @@ -43,8 +42,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector { return handle; } - explicit GCTask(GarbageCollector* collector, - GarbageCollector::Config config) + explicit GCTask(GarbageCollector* collector, GCConfig config) : collector_(collector), config_(config), handle_(Handle::NonEmptyTag{}), @@ -63,7 +61,7 @@ class GCInvoker::GCInvokerImpl final : public GarbageCollector { Handle GetHandle() { return handle_; } GarbageCollector* collector_; - GarbageCollector::Config config_; + GCConfig config_; Handle handle_; size_t saved_epoch_; }; @@ -87,10 +85,9 @@ GCInvoker::GCInvokerImpl::~GCInvokerImpl() { } } -void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) { +void GCInvoker::GCInvokerImpl::CollectGarbage(GCConfig config) { DCHECK_EQ(config.marking_type, cppgc::Heap::MarkingType::kAtomic); - if ((config.stack_state == - GarbageCollector::Config::StackState::kNoHeapPointers) || + if ((config.stack_state == StackState::kNoHeapPointers) || (stack_support_ == cppgc::Heap::StackSupport::kSupportsConservativeStackScan)) { collector_->CollectGarbage(config); @@ -98,8 +95,7 @@ void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) { platform_->GetForegroundTaskRunner()->NonNestableTasksEnabled()) { if (!gc_task_handle_) { // Force a precise GC since it will run in a non-nestable task. 
- config.stack_state = - GarbageCollector::Config::StackState::kNoHeapPointers; + config.stack_state = StackState::kNoHeapPointers; DCHECK_NE(cppgc::Heap::StackSupport::kSupportsConservativeStackScan, stack_support_); gc_task_handle_ = GCTask::Post( @@ -109,7 +105,7 @@ void GCInvoker::GCInvokerImpl::CollectGarbage(GarbageCollector::Config config) { } void GCInvoker::GCInvokerImpl::StartIncrementalGarbageCollection( - GarbageCollector::Config config) { + GCConfig config) { DCHECK_NE(config.marking_type, cppgc::Heap::MarkingType::kAtomic); if ((stack_support_ != cppgc::Heap::StackSupport::kSupportsConservativeStackScan) && @@ -134,12 +130,11 @@ GCInvoker::GCInvoker(GarbageCollector* collector, cppgc::Platform* platform, GCInvoker::~GCInvoker() = default; -void GCInvoker::CollectGarbage(GarbageCollector::Config config) { +void GCInvoker::CollectGarbage(GCConfig config) { impl_->CollectGarbage(config); } -void GCInvoker::StartIncrementalGarbageCollection( - GarbageCollector::Config config) { +void GCInvoker::StartIncrementalGarbageCollection(GCConfig config) { impl_->StartIncrementalGarbageCollection(config); } diff --git a/deps/v8/src/heap/cppgc/gc-invoker.h b/deps/v8/src/heap/cppgc/gc-invoker.h index ceebca139c8355..c3c379721b5bf0 100644 --- a/deps/v8/src/heap/cppgc/gc-invoker.h +++ b/deps/v8/src/heap/cppgc/gc-invoker.h @@ -34,8 +34,8 @@ class V8_EXPORT_PRIVATE GCInvoker final : public GarbageCollector { GCInvoker(const GCInvoker&) = delete; GCInvoker& operator=(const GCInvoker&) = delete; - void CollectGarbage(GarbageCollector::Config) final; - void StartIncrementalGarbageCollection(GarbageCollector::Config) final; + void CollectGarbage(GCConfig) final; + void StartIncrementalGarbageCollection(GCConfig) final; size_t epoch() const final; const EmbedderStackState* override_stack_state() const final; diff --git a/deps/v8/src/heap/cppgc/globals.h b/deps/v8/src/heap/cppgc/globals.h index 19d5cca59cbb9d..84fb389a7efd27 100644 --- a/deps/v8/src/heap/cppgc/globals.h +++ b/deps/v8/src/heap/cppgc/globals.h @@ -80,6 +80,12 @@ constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB; #endif // !defined(CPPGC_2GB_CAGE) constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize; +#if defined(CPPGC_POINTER_COMPRESSION) +constexpr size_t kSlotSize = sizeof(uint32_t); +#else // !defined(CPPGC_POINTER_COMPRESSION) +constexpr size_t kSlotSize = sizeof(uintptr_t); +#endif // !defined(CPPGC_POINTER_COMPRESSION) + } // namespace internal } // namespace cppgc diff --git a/deps/v8/src/heap/cppgc/heap-base.cc b/deps/v8/src/heap/cppgc/heap-base.cc index d057d820c8d82b..3b17bb8aa68d1c 100644 --- a/deps/v8/src/heap/cppgc/heap-base.cc +++ b/deps/v8/src/heap/cppgc/heap-base.cc @@ -250,18 +250,16 @@ void HeapBase::Terminate() { #endif // defined(CPPGC_YOUNG_GENERATION) in_atomic_pause_ = true; - stats_collector()->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kForced); + stats_collector()->NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kForced); object_allocator().ResetLinearAllocationBuffers(); stats_collector()->NotifyMarkingCompleted(0); ExecutePreFinalizers(); // TODO(chromium:1029379): Prefinalizers may black-allocate objects (under a // compile-time option). Run sweeping with forced finalization here. 
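The kSlotSize constant added to globals.h above captures how wide an in-heap reference slot is: a compressed Member stores a 32-bit value, while an uncompressed one holds a full word. A compile-time restatement of that invariant (a sketch under the same CPPGC_POINTER_COMPRESSION define; assumes src/heap/cppgc/globals.h is included):

#if defined(CPPGC_POINTER_COMPRESSION)
static_assert(kSlotSize == sizeof(uint32_t),
              "compressed Member slots hold a 32-bit value");
#else   // !defined(CPPGC_POINTER_COMPRESSION)
static_assert(kSlotSize == sizeof(uintptr_t),
              "uncompressed Member slots hold a full pointer");
#endif  // !defined(CPPGC_POINTER_COMPRESSION)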
- sweeper().Start( - {Sweeper::SweepingConfig::SweepingType::kAtomic, - Sweeper::SweepingConfig::CompactableSpaceHandling::kSweep}); + sweeper().Start({SweepingConfig::SweepingType::kAtomic, + SweepingConfig::CompactableSpaceHandling::kSweep}); in_atomic_pause_ = false; sweeper().NotifyDoneIfNeeded(); diff --git a/deps/v8/src/heap/cppgc/heap-config.h b/deps/v8/src/heap/cppgc/heap-config.h new file mode 100644 index 00000000000000..a89581387bff69 --- /dev/null +++ b/deps/v8/src/heap/cppgc/heap-config.h @@ -0,0 +1,103 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_HEAP_CPPGC_HEAP_CONFIG_H_ +#define V8_HEAP_CPPGC_HEAP_CONFIG_H_ + +#include "include/cppgc/heap.h" + +namespace cppgc::internal { + +using StackState = cppgc::Heap::StackState; + +enum class CollectionType : uint8_t { + kMinor, + kMajor, +}; + +struct MarkingConfig { + using MarkingType = cppgc::Heap::MarkingType; + enum class IsForcedGC : uint8_t { + kNotForced, + kForced, + }; + + static constexpr MarkingConfig Default() { return {}; } + + const CollectionType collection_type = CollectionType::kMajor; + StackState stack_state = StackState::kMayContainHeapPointers; + MarkingType marking_type = MarkingType::kIncremental; + IsForcedGC is_forced_gc = IsForcedGC::kNotForced; +}; + +struct SweepingConfig { + using SweepingType = cppgc::Heap::SweepingType; + enum class CompactableSpaceHandling { kSweep, kIgnore }; + enum class FreeMemoryHandling { kDoNotDiscard, kDiscardWherePossible }; + + SweepingType sweeping_type = SweepingType::kIncrementalAndConcurrent; + CompactableSpaceHandling compactable_space_handling = + CompactableSpaceHandling::kSweep; + FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard; +}; + +struct GCConfig { + using MarkingType = MarkingConfig::MarkingType; + using SweepingType = SweepingConfig::SweepingType; + using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling; + using IsForcedGC = MarkingConfig::IsForcedGC; + + static constexpr GCConfig ConservativeAtomicConfig() { + return {CollectionType::kMajor, StackState::kMayContainHeapPointers, + MarkingType::kAtomic, SweepingType::kAtomic}; + } + + static constexpr GCConfig PreciseAtomicConfig() { + return {CollectionType::kMajor, StackState::kNoHeapPointers, + MarkingType::kAtomic, SweepingType::kAtomic}; + } + + static constexpr GCConfig ConservativeIncrementalConfig() { + return {CollectionType::kMajor, StackState::kMayContainHeapPointers, + MarkingType::kIncremental, SweepingType::kAtomic}; + } + + static constexpr GCConfig PreciseIncrementalConfig() { + return {CollectionType::kMajor, StackState::kNoHeapPointers, + MarkingType::kIncremental, SweepingType::kAtomic}; + } + + static constexpr GCConfig + PreciseIncrementalMarkingConcurrentSweepingConfig() { + return {CollectionType::kMajor, StackState::kNoHeapPointers, + MarkingType::kIncremental, SweepingType::kIncrementalAndConcurrent}; + } + + static constexpr GCConfig PreciseConcurrentConfig() { + return {CollectionType::kMajor, StackState::kNoHeapPointers, + MarkingType::kIncrementalAndConcurrent, + SweepingType::kIncrementalAndConcurrent}; + } + + static constexpr GCConfig MinorPreciseAtomicConfig() { + return {CollectionType::kMinor, StackState::kNoHeapPointers, + MarkingType::kAtomic, SweepingType::kAtomic}; + } + + static constexpr GCConfig MinorConservativeAtomicConfig() { + return {CollectionType::kMinor, 
StackState::kMayContainHeapPointers, + MarkingType::kAtomic, SweepingType::kAtomic}; + } + + CollectionType collection_type = CollectionType::kMajor; + StackState stack_state = StackState::kMayContainHeapPointers; + MarkingType marking_type = MarkingType::kAtomic; + SweepingType sweeping_type = SweepingType::kAtomic; + FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard; + IsForcedGC is_forced_gc = IsForcedGC::kNotForced; +}; + +} // namespace cppgc::internal + +#endif // V8_HEAP_CPPGC_HEAP_CONFIG_H_ diff --git a/deps/v8/src/heap/cppgc/heap-growing.cc b/deps/v8/src/heap/cppgc/heap-growing.cc index 1055626a0a3548..0af0119863aebd 100644 --- a/deps/v8/src/heap/cppgc/heap-growing.cc +++ b/deps/v8/src/heap/cppgc/heap-growing.cc @@ -93,14 +93,12 @@ void HeapGrowing::HeapGrowingImpl::AllocatedObjectSizeIncreased(size_t) { size_t allocated_object_size = stats_collector_->allocated_object_size(); if (allocated_object_size > limit_for_atomic_gc_) { collector_->CollectGarbage( - {GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::StackState::kMayContainHeapPointers, - GarbageCollector::Config::MarkingType::kAtomic, sweeping_support_}); + {CollectionType::kMajor, StackState::kMayContainHeapPointers, + GCConfig::MarkingType::kAtomic, sweeping_support_}); } else if (allocated_object_size > limit_for_incremental_gc_) { if (marking_support_ == cppgc::Heap::MarkingType::kAtomic) return; collector_->StartIncrementalGarbageCollection( - {GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::StackState::kMayContainHeapPointers, + {CollectionType::kMajor, StackState::kMayContainHeapPointers, marking_support_, sweeping_support_}); } } diff --git a/deps/v8/src/heap/cppgc/heap-page.cc b/deps/v8/src/heap/cppgc/heap-page.cc index 07baf2e79d2c14..7e85eeca475e3d 100644 --- a/deps/v8/src/heap/cppgc/heap-page.cc +++ b/deps/v8/src/heap/cppgc/heap-page.cc @@ -18,6 +18,7 @@ #include "src/heap/cppgc/object-start-bitmap.h" #include "src/heap/cppgc/page-memory.h" #include "src/heap/cppgc/raw-heap.h" +#include "src/heap/cppgc/remembered-set.h" #include "src/heap/cppgc/stats-collector.h" namespace cppgc { @@ -85,6 +86,13 @@ ConstAddress BasePage::PayloadEnd() const { return const_cast<BasePage*>(this)->PayloadEnd(); } +size_t BasePage::AllocatedSize() const { + return is_large() ? LargePage::PageHeaderSize() + + LargePage::From(this)->PayloadSize() + : NormalPage::From(this)->PayloadSize() + + RoundUp(sizeof(NormalPage), kAllocationGranularity); +} + size_t BasePage::AllocatedBytesAtLastGC() const { return is_large() ? 
LargePage::From(this)->AllocatedBytesAtLastGC() : NormalPage::From(this)->AllocatedBytesAtLastGC(); @@ -120,8 +128,32 @@ const HeapObjectHeader* BasePage::TryObjectHeaderFromInnerAddress( return header; } +#if defined(CPPGC_YOUNG_GENERATION) +void BasePage::AllocateSlotSet() { + DCHECK_NULL(slot_set_); + slot_set_ = decltype(slot_set_)( + static_cast<SlotSet*>( + SlotSet::Allocate(SlotSet::BucketsForSize(AllocatedSize()))), + SlotSetDeleter{AllocatedSize()}); +} + +void BasePage::SlotSetDeleter::operator()(SlotSet* slot_set) const { + DCHECK_NOT_NULL(slot_set); + SlotSet::Delete(slot_set, SlotSet::BucketsForSize(page_size_)); +} + +void BasePage::ResetSlotSet() { slot_set_.reset(); } +#endif // defined(CPPGC_YOUNG_GENERATION) + BasePage::BasePage(HeapBase& heap, BaseSpace& space, PageType type) - : BasePageHandle(heap), space_(space), type_(type) { + : BasePageHandle(heap), + space_(space), + type_(type) +#if defined(CPPGC_YOUNG_GENERATION) + , + slot_set_(nullptr, SlotSetDeleter{}) +#endif // defined(CPPGC_YOUNG_GENERATION) +{ DCHECK_EQ(0u, (reinterpret_cast<uintptr_t>(this) - kGuardPageSize) & kPageOffsetMask); DCHECK_EQ(&heap.raw_heap(), space_.raw_heap()); diff --git a/deps/v8/src/heap/cppgc/heap-page.h b/deps/v8/src/heap/cppgc/heap-page.h index f20f159e73a65e..a60bb1448d29c8 100644 --- a/deps/v8/src/heap/cppgc/heap-page.h +++ b/deps/v8/src/heap/cppgc/heap-page.h @@ -8,6 +8,7 @@ #include "include/cppgc/internal/base-page-handle.h" #include "src/base/iterator.h" #include "src/base/macros.h" +#include "src/heap/base/basic-slot-set.h" #include "src/heap/cppgc/globals.h" #include "src/heap/cppgc/heap-object-header.h" #include "src/heap/cppgc/object-start-bitmap.h" @@ -20,6 +21,7 @@ class NormalPageSpace; class LargePageSpace; class HeapBase; class PageBackend; +class SlotSet; class V8_EXPORT_PRIVATE BasePage : public BasePageHandle { public: @@ -45,6 +47,9 @@ class V8_EXPORT_PRIVATE BasePage : public BasePageHandle { Address PayloadEnd(); ConstAddress PayloadEnd() const; + // Size of the payload with the page header. + size_t AllocatedSize() const; + // Returns the size of live objects on the page at the last GC. // The counter is updated after sweeping.
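AllocateSlotSet() and SlotSetDeleter above must agree on the bucket count, which BucketsForSize() derives from the page's allocated size; that is why the deleter carries page_size_. Remembered slots are later recorded as page-relative offsets, as in this sketch mirroring OldToNewRememberedSet::AddSlot() further down in the patch:

// Sketch: slots are stored as offsets from the page base, so a slot set sized
// by BucketsForSize(page.AllocatedSize()) can address every slot on the page.
void RecordSlot(cppgc::internal::BasePage& page, void* slot) {
  auto& slot_set = page.GetOrAllocateSlotSet();
  const uintptr_t offset =
      reinterpret_cast<uintptr_t>(slot) - reinterpret_cast<uintptr_t>(&page);
  slot_set.Insert<cppgc::internal::SlotSet::AccessMode::NON_ATOMIC>(
      static_cast<size_t>(offset));
}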
size_t AllocatedBytesAtLastGC() const; @@ -92,14 +97,29 @@ class V8_EXPORT_PRIVATE BasePage : public BasePageHandle { contains_young_objects_ = value; } +#if defined(CPPGC_YOUNG_GENERATION) + V8_INLINE SlotSet* slot_set() const { return slot_set_.get(); } + V8_INLINE SlotSet& GetOrAllocateSlotSet(); + void ResetSlotSet(); +#endif // defined(CPPGC_YOUNG_GENERATION) + protected: enum class PageType : uint8_t { kNormal, kLarge }; BasePage(HeapBase&, BaseSpace&, PageType); private: + struct SlotSetDeleter { + void operator()(SlotSet*) const; + size_t page_size_ = 0; + }; + void AllocateSlotSet(); + BaseSpace& space_; PageType type_; bool contains_young_objects_ = false; +#if defined(CPPGC_YOUNG_GENERATION) + std::unique_ptr<SlotSet, SlotSetDeleter> slot_set_; +#endif // defined(CPPGC_YOUNG_GENERATION) size_t discarded_memory_ = 0; }; @@ -311,6 +331,13 @@ const HeapObjectHeader& BasePage::ObjectHeaderFromInnerAddress( return *header; } +#if defined(CPPGC_YOUNG_GENERATION) +SlotSet& BasePage::GetOrAllocateSlotSet() { + if (!slot_set_) AllocateSlotSet(); + return *slot_set_; +} +#endif // defined(CPPGC_YOUNG_GENERATION) + } // namespace internal } // namespace cppgc diff --git a/deps/v8/src/heap/cppgc/heap.cc b/deps/v8/src/heap/cppgc/heap.cc index 9cd52b8dd0c8ba..7bc55b51dea6c0 100644 --- a/deps/v8/src/heap/cppgc/heap.cc +++ b/deps/v8/src/heap/cppgc/heap.cc @@ -45,11 +45,10 @@ std::unique_ptr<Heap> Heap::Create(std::shared_ptr<cppgc::Platform> platform, void Heap::ForceGarbageCollectionSlow(const char* source, const char* reason, Heap::StackState stack_state) { internal::Heap::From(this)->CollectGarbage( - {internal::GarbageCollector::Config::CollectionType::kMajor, stack_state, - MarkingType::kAtomic, SweepingType::kAtomic, - internal::GarbageCollector::Config::FreeMemoryHandling:: - kDiscardWherePossible, - internal::GarbageCollector::Config::IsForcedGC::kForced}); + {internal::CollectionType::kMajor, stack_state, MarkingType::kAtomic, + SweepingType::kAtomic, + internal::GCConfig::FreeMemoryHandling::kDiscardWherePossible, + internal::GCConfig::IsForcedGC::kForced}); } AllocationHandle& Heap::GetAllocationHandle() { @@ -62,12 +61,11 @@ namespace internal { namespace { -void CheckConfig(Heap::Config config, HeapBase::MarkingType marking_support, +void CheckConfig(GCConfig config, HeapBase::MarkingType marking_support, HeapBase::SweepingType sweeping_support) { - CHECK_WITH_MSG( - (config.collection_type != Heap::Config::CollectionType::kMinor) || - (config.stack_state == Heap::Config::StackState::kNoHeapPointers), - "Minor GCs with stack is currently not supported"); + CHECK_WITH_MSG((config.collection_type != CollectionType::kMinor) || + (config.stack_state == StackState::kNoHeapPointers), + "Minor GCs with stack is currently not supported"); CHECK_LE(static_cast<int>(config.marking_type), static_cast<int>(marking_support)); CHECK_LE(static_cast<int>(config.sweeping_type), @@ -94,17 +92,16 @@ Heap::~Heap() { // Gracefully finish already running GC if any, but don't finalize live // objects. 
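A note on CheckConfig() above: the marking enum in cppgc::Heap is ordered from least to most capable (kAtomic < kIncremental < kIncrementalAndConcurrent, and analogously for sweeping), so a numeric CHECK_LE is enough to express that the requested mode must not exceed what the heap supports. Restated as a sketch:

// Sketch: enum order encodes capability, so support checks are comparisons.
bool ModeSupported(cppgc::Heap::MarkingType requested,
                   cppgc::Heap::MarkingType supported) {
  return static_cast<int>(requested) <= static_cast<int>(supported);
}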
FinalizeIncrementalGarbageCollectionIfRunning( - {Config::CollectionType::kMajor, - Config::StackState::kMayContainHeapPointers, - Config::MarkingType::kAtomic, Config::SweepingType::kAtomic}); + {CollectionType::kMajor, StackState::kMayContainHeapPointers, + GCConfig::MarkingType::kAtomic, GCConfig::SweepingType::kAtomic}); { subtle::NoGarbageCollectionScope no_gc(*this); sweeper_.FinishIfRunning(); } } -void Heap::CollectGarbage(Config config) { - DCHECK_EQ(Config::MarkingType::kAtomic, config.marking_type); +void Heap::CollectGarbage(GCConfig config) { + DCHECK_EQ(GCConfig::MarkingType::kAtomic, config.marking_type); CheckConfig(config, marking_support_, sweeping_support_); if (in_no_gc_scope()) return; @@ -118,9 +115,9 @@ void Heap::CollectGarbage(Config config) { FinalizeGarbageCollection(config.stack_state); } -void Heap::StartIncrementalGarbageCollection(Config config) { - DCHECK_NE(Config::MarkingType::kAtomic, config.marking_type); - DCHECK_NE(marking_support_, Config::MarkingType::kAtomic); +void Heap::StartIncrementalGarbageCollection(GCConfig config) { + DCHECK_NE(GCConfig::MarkingType::kAtomic, config.marking_type); + DCHECK_NE(marking_support_, GCConfig::MarkingType::kAtomic); CheckConfig(config, marking_support_, sweeping_support_); if (IsMarking() || in_no_gc_scope()) return; @@ -130,19 +127,19 @@ void Heap::StartIncrementalGarbageCollection(Config config) { StartGarbageCollection(config); } -void Heap::FinalizeIncrementalGarbageCollectionIfRunning(Config config) { +void Heap::FinalizeIncrementalGarbageCollectionIfRunning(GCConfig config) { CheckConfig(config, marking_support_, sweeping_support_); if (!IsMarking()) return; DCHECK(!in_no_gc_scope()); - DCHECK_NE(Config::MarkingType::kAtomic, config_.marking_type); + DCHECK_NE(GCConfig::MarkingType::kAtomic, config_.marking_type); config_ = config; FinalizeGarbageCollection(config.stack_state); } -void Heap::StartGarbageCollection(Config config) { +void Heap::StartGarbageCollection(GCConfig config) { DCHECK(!IsMarking()); DCHECK(!in_no_gc_scope()); @@ -152,18 +149,17 @@ void Heap::StartGarbageCollection(Config config) { epoch_++; #if defined(CPPGC_YOUNG_GENERATION) - if (config.collection_type == Config::CollectionType::kMajor) + if (config.collection_type == CollectionType::kMajor) SequentialUnmarker unmarker(raw_heap()); #endif // defined(CPPGC_YOUNG_GENERATION) - const Marker::MarkingConfig marking_config{ - config.collection_type, config.stack_state, config.marking_type, - config.is_forced_gc}; + const MarkingConfig marking_config{config.collection_type, config.stack_state, + config.marking_type, config.is_forced_gc}; marker_ = std::make_unique<Marker>(AsBase(), platform_.get(), marking_config); marker_->StartMarking(); } -void Heap::FinalizeGarbageCollection(Config::StackState stack_state) { +void Heap::FinalizeGarbageCollection(StackState stack_state) { DCHECK(IsMarking()); DCHECK(!in_no_gc_scope()); CHECK(!in_disallow_gc_scope()); @@ -203,9 +199,8 @@ void Heap::FinalizeGarbageCollection(Config::StackState stack_state) { #endif // defined(CPPGC_YOUNG_GENERATION) subtle::NoGarbageCollectionScope no_gc(*this); - const Sweeper::SweepingConfig sweeping_config{ - config_.sweeping_type, - Sweeper::SweepingConfig::CompactableSpaceHandling::kSweep, + const SweepingConfig sweeping_config{ + config_.sweeping_type, SweepingConfig::CompactableSpaceHandling::kSweep, config_.free_memory_handling}; sweeper_.Start(sweeping_config); in_atomic_pause_ = false; @@ -221,7 +216,7 @@ void Heap::EnableGenerationalGC() { void 
Heap::DisableHeapGrowingForTesting() { growing_.DisableForTesting(); } void Heap::FinalizeIncrementalGarbageCollectionIfNeeded( - Config::StackState stack_state) { + StackState stack_state) { StatsCollector::EnabledScope stats_scope( stats_collector(), StatsCollector::kMarkIncrementalFinalize); FinalizeGarbageCollection(stack_state); @@ -230,10 +225,9 @@ void Heap::FinalizeIncrementalGarbageCollectionIfNeeded( void Heap::StartIncrementalGarbageCollectionForTesting() { DCHECK(!IsMarking()); DCHECK(!in_no_gc_scope()); - StartGarbageCollection({Config::CollectionType::kMajor, - Config::StackState::kNoHeapPointers, - Config::MarkingType::kIncrementalAndConcurrent, - Config::SweepingType::kIncrementalAndConcurrent}); + StartGarbageCollection({CollectionType::kMajor, StackState::kNoHeapPointers, + GCConfig::MarkingType::kIncrementalAndConcurrent, + GCConfig::SweepingType::kIncrementalAndConcurrent}); } void Heap::FinalizeIncrementalGarbageCollectionForTesting( diff --git a/deps/v8/src/heap/cppgc/heap.h b/deps/v8/src/heap/cppgc/heap.h index cc027974f81a40..3a9e09fa5ffbcc 100644 --- a/deps/v8/src/heap/cppgc/heap.h +++ b/deps/v8/src/heap/cppgc/heap.h @@ -32,9 +32,9 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase, HeapBase& AsBase() { return *this; } const HeapBase& AsBase() const { return *this; } - void CollectGarbage(Config) final; - void StartIncrementalGarbageCollection(Config) final; - void FinalizeIncrementalGarbageCollectionIfRunning(Config); + void CollectGarbage(GCConfig) final; + void StartIncrementalGarbageCollection(GCConfig) final; + void FinalizeIncrementalGarbageCollectionIfRunning(GCConfig); size_t epoch() const final { return epoch_; } const EmbedderStackState* override_stack_state() const final { @@ -46,15 +46,15 @@ class V8_EXPORT_PRIVATE Heap final : public HeapBase, void DisableHeapGrowingForTesting(); private: - void StartGarbageCollection(Config); - void FinalizeGarbageCollection(Config::StackState); + void StartGarbageCollection(GCConfig); + void FinalizeGarbageCollection(StackState); - void FinalizeIncrementalGarbageCollectionIfNeeded(Config::StackState) final; + void FinalizeIncrementalGarbageCollectionIfNeeded(StackState) final; void StartIncrementalGarbageCollectionForTesting() final; void FinalizeIncrementalGarbageCollectionForTesting(EmbedderStackState) final; - Config config_; + GCConfig config_; GCInvoker gc_invoker_; HeapGrowing growing_; bool generational_gc_enabled_ = false; diff --git a/deps/v8/src/heap/cppgc/marker.cc b/deps/v8/src/heap/cppgc/marker.cc index 056f18912e6048..11197dafb88326 100644 --- a/deps/v8/src/heap/cppgc/marker.cc +++ b/deps/v8/src/heap/cppgc/marker.cc @@ -32,11 +32,10 @@ namespace internal { namespace { -bool EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config, - HeapBase& heap) { - if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental || +bool EnterIncrementalMarkingIfNeeded(MarkingConfig config, HeapBase& heap) { + if (config.marking_type == MarkingConfig::MarkingType::kIncremental || config.marking_type == - Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) { + MarkingConfig::MarkingType::kIncrementalAndConcurrent) { WriteBarrier::FlagUpdater::Enter(); heap.set_incremental_marking_in_progress(true); return true; @@ -44,11 +43,10 @@ bool EnterIncrementalMarkingIfNeeded(Marker::MarkingConfig config, return false; } -bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config, - HeapBase& heap) { - if (config.marking_type == Marker::MarkingConfig::MarkingType::kIncremental || +bool 
ExitIncrementalMarkingIfNeeded(MarkingConfig config, HeapBase& heap) { + if (config.marking_type == MarkingConfig::MarkingType::kIncremental || config.marking_type == - Marker::MarkingConfig::MarkingType::kIncrementalAndConcurrent) { + MarkingConfig::MarkingType::kIncrementalAndConcurrent) { WriteBarrier::FlagUpdater::Exit(); heap.set_incremental_marking_in_progress(false); return true; @@ -87,7 +85,7 @@ class MarkerBase::IncrementalMarkingTask final : public cppgc::Task { public: using Handle = SingleThreadedHandle; - IncrementalMarkingTask(MarkerBase*, MarkingConfig::StackState); + IncrementalMarkingTask(MarkerBase*, StackState); static Handle Post(cppgc::TaskRunner*, MarkerBase*); @@ -95,13 +93,13 @@ class MarkerBase::IncrementalMarkingTask final : public cppgc::Task { void Run() final; MarkerBase* const marker_; - MarkingConfig::StackState stack_state_; + StackState stack_state_; // TODO(chromium:1056170): Change to CancelableTask. Handle handle_; }; MarkerBase::IncrementalMarkingTask::IncrementalMarkingTask( - MarkerBase* marker, MarkingConfig::StackState stack_state) + MarkerBase* marker, StackState stack_state) : marker_(marker), stack_state_(stack_state), handle_(Handle::NonEmptyTag{}) {} @@ -117,10 +115,9 @@ MarkerBase::IncrementalMarkingTask::Post(cppgc::TaskRunner* runner, DCHECK_IMPLIES(marker->heap().stack_support() != HeapBase::StackSupport::kSupportsConservativeStackScan, runner->NonNestableTasksEnabled()); - MarkingConfig::StackState stack_state_for_task = - runner->NonNestableTasksEnabled() - ? MarkingConfig::StackState::kNoHeapPointers - : MarkingConfig::StackState::kMayContainHeapPointers; + const auto stack_state_for_task = runner->NonNestableTasksEnabled() + ? StackState::kNoHeapPointers + : StackState::kMayContainHeapPointers; auto task = std::make_unique<IncrementalMarkingTask>(marker, stack_state_for_task); auto handle = task->handle_; @@ -152,9 +149,8 @@ MarkerBase::MarkerBase(HeapBase& heap, cppgc::Platform* platform, foreground_task_runner_(platform_->GetForegroundTaskRunner()), mutator_marking_state_(heap, marking_worklists_, heap.compactor().compaction_worklists()) { - DCHECK_IMPLIES( - config_.collection_type == MarkingConfig::CollectionType::kMinor, - heap_.generational_gc_supported()); + DCHECK_IMPLIES(config_.collection_type == CollectionType::kMinor, + heap_.generational_gc_supported()); } MarkerBase::~MarkerBase() { @@ -163,7 +159,7 @@ MarkerBase::~MarkerBase() { // and should thus already be marked. if (!marking_worklists_.not_fully_constructed_worklist()->IsEmpty()) { #if DEBUG - DCHECK_NE(MarkingConfig::StackState::kNoHeapPointers, config_.stack_state); + DCHECK_NE(StackState::kNoHeapPointers, config_.stack_state); std::unordered_set<HeapObjectHeader*> objects = mutator_marking_state_.not_fully_constructed_worklist().Extract(); for (HeapObjectHeader* object : objects) DCHECK(object->IsMarked()); @@ -229,7 +225,7 @@ void MarkerBase::StartMarking() { // Performing incremental or concurrent marking. schedule_.NotifyIncrementalMarkingStart(); // Scanning the stack is expensive so we only do it at the atomic pause. 
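The stack-state selection above encodes a simple rule: a non-nestable task is guaranteed to run from the top level with no embedder frames below it, so marking may finalize precisely; otherwise the stack must be treated conservatively. As a stand-alone predicate (a sketch mirroring the logic above):

// Sketch: derive the stack state for an incremental marking task.
cppgc::internal::StackState StackStateForTask(bool non_nestable_tasks_enabled) {
  // Non-nestable tasks run from an empty stack, so no frame can hold raw
  // pointers into the cppgc heap.
  return non_nestable_tasks_enabled
             ? cppgc::internal::StackState::kNoHeapPointers
             : cppgc::internal::StackState::kMayContainHeapPointers;
}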
- VisitRoots(MarkingConfig::StackState::kNoHeapPointers); + VisitRoots(StackState::kNoHeapPointers); ScheduleIncrementalMarkingTask(); if (config_.marking_type == MarkingConfig::MarkingType::kIncrementalAndConcurrent) { @@ -244,14 +240,14 @@ void MarkerBase::StartMarking() { } void MarkerBase::HandleNotFullyConstructedObjects() { - if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) { + if (config_.stack_state == StackState::kNoHeapPointers) { mutator_marking_state_.FlushNotFullyConstructedObjects(); } else { MarkNotFullyConstructedObjects(); } } -void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) { +void MarkerBase::EnterAtomicPause(StackState stack_state) { StatsCollector::EnabledScope top_stats_scope(heap().stats_collector(), StatsCollector::kAtomicMark); StatsCollector::EnabledScope stats_scope(heap().stats_collector(), @@ -310,7 +306,7 @@ void MarkerBase::LeaveAtomicPause() { heap().SetStackStateOfPrevGC(config_.stack_state); } -void MarkerBase::FinishMarking(MarkingConfig::StackState stack_state) { +void MarkerBase::FinishMarking(StackState stack_state) { DCHECK(is_marking_); EnterAtomicPause(stack_state); { @@ -383,7 +379,7 @@ void MarkerBase::ProcessWeakness() { #if defined(CPPGC_YOUNG_GENERATION) if (heap().generational_gc_supported()) { auto& remembered_set = heap().remembered_set(); - if (config_.collection_type == MarkingConfig::CollectionType::kMinor) { + if (config_.collection_type == CollectionType::kMinor) { // Custom callbacks assume that untraced pointers point to not yet freed // objects. They must make sure that upon callback completion no // UntracedMember points to a freed object. This may not hold true if a @@ -425,7 +421,7 @@ void MarkerBase::ProcessWeakness() { DCHECK(marking_worklists_.marking_worklist()->IsEmpty()); } -void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) { +void MarkerBase::VisitRoots(StackState stack_state) { StatsCollector::EnabledScope stats_scope(heap().stats_collector(), StatsCollector::kMarkVisitRoots); @@ -442,13 +438,13 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) { } } - if (stack_state != MarkingConfig::StackState::kNoHeapPointers) { + if (stack_state != StackState::kNoHeapPointers) { StatsCollector::DisabledScope stack_stats_scope( heap().stats_collector(), StatsCollector::kMarkVisitStack); heap().stack()->IteratePointers(&stack_visitor()); } #if defined(CPPGC_YOUNG_GENERATION) - if (config_.collection_type == MarkingConfig::CollectionType::kMinor) { + if (config_.collection_type == CollectionType::kMinor) { StatsCollector::EnabledScope stats_scope( heap().stats_collector(), StatsCollector::kMarkVisitRememberedSets); heap().remembered_set().Visit(visitor(), mutator_marking_state_); @@ -482,13 +478,12 @@ void MarkerBase::ScheduleIncrementalMarkingTask() { IncrementalMarkingTask::Post(foreground_task_runner_.get(), this); } -bool MarkerBase::IncrementalMarkingStepForTesting( - MarkingConfig::StackState stack_state) { +bool MarkerBase::IncrementalMarkingStepForTesting(StackState stack_state) { return IncrementalMarkingStep(stack_state); } -bool MarkerBase::IncrementalMarkingStep(MarkingConfig::StackState stack_state) { - if (stack_state == MarkingConfig::StackState::kNoHeapPointers) { +bool MarkerBase::IncrementalMarkingStep(StackState stack_state) { + if (stack_state == StackState::kNoHeapPointers) { mutator_marking_state_.FlushNotFullyConstructedObjects(); } config_.stack_state = stack_state; diff --git a/deps/v8/src/heap/cppgc/marker.h 
b/deps/v8/src/heap/cppgc/marker.h index 9c471250adafe9..7586a43957304f 100644 --- a/deps/v8/src/heap/cppgc/marker.h +++ b/deps/v8/src/heap/cppgc/marker.h @@ -15,6 +15,7 @@ #include "src/heap/base/worklist.h" #include "src/heap/cppgc/concurrent-marker.h" #include "src/heap/cppgc/globals.h" +#include "src/heap/cppgc/heap-config.h" #include "src/heap/cppgc/incremental-marking-schedule.h" #include "src/heap/cppgc/marking-state.h" #include "src/heap/cppgc/marking-visitor.h" @@ -39,26 +40,6 @@ class V8_EXPORT_PRIVATE MarkerBase { public: class IncrementalMarkingTask; - struct MarkingConfig { - enum class CollectionType : uint8_t { - kMinor, - kMajor, - }; - using StackState = cppgc::Heap::StackState; - using MarkingType = cppgc::Heap::MarkingType; - enum class IsForcedGC : uint8_t { - kNotForced, - kForced, - }; - - static constexpr MarkingConfig Default() { return {}; } - - const CollectionType collection_type = CollectionType::kMajor; - StackState stack_state = StackState::kMayContainHeapPointers; - MarkingType marking_type = MarkingType::kIncremental; - IsForcedGC is_forced_gc = IsForcedGC::kNotForced; - }; - enum class WriteBarrierType { kDijkstra, kSteele, @@ -89,7 +70,7 @@ class V8_EXPORT_PRIVATE MarkerBase { // - stops incremental/concurrent marking; // - flushes back any in-construction worklists if needed; // - Updates the MarkingConfig if the stack state has changed; - void EnterAtomicPause(MarkingConfig::StackState); + void EnterAtomicPause(StackState); // Makes marking progress. A `marked_bytes_limit` of 0 means that the limit // is determined by the internal marking scheduler. @@ -113,7 +94,7 @@ class V8_EXPORT_PRIVATE MarkerBase { // - AdvanceMarkingWithLimits() // - ProcessWeakness() // - LeaveAtomicPause() - void FinishMarking(MarkingConfig::StackState); + void FinishMarking(StackState); void ProcessWeakness(); @@ -134,7 +115,7 @@ class V8_EXPORT_PRIVATE MarkerBase { void SetMainThreadMarkingDisabledForTesting(bool); void WaitForConcurrentMarkingForTesting(); void ClearAllWorklistsForTesting(); - bool IncrementalMarkingStepForTesting(MarkingConfig::StackState); + bool IncrementalMarkingStepForTesting(StackState); MarkingWorklists& MarkingWorklistsForTesting() { return marking_worklists_; } MutatorMarkingState& MutatorMarkingStateForTesting() { @@ -157,7 +138,7 @@ class V8_EXPORT_PRIVATE MarkerBase { bool ProcessWorklistsWithDeadline(size_t, v8::base::TimeTicks); - void VisitRoots(MarkingConfig::StackState); + void VisitRoots(StackState); bool VisitCrossThreadPersistentsIfNeeded(); @@ -165,7 +146,7 @@ class V8_EXPORT_PRIVATE MarkerBase { void ScheduleIncrementalMarkingTask(); - bool IncrementalMarkingStep(MarkingConfig::StackState); + bool IncrementalMarkingStep(StackState); void AdvanceMarkingOnAllocation(); diff --git a/deps/v8/src/heap/cppgc/marking-verifier.cc b/deps/v8/src/heap/cppgc/marking-verifier.cc index a64a6d5f252f64..666e715cd76407 100644 --- a/deps/v8/src/heap/cppgc/marking-verifier.cc +++ b/deps/v8/src/heap/cppgc/marking-verifier.cc @@ -36,7 +36,7 @@ void VerificationState::VerifyMarked(const void* base_object_payload) const { } MarkingVerifierBase::MarkingVerifierBase( - HeapBase& heap, Heap::Config::CollectionType collection_type, + HeapBase& heap, CollectionType collection_type, VerificationState& verification_state, std::unique_ptr<cppgc::Visitor> visitor) : ConservativeTracingVisitor(heap, *heap.page_backend(), *visitor.get()), @@ -45,7 +45,7 @@ MarkingVerifierBase::MarkingVerifierBase( collection_type_(collection_type) {} void MarkingVerifierBase::Run( - 
Heap::Config::StackState stack_state, uintptr_t stack_end, + StackState stack_state, uintptr_t stack_end, v8::base::Optional<size_t> expected_marked_bytes) { Traverse(heap_.raw_heap()); // Avoid verifying the stack when running with TSAN as the TSAN runtime changes @@ -61,7 +61,7 @@ void MarkingVerifierBase::Run( // TODO(chromium:1325007): Investigate if Oilpan verification can be moved // before V8 compaction or compaction never runs with stack. #if !defined(THREAD_SANITIZER) && !defined(CPPGC_POINTER_COMPRESSION) - if (stack_state == Heap::Config::StackState::kMayContainHeapPointers) { + if (stack_state == StackState::kMayContainHeapPointers) { in_construction_objects_ = &in_construction_objects_stack_; heap_.stack()->IteratePointersUnsafe(this, stack_end); // The objects found through the unsafe iteration are only a subset of the @@ -114,7 +114,7 @@ bool MarkingVerifierBase::VisitHeapObjectHeader(HeapObjectHeader& header) { DCHECK(!header.IsFree()); #if defined(CPPGC_YOUNG_GENERATION) - if (collection_type_ == Heap::Config::CollectionType::kMinor) { + if (collection_type_ == CollectionType::kMinor) { auto& caged_heap = CagedHeap::Instance(); const auto age = CagedHeapLocalData::Get().age_table.GetAge( caged_heap.OffsetFromAddress(header.ObjectStart())); @@ -185,7 +185,7 @@ class VerificationVisitor final : public cppgc::Visitor { } // namespace MarkingVerifier::MarkingVerifier(HeapBase& heap_base, - Heap::Config::CollectionType collection_type) + CollectionType collection_type) : MarkingVerifierBase(heap_base, collection_type, state_, std::make_unique<VerificationVisitor>(state_)) {} diff --git a/deps/v8/src/heap/cppgc/marking-verifier.h b/deps/v8/src/heap/cppgc/marking-verifier.h index cb2eb4c80cbb97..c966aea51fa1ca 100644 --- a/deps/v8/src/heap/cppgc/marking-verifier.h +++ b/deps/v8/src/heap/cppgc/marking-verifier.h @@ -41,11 +41,11 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase MarkingVerifierBase(const MarkingVerifierBase&) = delete; MarkingVerifierBase& operator=(const MarkingVerifierBase&) = delete; - void Run(Heap::Config::StackState, uintptr_t, v8::base::Optional<size_t>); + void Run(StackState, uintptr_t, v8::base::Optional<size_t>); protected: - MarkingVerifierBase(HeapBase&, Heap::Config::CollectionType, - VerificationState&, std::unique_ptr<cppgc::Visitor>); + MarkingVerifierBase(HeapBase&, CollectionType, VerificationState&, + std::unique_ptr<cppgc::Visitor>); private: void VisitInConstructionConservatively(HeapObjectHeader&, @@ -63,12 +63,12 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase &in_construction_objects_heap_; size_t verifier_found_marked_bytes_ = 0; bool verifier_found_marked_bytes_are_exact_ = true; - Heap::Config::CollectionType collection_type_; + CollectionType collection_type_; }; class V8_EXPORT_PRIVATE MarkingVerifier final : public MarkingVerifierBase { public: - MarkingVerifier(HeapBase&, Heap::Config::CollectionType); + MarkingVerifier(HeapBase&, CollectionType); ~MarkingVerifier() final = default; private: diff --git a/deps/v8/src/heap/cppgc/member-storage.cc b/deps/v8/src/heap/cppgc/member-storage.cc index a0e45624724919..c457c60ba4923c 100644 --- a/deps/v8/src/heap/cppgc/member-storage.cc +++ b/deps/v8/src/heap/cppgc/member-storage.cc @@ -4,6 +4,11 @@ #include "include/cppgc/internal/member-storage.h" +#include "include/cppgc/garbage-collected.h" +#include "include/cppgc/member.h" +#include "src/base/compiler-specific.h" +#include "src/base/macros.h" + namespace cppgc { namespace internal { @@ -11,5 +16,26 @@ namespace internal { uintptr_t 
CageBaseGlobal::g_base_ = CageBaseGlobal::kLowerHalfWordMask; #endif // defined(CPPGC_POINTER_COMPRESSION) +// Debugging helpers. + +#if defined(CPPGC_POINTER_COMPRESSION) +extern "C" V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE void* +_cppgc_internal_Decompress_Compressed_Pointer(uint32_t cmprsd) { + return MemberStorage::Decompress(cmprsd); +} +#endif // defined(CPPGC_POINTER_COMPRESSION) + +class MemberDebugHelper final { + public: + static void* PrintUncompressed(MemberBase* m) { + return const_cast<void*>(m->GetRaw()); + } +}; + +extern "C" V8_DONT_STRIP_SYMBOL V8_EXPORT_PRIVATE void* +_cppgc_internal_Print_Member(MemberBase* m) { + return MemberDebugHelper::PrintUncompressed(m); +} + } // namespace internal } // namespace cppgc diff --git a/deps/v8/src/heap/cppgc/object-allocator.cc b/deps/v8/src/heap/cppgc/object-allocator.cc index 38a3ccd8e9c09d..b88ba5c20069ce 100644 --- a/deps/v8/src/heap/cppgc/object-allocator.cc +++ b/deps/v8/src/heap/cppgc/object-allocator.cc @@ -148,9 +148,9 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space, void* result = TryAllocateLargeObject(page_backend_, large_space, stats_collector_, size, gcinfo); if (!result) { - auto config = GarbageCollector::Config::ConservativeAtomicConfig(); + auto config = GCConfig::ConservativeAtomicConfig(); config.free_memory_handling = - GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible; + GCConfig::FreeMemoryHandling::kDiscardWherePossible; garbage_collector_.CollectGarbage(config); result = TryAllocateLargeObject(page_backend_, large_space, stats_collector_, size, gcinfo); @@ -170,9 +170,9 @@ } if (!TryRefillLinearAllocationBuffer(space, request_size)) { - auto config = GarbageCollector::Config::ConservativeAtomicConfig(); + auto config = GCConfig::ConservativeAtomicConfig(); config.free_memory_handling = - GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible; + GCConfig::FreeMemoryHandling::kDiscardWherePossible; garbage_collector_.CollectGarbage(config); if (!TryRefillLinearAllocationBuffer(space, request_size)) { oom_handler_("Oilpan: Normal allocation."); @@ -187,42 +187,64 @@ void* ObjectAllocator::OutOfLineAllocateImpl(NormalPageSpace& space, return result; } +bool ObjectAllocator::TryExpandAndRefillLinearAllocationBuffer( + NormalPageSpace& space) { + auto* const new_page = NormalPage::TryCreate(page_backend_, space); + if (!new_page) return false; + + space.AddPage(new_page); + // Set linear allocation buffer to new page. + ReplaceLinearAllocationBuffer(space, stats_collector_, + new_page->PayloadStart(), + new_page->PayloadSize()); + return true; +} + bool ObjectAllocator::TryRefillLinearAllocationBuffer(NormalPageSpace& space, size_t size) { // Try to allocate from the freelist. if (TryRefillLinearAllocationBufferFromFreeList(space, size)) return true; - // Lazily sweep pages of this heap until we find a freed area for this - allocation or we finish sweeping all pages of this heap. Sweeper& sweeper = raw_heap_.heap()->sweeper(); - // TODO(chromium:1056170): Investigate whether this should be a loop which - would result in more aggressive re-use of memory at the expense of - potentially larger allocation time. - if (sweeper.SweepForAllocationIfRunning(&space, size)) { - // Sweeper found a block of at least `size` bytes. Allocation from the - free list may still fail as actual buckets are not exhaustively - searched for a suitable block.
Instead, buckets are tested from larger - sizes that are guaranteed to fit the block to smaller bucket sizes that - may only potentially fit the block. For the bucket that may exactly fit - the allocation of `size` bytes (no overallocation), only the first - entry is checked. - if (TryRefillLinearAllocationBufferFromFreeList(space, size)) return true; + // Lazily sweep pages of this heap. This is not exhaustive to limit jank on + allocation. Allocation from the free list may still fail as actual buckets + are not exhaustively searched for a suitable block. Instead, buckets are + tested from larger sizes that are guaranteed to fit the block to smaller + bucket sizes that may only potentially fit the block. For the bucket that + may exactly fit the allocation of `size` bytes (no overallocation), only + the first entry is checked. + if (sweeper.SweepForAllocationIfRunning( + &space, size, v8::base::TimeDelta::FromMicroseconds(500)) && + TryRefillLinearAllocationBufferFromFreeList(space, size)) { + return true; } - sweeper.FinishIfRunning(); - // TODO(chromium:1056170): Make use of the synchronously freed memory. - - auto* new_page = NormalPage::TryCreate(page_backend_, space); - if (!new_page) { - return false; + // Sweeping was off or did not yield any memory within the limited + contribution. We expand at this point as that's cheaper than possibly + continuing sweeping the whole heap. + if (TryExpandAndRefillLinearAllocationBuffer(space)) return true; + + // Expansion failed. Before finishing all sweeping, finish sweeping of the given + space, which is cheaper. + if (sweeper.SweepForAllocationIfRunning(&space, size, + v8::base::TimeDelta::Max()) && + TryRefillLinearAllocationBufferFromFreeList(space, size)) { + return true; } - space.AddPage(new_page); - // Set linear allocation buffer to new page. - ReplaceLinearAllocationBuffer(space, stats_collector_, - new_page->PayloadStart(), - new_page->PayloadSize()); - return true; + // Heap expansion and sweeping of a space failed. At this point the caller + could run into OOM or do a full GC, which needs to finish sweeping if it's + running. Hence, we may as well finish sweeping here. Note that this is + possibly very expensive but not more expensive than running a full GC as + the alternative is OOM. + if (sweeper.FinishIfRunning()) { + // Sweeping may have added memory to the free list. + if (TryRefillLinearAllocationBufferFromFreeList(space, size)) return true; + + // Sweeping may have freed pages completely.
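Taken together, the refill path now forms an ordered fallback ladder in which each step is expected to be cheaper than the one after it. Condensed into a sketch (helper names are illustrative stand-ins for the calls above; the 500 microsecond budget is the value chosen in this patch):

// Sketch of the fallback order in TryRefillLinearAllocationBuffer():
bool TryRefillSketch(NormalPageSpace& space, size_t size) {
  if (RefillFromFreeList(space, size)) return true;   // 1. free list
  if (SweepWithBudget(space, size /* 500us */) &&
      RefillFromFreeList(space, size)) {
    return true;                                      // 2. bounded lazy sweep
  }
  if (ExpandAndRefill(space)) return true;            // 3. grow the heap
  if (SweepSpaceToCompletion(space, size) &&
      RefillFromFreeList(space, size)) {
    return true;                                      // 4. finish this space
  }
  if (FinishAllSweeping()) {                          // 5. finish everything
    if (RefillFromFreeList(space, size)) return true;
    if (ExpandAndRefill(space)) return true;
  }
  return false;  // Caller may report OOM or trigger a full GC.
}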
+ if (TryExpandAndRefillLinearAllocationBuffer(space)) return true; + } + return false; } bool ObjectAllocator::TryRefillLinearAllocationBufferFromFreeList( diff --git a/deps/v8/src/heap/cppgc/object-allocator.h b/deps/v8/src/heap/cppgc/object-allocator.h index ea01f671f7cf6c..77f26ce3b59f07 100644 --- a/deps/v8/src/heap/cppgc/object-allocator.h +++ b/deps/v8/src/heap/cppgc/object-allocator.h @@ -70,6 +70,7 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle { bool TryRefillLinearAllocationBuffer(NormalPageSpace&, size_t); bool TryRefillLinearAllocationBufferFromFreeList(NormalPageSpace&, size_t); + bool TryExpandAndRefillLinearAllocationBuffer(NormalPageSpace&); RawHeap& raw_heap_; PageBackend& page_backend_; diff --git a/deps/v8/src/heap/cppgc/remembered-set.cc b/deps/v8/src/heap/cppgc/remembered-set.cc index 485fb4057f6763..60e8f978eff6f5 100644 --- a/deps/v8/src/heap/cppgc/remembered-set.cc +++ b/deps/v8/src/heap/cppgc/remembered-set.cc @@ -2,15 +2,19 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#if defined(CPPGC_YOUNG_GENERATION) + #include "src/heap/cppgc/remembered-set.h" #include <algorithm> #include "include/cppgc/member.h" #include "include/cppgc/visitor.h" +#include "src/heap/base/basic-slot-set.h" #include "src/heap/cppgc/heap-base.h" #include "src/heap/cppgc/heap-object-header.h" #include "src/heap/cppgc/heap-page.h" +#include "src/heap/cppgc/heap-visitor.h" #include "src/heap/cppgc/marking-state.h" namespace cppgc { @@ -20,23 +24,54 @@ namespace { enum class SlotType { kCompressed, kUncompressed }; -template <SlotType slot_type> -void InvalidateRememberedSlots(std::set<void*>& slots, void* begin, void* end) { +void EraseFromSet(std::set<void*>& set, void* begin, void* end) { // TODO(1029379): The 2 binary walks can be optimized with a custom algorithm. - auto from = slots.lower_bound(begin), to = slots.lower_bound(end); - slots.erase(from, to); + auto from = set.lower_bound(begin), to = set.lower_bound(end); + set.erase(from, to); +} + +// TODO(1029379): Make the implementation functions private functions of +// OldToNewRememberedSet to avoid parameter passing. +void InvalidateCompressedRememberedSlots( + const HeapBase& heap, void* begin, void* end, + std::set<void*>& remembered_slots_for_verification) { + DCHECK_LT(begin, end); + + BasePage* page = BasePage::FromInnerAddress(&heap, begin); + DCHECK_NOT_NULL(page); + // The input range must reside within the same page. 
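InvalidateCompressedRememberedSlots() clears a page-relative range from the slot set; since begin and end must lie on one page, both bounds can be rebased against the page start before calling RemoveRange(). A sketch of that rebasing (assuming, as the DCHECKs that follow enforce, that the range lies within the page):

#include <cstdint>
#include <utility>

// Sketch: rebase an address range to page-relative offsets for RemoveRange().
std::pair<uintptr_t, uintptr_t> PageRelativeRange(const void* page_start,
                                                  void* begin, void* end) {
  const uintptr_t base = reinterpret_cast<uintptr_t>(page_start);
  return {reinterpret_cast<uintptr_t>(begin) - base,
          reinterpret_cast<uintptr_t>(end) - base};
}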
+ DCHECK_EQ(page, BasePage::FromInnerAddress( + &heap, reinterpret_cast<void*>( + reinterpret_cast<uintptr_t>(end) - 1))); + + auto* slot_set = page->slot_set(); + if (!slot_set) return; + + const size_t buckets_size = SlotSet::BucketsForSize(page->AllocatedSize()); + + const uintptr_t page_start = reinterpret_cast<uintptr_t>(page); + const uintptr_t ubegin = reinterpret_cast<uintptr_t>(begin); + const uintptr_t uend = reinterpret_cast<uintptr_t>(end); + + slot_set->RemoveRange(ubegin - page_start, uend - page_start, buckets_size, + SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS); +#if DEBUG + EraseFromSet(remembered_slots_for_verification, begin, end); +#endif // DEBUG +} + +void InvalidateUncompressedRememberedSlots( + std::set<void*>& slots, void* begin, void* end, + std::set<void*>& remembered_slots_for_verification) { + EraseFromSet(slots, begin, end); +#if DEBUG + EraseFromSet(remembered_slots_for_verification, begin, end); +#endif // DEBUG #if defined(ENABLE_SLOW_DCHECKS) // Check that no remembered slots are referring to the freed area. DCHECK(std::none_of(slots.begin(), slots.end(), [begin, end](void* slot) { void* value = nullptr; -#if defined(CPPGC_POINTER_COMPRESSION) - if constexpr (slot_type == SlotType::kCompressed) - value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot)); - else - value = *reinterpret_cast<void**>(slot); -#else // !defined(CPPGC_POINTER_COMPRESSION) value = *reinterpret_cast<void**>(slot); -#endif // !defined(CPPGC_POINTER_COMPRESSION) return begin <= value && value < end; })); #endif // defined(ENABLE_SLOW_DCHECKS) @@ -44,45 +79,155 @@ void InvalidateRememberedSlots(std::set<void*>& slots, void* begin, void* end) { // Visit remembered set that was recorded in the generational barrier. template <SlotType slot_type> -void VisitRememberedSlots(const std::set<void*>& slots, const HeapBase& heap, - MutatorMarkingState& mutator_marking_state) { - for (void* slot : slots) { - // Slot must always point to a valid, not freed object. - auto& slot_header = BasePage::FromInnerAddress(&heap, slot) - ->ObjectHeaderFromInnerAddress(slot); - // The age checking in the generational barrier is imprecise, since a card - // may have mixed young/old objects. Check here precisely if the object is - // old. - if (slot_header.IsYoung()) continue; - // The design of young generation requires collections to be executed at the - // top level (with the guarantee that no objects are currently being in - // construction). This can be ensured by running young GCs from safe points - // or by reintroducing nested allocation scopes that avoid finalization. - DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>()); +void VisitSlot(const HeapBase& heap, const BasePage& page, Address slot, + MutatorMarkingState& marking_state, + const std::set<void*>& slots_for_verification) { +#if defined(DEBUG) + DCHECK_EQ(BasePage::FromInnerAddress(&heap, slot), &page); + DCHECK_NE(slots_for_verification.end(), slots_for_verification.find(slot)); +#endif // defined(DEBUG) + + // Slot must always point to a valid, not freed object. + auto& slot_header = page.ObjectHeaderFromInnerAddress(slot); + // The age checking in the generational barrier is imprecise, since a card + // may have mixed young/old objects. Check here precisely if the object is + // old. + if (slot_header.IsYoung()) return; + // The design of young generation requires collections to be executed at the + // top level (with the guarantee that no objects are currently being in + // construction). 
This can be ensured by running young GCs from safe points + // or by reintroducing nested allocation scopes that avoid finalization. + DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>()); #if defined(CPPGC_POINTER_COMPRESSION) - void* value = nullptr; - if constexpr (slot_type == SlotType::kCompressed) { - value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot)); - } else { - value = *reinterpret_cast<void**>(slot); - } + void* value = nullptr; + if constexpr (slot_type == SlotType::kCompressed) { + value = CompressedPointer::Decompress(*reinterpret_cast<uint32_t*>(slot)); + } else { + value = *reinterpret_cast<void**>(slot); + } #else // !defined(CPPGC_POINTER_COMPRESSION) - void* value = *reinterpret_cast<void**>(slot); + void* value = *reinterpret_cast<void**>(slot); #endif // !defined(CPPGC_POINTER_COMPRESSION) - // Slot could be updated to nullptr or kSentinelPointer by the mutator. - if (value == kSentinelPointer || value == nullptr) continue; + // Slot could be updated to nullptr or kSentinelPointer by the mutator. + if (value == kSentinelPointer || value == nullptr) return; -#if DEBUG - // Check that the slot can not point to a freed object. - HeapObjectHeader& header = - BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value); - DCHECK(!header.IsFree()); -#endif +#if defined(DEBUG) + // Check that the slot can not point to a freed object. + HeapObjectHeader& header = + BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value); + DCHECK(!header.IsFree()); +#endif // defined(DEBUG) + + marking_state.DynamicallyMarkAddress(static_cast<Address>(value)); +} + +class CompressedSlotVisitor : HeapVisitor<CompressedSlotVisitor> { + friend class HeapVisitor<CompressedSlotVisitor>; + + public: + CompressedSlotVisitor(HeapBase& heap, MutatorMarkingState& marking_state, + const std::set<void*>& slots_for_verification) + : heap_(heap), + marking_state_(marking_state), + remembered_slots_for_verification_(slots_for_verification) {} - mutator_marking_state.DynamicallyMarkAddress(static_cast<Address>(value)); + size_t Run() { + Traverse(heap_.raw_heap()); + return objects_visited_; } + + private: + heap::base::SlotCallbackResult VisitCompressedSlot(Address slot) { + DCHECK(current_page_); + VisitSlot<SlotType::kCompressed>(heap_, *current_page_, slot, + marking_state_, + remembered_slots_for_verification_); + ++objects_visited_; + return heap::base::KEEP_SLOT; + } + + void VisitSlotSet(SlotSet* slot_set) { + DCHECK(current_page_); + + if (!slot_set) return; + + const uintptr_t page_start = reinterpret_cast<uintptr_t>(current_page_); + const size_t buckets_size = + SlotSet::BucketsForSize(current_page_->AllocatedSize()); + + slot_set->Iterate( + page_start, 0, buckets_size, + [this](SlotSet::Address slot) { + return VisitCompressedSlot(reinterpret_cast<Address>(slot)); + }, + SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS); + } + + bool VisitNormalPage(NormalPage& page) { + current_page_ = &page; + VisitSlotSet(page.slot_set()); + return true; + } + + bool VisitLargePage(LargePage& page) { + current_page_ = &page; + VisitSlotSet(page.slot_set()); + return true; + } + + HeapBase& heap_; + MutatorMarkingState& marking_state_; + BasePage* current_page_ = nullptr; + + const std::set<void*>& remembered_slots_for_verification_; + size_t objects_visited_ = 0u; +}; + +class SlotRemover : HeapVisitor<SlotRemover> { + friend class HeapVisitor<SlotRemover>; + + public: + explicit SlotRemover(HeapBase& heap) : heap_(heap) {} + + void Run() { 
Traverse(heap_.raw_heap()); } + + private: + bool VisitNormalPage(NormalPage& page) { + page.ResetSlotSet(); + return true; + } + + bool VisitLargePage(LargePage& page) { + page.ResetSlotSet(); + return true; + } + + HeapBase& heap_; +}; + +// Visit remembered set that was recorded in the generational barrier. +void VisitRememberedSlots( + HeapBase& heap, MutatorMarkingState& mutator_marking_state, + const std::set<void*>& remembered_uncompressed_slots, + const std::set<void*>& remembered_slots_for_verification) { + size_t objects_visited = 0; + { + CompressedSlotVisitor slot_visitor(heap, mutator_marking_state, + remembered_slots_for_verification); + objects_visited += slot_visitor.Run(); + } + for (void* uncompressed_slot : remembered_uncompressed_slots) { + auto* page = BasePage::FromInnerAddress(&heap, uncompressed_slot); + DCHECK(page); + VisitSlot<SlotType::kUncompressed>( + heap, *page, static_cast<Address>(uncompressed_slot), + mutator_marking_state, remembered_slots_for_verification); + ++objects_visited; + } + DCHECK_EQ(remembered_slots_for_verification.size(), objects_visited); + USE(objects_visited); } // Visits source objects that were recorded in the generational barrier for @@ -114,12 +259,29 @@ void VisitRememberedSourceObjects( void OldToNewRememberedSet::AddSlot(void* slot) { DCHECK(heap_.generational_gc_supported()); - remembered_slots_.insert(slot); + + BasePage* source_page = BasePage::FromInnerAddress(&heap_, slot); + DCHECK(source_page); + + auto& slot_set = source_page->GetOrAllocateSlotSet(); + + const uintptr_t slot_offset = reinterpret_cast<uintptr_t>(slot) - + reinterpret_cast<uintptr_t>(source_page); + + slot_set.Insert<SlotSet::AccessMode::NON_ATOMIC>( + static_cast<size_t>(slot_offset)); + +#if defined(DEBUG) + remembered_slots_for_verification_.insert(slot); +#endif // defined(DEBUG) } void OldToNewRememberedSet::AddUncompressedSlot(void* uncompressed_slot) { DCHECK(heap_.generational_gc_supported()); remembered_uncompressed_slots_.insert(uncompressed_slot); +#if defined(DEBUG) + remembered_slots_for_verification_.insert(uncompressed_slot); +#endif // defined(DEBUG) } void OldToNewRememberedSet::AddSourceObject(HeapObjectHeader& hoh) { @@ -138,10 +300,11 @@ void OldToNewRememberedSet::AddWeakCallback(WeakCallbackItem item) { void OldToNewRememberedSet::InvalidateRememberedSlotsInRange(void* begin, void* end) { DCHECK(heap_.generational_gc_supported()); - InvalidateRememberedSlots<SlotType::kCompressed>(remembered_slots_, begin, - end); - InvalidateRememberedSlots<SlotType::kUncompressed>( - remembered_uncompressed_slots_, begin, end); + InvalidateCompressedRememberedSlots(heap_, begin, end, + remembered_slots_for_verification_); + InvalidateUncompressedRememberedSlots(remembered_uncompressed_slots_, begin, + end, + remembered_slots_for_verification_); } void OldToNewRememberedSet::InvalidateRememberedSourceObject( @@ -153,10 +316,8 @@ void OldToNewRememberedSet::InvalidateRememberedSourceObject( void OldToNewRememberedSet::Visit(Visitor& visitor, MutatorMarkingState& marking_state) { DCHECK(heap_.generational_gc_supported()); - VisitRememberedSlots<SlotType::kCompressed>(remembered_slots_, heap_, - marking_state); - VisitRememberedSlots<SlotType::kUncompressed>(remembered_uncompressed_slots_, - heap_, marking_state); + VisitRememberedSlots(heap_, marking_state, remembered_uncompressed_slots_, + remembered_slots_for_verification_); VisitRememberedSourceObjects(remembered_source_objects_, visitor); } @@ -174,16 +335,23 @@ void 
OldToNewRememberedSet::ReleaseCustomCallbacks() { void OldToNewRememberedSet::Reset() { DCHECK(heap_.generational_gc_supported()); - remembered_slots_.clear(); + SlotRemover slot_remover(heap_); + slot_remover.Run(); remembered_uncompressed_slots_.clear(); remembered_source_objects_.clear(); +#if DEBUG + remembered_slots_for_verification_.clear(); +#endif // DEBUG } bool OldToNewRememberedSet::IsEmpty() const { - return remembered_slots_.empty() && remembered_uncompressed_slots_.empty() && + // TODO(1029379): Add visitor to check if empty. + return remembered_uncompressed_slots_.empty() && remembered_source_objects_.empty() && remembered_weak_callbacks_.empty(); } } // namespace internal } // namespace cppgc + +#endif // defined(CPPGC_YOUNG_GENERATION) diff --git a/deps/v8/src/heap/cppgc/remembered-set.h b/deps/v8/src/heap/cppgc/remembered-set.h index 24e460d438f2b5..086ba622890e0f 100644 --- a/deps/v8/src/heap/cppgc/remembered-set.h +++ b/deps/v8/src/heap/cppgc/remembered-set.h @@ -5,9 +5,12 @@ #ifndef V8_HEAP_CPPGC_REMEMBERED_SET_H_ #define V8_HEAP_CPPGC_REMEMBERED_SET_H_ +#if defined(CPPGC_YOUNG_GENERATION) + #include <set> #include "src/base/macros.h" +#include "src/heap/base/basic-slot-set.h" #include "src/heap/cppgc/marking-worklists.h" namespace cppgc { @@ -21,11 +24,14 @@ class HeapBase; class HeapObjectHeader; class MutatorMarkingState; +class SlotSet : public ::heap::base::BasicSlotSet<kSlotSize> {}; + +// OldToNewRememberedSet represents a per-heap set of old-to-new references. class V8_EXPORT_PRIVATE OldToNewRememberedSet final { public: using WeakCallbackItem = MarkingWorklists::WeakCallbackItem; - explicit OldToNewRememberedSet(const HeapBase& heap) + explicit OldToNewRememberedSet(HeapBase& heap) : heap_(heap), remembered_weak_callbacks_(compare_parameter) {} OldToNewRememberedSet(const OldToNewRememberedSet&) = delete; @@ -58,15 +64,19 @@ class V8_EXPORT_PRIVATE OldToNewRememberedSet final { } } compare_parameter{}; - const HeapBase& heap_; - std::set<void*> remembered_slots_; - std::set<void*> remembered_uncompressed_slots_; + HeapBase& heap_; std::set<HeapObjectHeader*> remembered_source_objects_; std::set<WeakCallbackItem, decltype(compare_parameter)> remembered_weak_callbacks_; + // Compressed slots are stored in slot-sets (per-page two-level bitmaps), + // whereas uncompressed are stored in std::set. 
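The storage split described in that comment comes with a third, debug-only container: every insertion into either the per-page slot sets or the uncompressed std::set is mirrored into remembered_slots_for_verification_ (declared just below) so the visitor can cross-check its slot count. The mirror pattern in isolation, with simplified names; a flat set stands in for the per-page bitmaps:

#include <set>

// Debug-mirror pattern: the fast structure is authoritative, and a second
// container exists only in debug builds to cross-check it.
class RememberedSlots {
 public:
  void Add(void* slot) {
    fast_.insert(slot);  // stand-in for GetOrAllocateSlotSet().Insert(...)
#ifdef DEBUG
    verification_.insert(slot);
#endif
  }

  void Invalidate(void* begin, void* end) {
    fast_.erase(fast_.lower_bound(begin), fast_.lower_bound(end));
#ifdef DEBUG
    verification_.erase(verification_.lower_bound(begin),
                        verification_.lower_bound(end));
#endif
  }

 private:
  std::set<void*> fast_;
#ifdef DEBUG
  std::set<void*> verification_;
#endif
};

The DCHECK_EQ of remembered_slots_for_verification_.size() against objects_visited in VisitRememberedSlots above is exactly this cross-check paying off.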
+ std::set<void*> remembered_uncompressed_slots_; + std::set<void*> remembered_slots_for_verification_; }; } // namespace internal } // namespace cppgc +#endif // defined(CPPGC_YOUNG_GENERATION) + #endif // V8_HEAP_CPPGC_REMEMBERED_SET_H_ diff --git a/deps/v8/src/heap/cppgc/stats-collector.cc b/deps/v8/src/heap/cppgc/stats-collector.cc index ca0112220845a0..f65309b6f47fdd 100644 --- a/deps/v8/src/heap/cppgc/stats-collector.cc +++ b/deps/v8/src/heap/cppgc/stats-collector.cc @@ -171,8 +171,7 @@ int64_t SumPhases(const MetricRecorder::GCCycle::Phases& phases) { } MetricRecorder::GCCycle GetCycleEventForMetricRecorder( - StatsCollector::CollectionType type, - StatsCollector::MarkingType marking_type, + CollectionType type, StatsCollector::MarkingType marking_type, StatsCollector::SweepingType sweeping_type, int64_t atomic_mark_us, int64_t atomic_weak_us, int64_t atomic_compact_us, int64_t atomic_sweep_us, int64_t incremental_mark_us, int64_t incremental_sweep_us, @@ -181,7 +180,7 @@ MetricRecorder::GCCycle GetCycleEventForMetricRecorder( int64_t objects_freed_bytes, int64_t memory_before_bytes, int64_t memory_after_bytes, int64_t memory_freed_bytes) { MetricRecorder::GCCycle event; - event.type = (type == StatsCollector::CollectionType::kMajor) + event.type = (type == CollectionType::kMajor) ? MetricRecorder::GCCycle::Type::kMajor : MetricRecorder::GCCycle::Type::kMinor; // MainThread.Incremental: diff --git a/deps/v8/src/heap/cppgc/stats-collector.h b/deps/v8/src/heap/cppgc/stats-collector.h index c78db86acf2cd5..ff040a3dcc646a 100644 --- a/deps/v8/src/heap/cppgc/stats-collector.h +++ b/deps/v8/src/heap/cppgc/stats-collector.h @@ -33,6 +33,7 @@ namespace internal { V(IncrementalSweep) #define CPPGC_FOR_ALL_SCOPES(V) \ + V(Unmark) \ V(MarkIncrementalStart) \ V(MarkIncrementalFinalize) \ V(MarkAtomicPrologue) \ @@ -52,9 +53,10 @@ namespace internal { V(MarkVisitCrossThreadPersistents) \ V(MarkVisitStack) \ V(MarkVisitRememberedSets) \ + V(SweepFinishIfOutOfWork) \ V(SweepInvokePreFinalizers) \ - V(SweepIdleStep) \ V(SweepInTask) \ + V(SweepInTaskForStatistics) \ V(SweepOnAllocation) \ V(SweepFinalize) @@ -67,12 +69,11 @@ namespace internal { // Sink for various time and memory statistics. 
class V8_EXPORT_PRIVATE StatsCollector final { - using IsForcedGC = GarbageCollector::Config::IsForcedGC; + using IsForcedGC = GCConfig::IsForcedGC; public: - using CollectionType = GarbageCollector::Config::CollectionType; - using MarkingType = GarbageCollector::Config::MarkingType; - using SweepingType = GarbageCollector::Config::SweepingType; + using MarkingType = GCConfig::MarkingType; + using SweepingType = GCConfig::SweepingType; #if defined(CPPGC_DECLARE_ENUM) static_assert(false, "CPPGC_DECLARE_ENUM macro is already defined"); diff --git a/deps/v8/src/heap/cppgc/sweeper.cc b/deps/v8/src/heap/cppgc/sweeper.cc index 23e684ed4da9a7..3cb96f8baa972e 100644 --- a/deps/v8/src/heap/cppgc/sweeper.cc +++ b/deps/v8/src/heap/cppgc/sweeper.cc @@ -11,6 +11,7 @@ #include "include/cppgc/platform.h" #include "src/base/optional.h" #include "src/base/platform/mutex.h" +#include "src/base/platform/time.h" #include "src/heap/cppgc/free-list.h" #include "src/heap/cppgc/globals.h" #include "src/heap/cppgc/heap-base.h" @@ -25,13 +26,41 @@ #include "src/heap/cppgc/stats-collector.h" #include "src/heap/cppgc/task-handle.h" -namespace cppgc { -namespace internal { +namespace cppgc::internal { namespace { +class DeadlineChecker final { + public: + explicit DeadlineChecker(v8::base::TimeTicks end) : end_(end) {} + + bool Check() { + return (++count_ % kInterval == 0) && (end_ < v8::base::TimeTicks::Now()); + } + + private: + static constexpr size_t kInterval = 4; + + const v8::base::TimeTicks end_; + size_t count_ = 0; +}; + using v8::base::Optional; +enum class MutatorThreadSweepingMode { + kOnlyFinalizers, + kAll, +}; + +constexpr const char* ToString(MutatorThreadSweepingMode sweeping_mode) { + switch (sweeping_mode) { + case MutatorThreadSweepingMode::kAll: + return "all"; + case MutatorThreadSweepingMode::kOnlyFinalizers: + return "only-finalizers"; + } +} + enum class StickyBits : uint8_t { kDisabled, kEnabled, @@ -220,6 +249,9 @@ class InlinedFinalizationBuilderBase { bool is_empty = false; size_t largest_new_free_list_entry = 0; }; + + protected: + ResultType result_; }; // Builder that finalizes objects and adds freelist entries right away. @@ -238,10 +270,13 @@ class InlinedFinalizationBuilder final : public InlinedFinalizationBuilderBase, void AddFreeListEntry(Address start, size_t size) { FreeHandler::Free({start, size}); + result_.largest_new_free_list_entry = + std::max(result_.largest_new_free_list_entry, size); } - ResultType GetResult(bool is_empty, size_t largest_new_free_list_entry) { - return {is_empty, largest_new_free_list_entry}; + ResultType&& GetResult(bool is_empty) { + result_.is_empty = is_empty; + return std::move(result_); } }; @@ -282,12 +317,13 @@ class DeferredFinalizationBuilder final : public FreeHandler { } else { FreeHandler::Free({start, size}); } + result_.largest_new_free_list_entry = + std::max(result_.largest_new_free_list_entry, size); found_finalizer_ = false; } - ResultType&& GetResult(bool is_empty, size_t largest_new_free_list_entry) { + ResultType&& GetResult(bool is_empty) { result_.is_empty = is_empty; - result_.largest_new_free_list_entry = largest_new_free_list_entry; return std::move(result_); } @@ -305,7 +341,6 @@ typename FinalizationBuilder::ResultType SweepNormalPage( PlatformAwareObjectStartBitmap& bitmap = page->object_start_bitmap(); - size_t largest_new_free_list_entry = 0; size_t live_bytes = 0; Address start_of_gap = page->PayloadStart(); @@ -346,12 +381,10 @@ typename FinalizationBuilder::ResultType SweepNormalPage( // The object is alive. 
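The DeadlineChecker introduced at the top of sweeper.cc amortizes clock reads: querying the time is comparatively expensive, so it is consulted only once every kInterval calls. The same shape restated with std::chrono (DrainWithDeadline and the int work items are illustrative, not V8 API):

#include <chrono>
#include <cstddef>
#include <vector>

class ChronoDeadlineChecker {
 public:
  explicit ChronoDeadlineChecker(std::chrono::steady_clock::time_point end)
      : end_(end) {}

  // Only every kInterval-th call actually reads the clock.
  bool Check() {
    return (++count_ % kInterval == 0) &&
           (end_ < std::chrono::steady_clock::now());
  }

 private:
  static constexpr std::size_t kInterval = 4;
  const std::chrono::steady_clock::time_point end_;
  std::size_t count_ = 0;
};

// Typical driver: drain work until done or out of time; the return value
// tells the caller whether to reschedule (compare SweepSpaceWithDeadline).
bool DrainWithDeadline(std::vector<int>& work,
                       std::chrono::milliseconds budget) {
  ChronoDeadlineChecker deadline(std::chrono::steady_clock::now() + budget);
  while (!work.empty()) {
    work.pop_back();  // stand-in for finalizing or sweeping one page
    if (deadline.Check()) return false;
  }
  return true;
}

The cost is bounded overshoot: the deadline can be exceeded by at most kInterval - 1 work items before Check() notices.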
const Address header_address = reinterpret_cast<Address>(header); if (start_of_gap != header_address) { - size_t new_free_list_entry_size = + const size_t new_free_list_entry_size = static_cast<size_t>(header_address - start_of_gap); builder.AddFreeListEntry(start_of_gap, new_free_list_entry_size); DCHECK(bitmap.CheckBit<AccessMode::kAtomic>(start_of_gap)); - largest_new_free_list_entry = - std::max(largest_new_free_list_entry, new_free_list_entry_size); } StickyUnmark(header, sticky_bits); begin += size; @@ -368,7 +401,7 @@ typename FinalizationBuilder::ResultType SweepNormalPage( page->SetAllocatedBytesAtLastGC(live_bytes); const bool is_empty = (start_of_gap == page->PayloadStart()); - return builder.GetResult(is_empty, largest_new_free_list_entry); + return builder.GetResult(is_empty); } // SweepFinalizer is responsible for heap/space/page finalization. Finalization @@ -377,7 +410,7 @@ typename FinalizationBuilder::ResultType SweepNormalPage( // - returns (unmaps) empty pages; // - merges freelists to the space's freelist. class SweepFinalizer final { - using FreeMemoryHandling = Sweeper::SweepingConfig::FreeMemoryHandling; + using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling; public: SweepFinalizer(cppgc::Platform* platform, @@ -397,20 +430,13 @@ class SweepFinalizer final { } bool FinalizeSpaceWithDeadline(SpaceState* space_state, - double deadline_in_seconds) { + v8::base::TimeTicks deadline) { DCHECK(platform_); - static constexpr size_t kDeadlineCheckInterval = 8; - size_t page_count = 1; - + DeadlineChecker deadline_check(deadline); while (auto page_state = space_state->swept_unfinalized_pages.Pop()) { FinalizePage(&*page_state); - if (page_count % kDeadlineCheckInterval == 0 && - deadline_in_seconds <= platform_->MonotonicallyIncreasingTime()) { - return false; - } - - page_count++; + if (deadline_check.Check()) return false; } return true; @@ -488,7 +514,7 @@ class SweepFinalizer final { class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> { friend class HeapVisitor<MutatorThreadSweeper>; - using FreeMemoryHandling = Sweeper::SweepingConfig::FreeMemoryHandling; + using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling; public: MutatorThreadSweeper(HeapBase* heap, SpaceStates* states, @@ -511,25 +537,23 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> { void SweepPage(BasePage& page) { Traverse(page); } - bool SweepWithDeadline(double deadline_in_seconds) { + bool SweepWithDeadline(v8::base::TimeDelta max_duration, + MutatorThreadSweepingMode sweeping_mode) { DCHECK(platform_); - static constexpr double kSlackInSeconds = 0.001; for (SpaceState& state : *states_) { - // FinalizeSpaceWithDeadline() and SweepSpaceWithDeadline() won't check - // the deadline until it sweeps 10 pages. So we give a small slack for - // safety. - const double remaining_budget = deadline_in_seconds - kSlackInSeconds - - platform_->MonotonicallyIncreasingTime(); - if (remaining_budget <= 0.) return false; + const auto deadline = v8::base::TimeTicks::Now() + max_duration; // First, prioritize finalization of pages that were swept concurrently. SweepFinalizer finalizer(platform_, free_memory_handling_); - if (!finalizer.FinalizeSpaceWithDeadline(&state, deadline_in_seconds)) { + if (!finalizer.FinalizeSpaceWithDeadline(&state, deadline)) { return false; } + if (sweeping_mode == MutatorThreadSweepingMode::kOnlyFinalizers) + return false; + // Help out the concurrent sweeper. 
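SweepWithDeadline here orders its work deliberately: per space, it first drains swept-but-unfinalized pages (finalization is cheap) and only helps with actual sweeping when the mode is kAll. A compact restatement of that control flow with illustrative types, not V8's:

#include <deque>

enum class SweepingMode { kOnlyFinalizers, kAll };

struct SpaceWork {
  std::deque<int> unfinalized_pages;
  std::deque<int> unswept_pages;
};

bool SweepSpaceWithBudget(SpaceWork& space, SweepingMode mode,
                          bool (*out_of_time)()) {
  while (!space.unfinalized_pages.empty()) {  // phase 1: finalize (cheap)
    space.unfinalized_pages.pop_front();
    if (out_of_time()) return false;
  }
  if (mode == SweepingMode::kOnlyFinalizers) return false;  // not complete
  while (!space.unswept_pages.empty()) {      // phase 2: help sweep
    space.unswept_pages.pop_front();
    if (out_of_time()) return false;
  }
  return true;  // this space is fully swept and finalized
}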
- if (!SweepSpaceWithDeadline(&state, deadline_in_seconds)) { + if (!SweepSpaceWithDeadline(&state, deadline)) { return false; } } @@ -541,16 +565,11 @@ class MutatorThreadSweeper final : private HeapVisitor<MutatorThreadSweeper> { } private: - bool SweepSpaceWithDeadline(SpaceState* state, double deadline_in_seconds) { - static constexpr size_t kDeadlineCheckInterval = 8; - size_t page_count = 1; + bool SweepSpaceWithDeadline(SpaceState* state, v8::base::TimeTicks deadline) { + DeadlineChecker deadline_check(deadline); while (auto page = state->unswept_pages.Pop()) { Traverse(**page); - if (page_count % kDeadlineCheckInterval == 0 && - deadline_in_seconds <= platform_->MonotonicallyIncreasingTime()) { - return false; - } - page_count++; + if (deadline_check.Check()) return false; } return true; @@ -603,7 +622,7 @@ class ConcurrentSweepTask final : public cppgc::JobTask, private HeapVisitor<ConcurrentSweepTask> { friend class HeapVisitor<ConcurrentSweepTask>; - using FreeMemoryHandling = Sweeper::SweepingConfig::FreeMemoryHandling; + using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling; public: ConcurrentSweepTask(HeapBase& heap, SpaceStates* states, Platform* platform, @@ -693,8 +712,7 @@ class ConcurrentSweepTask final : public cppgc::JobTask, class PrepareForSweepVisitor final : protected HeapVisitor<PrepareForSweepVisitor> { friend class HeapVisitor<PrepareForSweepVisitor>; - using CompactableSpaceHandling = - Sweeper::SweepingConfig::CompactableSpaceHandling; + using CompactableSpaceHandling = SweepingConfig::CompactableSpaceHandling; public: PrepareForSweepVisitor(SpaceStates* states, @@ -746,7 +764,7 @@ class PrepareForSweepVisitor final } // namespace class Sweeper::SweeperImpl final { - using FreeMemoryHandling = Sweeper::SweepingConfig::FreeMemoryHandling; + using FreeMemoryHandling = SweepingConfig::FreeMemoryHandling; public: SweeperImpl(RawHeap& heap, StatsCollector* stats_collector) @@ -787,7 +805,8 @@ class Sweeper::SweeperImpl final { } } - bool SweepForAllocationIfRunning(NormalPageSpace* space, size_t size) { + bool SweepForAllocationIfRunning(NormalPageSpace* space, size_t size, + v8::base::TimeDelta max_duration) { if (!is_in_progress_) return false; // Bail out for recursive sweeping calls. This can happen when finalizers @@ -808,14 +827,19 @@ class Sweeper::SweeperImpl final { StatsCollector::EnabledScope inner_scope( stats_collector_, StatsCollector::kSweepOnAllocation); MutatorThreadSweepingScope sweeping_in_progress(*this); - + DeadlineChecker deadline_check(v8::base::TimeTicks::Now() + max_duration); { // First, process unfinalized pages as finalizing a page is faster than // sweeping. 
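The surrounding SweepForAllocationIfRunning is the on-allocation variant of the same idea: keep processing pages of the target space until one yields a free-list entry large enough for the pending allocation, and give up when the budget expires. Note that the fit test precedes the deadline test, so a successful sweep is never discarded. A sketch with illustrative types:

#include <cstddef>
#include <deque>

// "Sweeping" a page reveals its largest contiguous free block, which is
// compared against the pending allocation's size.
struct SweptPage {
  std::size_t largest_free_block;
};

bool SweepUntilFit(std::deque<SweptPage>& unswept_pages,
                   std::size_t wanted_size, bool (*out_of_time)()) {
  while (!unswept_pages.empty()) {
    const SweptPage page = unswept_pages.front();
    unswept_pages.pop_front();
    // Check the fit first: a successful sweep must not be discarded just
    // because the deadline expired in the same iteration.
    if (page.largest_free_block >= wanted_size) return true;
    if (out_of_time()) return false;
  }
  return false;  // nothing fit; the caller allocates a fresh page instead
}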
SweepFinalizer finalizer(platform_, config_.free_memory_handling); while (auto page = space_state.swept_unfinalized_pages.Pop()) { finalizer.FinalizePage(&*page); - if (size <= finalizer.largest_new_free_list_entry()) return true; + if (size <= finalizer.largest_new_free_list_entry()) { + return true; + } + if (deadline_check.Check()) { + return false; + } } } { @@ -825,19 +849,24 @@ class Sweeper::SweeperImpl final { config_.free_memory_handling); while (auto page = space_state.unswept_pages.Pop()) { sweeper.SweepPage(**page); - if (size <= sweeper.largest_new_free_list_entry()) return true; + if (size <= sweeper.largest_new_free_list_entry()) { + return true; + } + if (deadline_check.Check()) { + return false; + } } } return false; } - void FinishIfRunning() { - if (!is_in_progress_) return; + bool FinishIfRunning() { + if (!is_in_progress_) return false; // Bail out for recursive sweeping calls. This can happen when finalizers // allocate new memory. - if (is_sweeping_on_mutator_thread_) return; + if (is_sweeping_on_mutator_thread_) return false; { StatsCollector::EnabledScope stats_scope( @@ -852,12 +881,22 @@ class Sweeper::SweeperImpl final { Finish(); } NotifyDone(); + return true; + } + + bool IsConcurrentSweepingDone() const { + return !concurrent_sweeper_handle_ || + (concurrent_sweeper_handle_->IsValid() && + !concurrent_sweeper_handle_->IsActive()); } void FinishIfOutOfWork() { if (is_in_progress_ && !is_sweeping_on_mutator_thread_ && concurrent_sweeper_handle_ && concurrent_sweeper_handle_->IsValid() && !concurrent_sweeper_handle_->IsActive()) { + StatsCollector::EnabledScope stats_scope( + stats_collector_, StatsCollector::kSweepFinishIfOutOfWork); + MutatorThreadSweepingScope sweeping_in_progress(*this); // At this point we know that the concurrent sweeping task has run // out-of-work: all pages are swept. The main thread still needs to finish // sweeping though. @@ -865,8 +904,18 @@ class Sweeper::SweeperImpl final { [](const SpaceState& state) { return state.unswept_pages.IsEmpty(); })); - FinishIfRunning(); + + // There may be unfinalized pages left. Since it's hard to estimate + // the actual amount of sweeping necessary, we sweep with a small + // deadline to see if sweeping can be fully finished. 
+ MutatorThreadSweeper sweeper(heap_.heap(), &space_states_, platform_, + config_.free_memory_handling); + if (sweeper.SweepWithDeadline(v8::base::TimeDelta::FromMilliseconds(2), + MutatorThreadSweepingMode::kAll)) { + FinalizeSweep(); + } } + NotifyDoneIfNeeded(); } void Finish() { @@ -920,8 +969,9 @@ class Sweeper::SweeperImpl final { bool IsSweepingInProgress() const { return is_in_progress_; } - bool PerformSweepOnMutatorThread(double deadline_in_seconds, - StatsCollector::ScopeId internal_scope_id) { + bool PerformSweepOnMutatorThread(v8::base::TimeDelta max_duration, + StatsCollector::ScopeId internal_scope_id, + MutatorThreadSweepingMode sweeping_mode) { if (!is_in_progress_) return true; MutatorThreadSweepingScope sweeping_in_progress(*this); @@ -935,10 +985,10 @@ class Sweeper::SweeperImpl final { config_.free_memory_handling); { StatsCollector::EnabledScope inner_stats_scope( - stats_collector_, internal_scope_id, "deltaInSeconds", - deadline_in_seconds - platform_->MonotonicallyIncreasingTime()); - - sweep_complete = sweeper.SweepWithDeadline(deadline_in_seconds); + stats_collector_, internal_scope_id, "max_duration_ms", + max_duration.InMillisecondsF(), "sweeping_mode", + ToString(sweeping_mode)); + sweep_complete = sweeper.SweepWithDeadline(max_duration, sweeping_mode); } if (sweep_complete) { FinalizeSweep(); @@ -948,6 +998,23 @@ class Sweeper::SweeperImpl final { return sweep_complete; } + void AddMutatorThreadSweepingObserver( + Sweeper::SweepingOnMutatorThreadObserver* observer) { + DCHECK_EQ(mutator_thread_sweeping_observers_.end(), + std::find(mutator_thread_sweeping_observers_.begin(), + mutator_thread_sweeping_observers_.end(), observer)); + mutator_thread_sweeping_observers_.push_back(observer); + } + + void RemoveMutatorThreadSweepingObserver( + Sweeper::SweepingOnMutatorThreadObserver* observer) { + const auto it = + std::find(mutator_thread_sweeping_observers_.begin(), + mutator_thread_sweeping_observers_.end(), observer); + DCHECK_NE(mutator_thread_sweeping_observers_.end(), it); + mutator_thread_sweeping_observers_.erase(it); + } + private: class MutatorThreadSweepingScope final { public: @@ -955,9 +1022,15 @@ class Sweeper::SweeperImpl final { : sweeper_(sweeper) { DCHECK(!sweeper_.is_sweeping_on_mutator_thread_); sweeper_.is_sweeping_on_mutator_thread_ = true; + for (auto* observer : sweeper_.mutator_thread_sweeping_observers_) { + observer->Start(); + } } ~MutatorThreadSweepingScope() { sweeper_.is_sweeping_on_mutator_thread_ = false; + for (auto* observer : sweeper_.mutator_thread_sweeping_observers_) { + observer->End(); + } } MutatorThreadSweepingScope(const MutatorThreadSweepingScope&) = delete; @@ -968,33 +1041,37 @@ class Sweeper::SweeperImpl final { SweeperImpl& sweeper_; }; - class IncrementalSweepTask : public cppgc::IdleTask { + class IncrementalSweepTask final : public cppgc::Task { public: using Handle = SingleThreadedHandle; - explicit IncrementalSweepTask(SweeperImpl* sweeper) + explicit IncrementalSweepTask(SweeperImpl& sweeper) : sweeper_(sweeper), handle_(Handle::NonEmptyTag{}) {} - static Handle Post(SweeperImpl* sweeper, cppgc::TaskRunner* runner) { + static Handle Post(SweeperImpl& sweeper, cppgc::TaskRunner* runner) { auto task = std::make_unique<IncrementalSweepTask>(sweeper); auto handle = task->GetHandle(); - runner->PostIdleTask(std::move(task)); + runner->PostTask(std::move(task)); return handle; } private: - void Run(double deadline_in_seconds) override { + void Run() override { if (handle_.IsCanceled()) return; - if 
(!sweeper_->PerformSweepOnMutatorThread( - deadline_in_seconds, StatsCollector::kSweepIdleStep)) { - sweeper_->ScheduleIncrementalSweeping(); + if (!sweeper_.PerformSweepOnMutatorThread( + v8::base::TimeDelta::FromMilliseconds(5), + StatsCollector::kSweepInTask, + sweeper_.IsConcurrentSweepingDone() + ? MutatorThreadSweepingMode::kAll + : MutatorThreadSweepingMode::kOnlyFinalizers)) { + sweeper_.ScheduleIncrementalSweeping(); } } Handle GetHandle() const { return handle_; } - SweeperImpl* sweeper_; + SweeperImpl& sweeper_; // TODO(chromium:1056170): Change to CancelableTask. Handle handle_; }; @@ -1002,10 +1079,10 @@ class Sweeper::SweeperImpl final { void ScheduleIncrementalSweeping() { DCHECK(platform_); auto runner = platform_->GetForegroundTaskRunner(); - if (!runner || !runner->IdleTasksEnabled()) return; + if (!runner) return; incremental_sweeper_handle_ = - IncrementalSweepTask::Post(this, runner.get()); + IncrementalSweepTask::Post(*this, runner.get()); } void ScheduleConcurrentSweeping() { @@ -1042,6 +1119,8 @@ class Sweeper::SweeperImpl final { SweepingConfig config_; IncrementalSweepTask::Handle incremental_sweeper_handle_; std::unique_ptr<cppgc::JobHandle> concurrent_sweeper_handle_; + std::vector<Sweeper::SweepingOnMutatorThreadObserver*> + mutator_thread_sweeping_observers_; // Indicates whether the sweeping phase is in progress. bool is_in_progress_ = false; bool notify_done_pending_ = false; @@ -1060,14 +1139,16 @@ Sweeper::~Sweeper() = default; void Sweeper::Start(SweepingConfig config) { impl_->Start(config, heap_.platform()); } -void Sweeper::FinishIfRunning() { impl_->FinishIfRunning(); } + +bool Sweeper::FinishIfRunning() { return impl_->FinishIfRunning(); } void Sweeper::FinishIfOutOfWork() { impl_->FinishIfOutOfWork(); } void Sweeper::WaitForConcurrentSweepingForTesting() { impl_->WaitForConcurrentSweepingForTesting(); } void Sweeper::NotifyDoneIfNeeded() { impl_->NotifyDoneIfNeeded(); } -bool Sweeper::SweepForAllocationIfRunning(NormalPageSpace* space, size_t size) { - return impl_->SweepForAllocationIfRunning(space, size); +bool Sweeper::SweepForAllocationIfRunning(NormalPageSpace* space, size_t size, + v8::base::TimeDelta max_duration) { + return impl_->SweepForAllocationIfRunning(space, size, max_duration); } bool Sweeper::IsSweepingOnMutatorThread() const { return impl_->IsSweepingOnMutatorThread(); @@ -1077,10 +1158,20 @@ bool Sweeper::IsSweepingInProgress() const { return impl_->IsSweepingInProgress(); } -bool Sweeper::PerformSweepOnMutatorThread(double deadline_in_seconds) { - return impl_->PerformSweepOnMutatorThread(deadline_in_seconds, - StatsCollector::kSweepInTask); +bool Sweeper::PerformSweepOnMutatorThread(v8::base::TimeDelta max_duration, + StatsCollector::ScopeId scope_id) { + return impl_->PerformSweepOnMutatorThread(max_duration, scope_id, + MutatorThreadSweepingMode::kAll); +} + +Sweeper::SweepingOnMutatorThreadObserver::SweepingOnMutatorThreadObserver( + Sweeper& sweeper) + : sweeper_(sweeper) { + sweeper_.impl_->AddMutatorThreadSweepingObserver(this); +} + +Sweeper::SweepingOnMutatorThreadObserver::~SweepingOnMutatorThreadObserver() { + sweeper_.impl_->RemoveMutatorThreadSweepingObserver(this); } -} // namespace internal -} // namespace cppgc +} // namespace cppgc::internal diff --git a/deps/v8/src/heap/cppgc/sweeper.h b/deps/v8/src/heap/cppgc/sweeper.h index 845dfbbfc1261e..95b61729b865cc 100644 --- a/deps/v8/src/heap/cppgc/sweeper.h +++ b/deps/v8/src/heap/cppgc/sweeper.h @@ -7,16 +7,13 @@ #include <memory> -#include "include/cppgc/heap.h" 
#include "src/base/macros.h" #include "src/base/platform/time.h" +#include "src/heap/cppgc/heap-config.h" #include "src/heap/cppgc/memory.h" +#include "src/heap/cppgc/stats-collector.h" -namespace cppgc { - -class Platform; - -namespace internal { +namespace cppgc::internal { class HeapBase; class ConcurrentSweeperTest; @@ -24,15 +21,16 @@ class NormalPageSpace; class V8_EXPORT_PRIVATE Sweeper final { public: - struct SweepingConfig { - using SweepingType = cppgc::Heap::SweepingType; - enum class CompactableSpaceHandling { kSweep, kIgnore }; - enum class FreeMemoryHandling { kDoNotDiscard, kDiscardWherePossible }; - - SweepingType sweeping_type = SweepingType::kIncrementalAndConcurrent; - CompactableSpaceHandling compactable_space_handling = - CompactableSpaceHandling::kSweep; - FreeMemoryHandling free_memory_handling = FreeMemoryHandling::kDoNotDiscard; + class V8_EXPORT_PRIVATE SweepingOnMutatorThreadObserver { + public: + explicit SweepingOnMutatorThreadObserver(Sweeper&); + virtual ~SweepingOnMutatorThreadObserver(); + + virtual void Start() = 0; + virtual void End() = 0; + + private: + Sweeper& sweeper_; }; static constexpr bool CanDiscardMemory() { @@ -47,19 +45,24 @@ class V8_EXPORT_PRIVATE Sweeper final { // Sweeper::Start assumes the heap holds no linear allocation buffers. void Start(SweepingConfig); - void FinishIfRunning(); + // Returns true when sweeping was finished and false if it was not running or + // couldn't be finished due to being a recursive sweep call. + bool FinishIfRunning(); void FinishIfOutOfWork(); void NotifyDoneIfNeeded(); - // SweepForAllocationIfRunning sweeps the given |space| until a slot that can - // fit an allocation of size |size| is found. Returns true if a slot was - // found. - bool SweepForAllocationIfRunning(NormalPageSpace* space, size_t size); + // SweepForAllocationIfRunning sweeps the given `space` until a slot that can + // fit an allocation of `min_wanted_size` bytes is found. Returns true if a + // slot was found. Aborts after `max_duration`. + bool SweepForAllocationIfRunning(NormalPageSpace* space, + size_t min_wanted_size, + v8::base::TimeDelta max_duration); bool IsSweepingOnMutatorThread() const; bool IsSweepingInProgress() const; // Assist with sweeping. Returns true if sweeping is done. - bool PerformSweepOnMutatorThread(double deadline_in_seconds); + bool PerformSweepOnMutatorThread(v8::base::TimeDelta max_duration, + StatsCollector::ScopeId); private: void WaitForConcurrentSweepingForTesting(); @@ -72,7 +75,6 @@ class V8_EXPORT_PRIVATE Sweeper final { friend class ConcurrentSweeperTest; }; -} // namespace internal -} // namespace cppgc +} // namespace cppgc::internal #endif // V8_HEAP_CPPGC_SWEEPER_H_ diff --git a/deps/v8/src/heap/cppgc/write-barrier.cc b/deps/v8/src/heap/cppgc/write-barrier.cc index 098f950d2a47da..5cbec656a9236c 100644 --- a/deps/v8/src/heap/cppgc/write-barrier.cc +++ b/deps/v8/src/heap/cppgc/write-barrier.cc @@ -187,24 +187,6 @@ void WriteBarrier::CheckParams(Type expected_type, const Params& params) { } #endif // V8_ENABLE_CHECKS -// static -bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(const void* object, - HeapHandle** handle) { - // Large objects cannot have mixins, so we are guaranteed to always have - // a pointer on the same page. 
- const auto* page = BasePage::FromPayload(object); - *handle = &page->heap(); - const MarkerBase* marker = page->heap().marker(); - return marker && marker->IsMarking(); -} - -// static -bool WriteBarrierTypeForNonCagedHeapPolicy::IsMarking(HeapHandle& heap_handle) { - const auto& heap_base = internal::HeapBase::From(heap_handle); - const MarkerBase* marker = heap_base.marker(); - return marker && marker->IsMarking(); -} - #if defined(CPPGC_YOUNG_GENERATION) // static diff --git a/deps/v8/src/heap/embedder-tracing.cc b/deps/v8/src/heap/embedder-tracing.cc index 8bb5bcad4e9b85..ceac516f9c4eb9 100644 --- a/deps/v8/src/heap/embedder-tracing.cc +++ b/deps/v8/src/heap/embedder-tracing.cc @@ -45,9 +45,8 @@ CppHeap::GarbageCollectionFlags ConvertTraceFlags( void LocalEmbedderHeapTracer::PrepareForTrace( EmbedderHeapTracer::TraceFlags flags) { if (cpp_heap_) - cpp_heap()->InitializeTracing( - cppgc::internal::GarbageCollector::Config::CollectionType::kMajor, - ConvertTraceFlags(flags)); + cpp_heap()->InitializeTracing(cppgc::internal::CollectionType::kMajor, + ConvertTraceFlags(flags)); } void LocalEmbedderHeapTracer::TracePrologue( diff --git a/deps/v8/src/heap/evacuation-allocator-inl.h b/deps/v8/src/heap/evacuation-allocator-inl.h index b474664a62472a..8d83eaad1cac81 100644 --- a/deps/v8/src/heap/evacuation-allocator-inl.h +++ b/deps/v8/src/heap/evacuation-allocator-inl.h @@ -16,6 +16,7 @@ AllocationResult EvacuationAllocator::Allocate(AllocationSpace space, int object_size, AllocationOrigin origin, AllocationAlignment alignment) { + object_size = ALIGN_TO_ALLOCATION_ALIGNMENT(object_size); switch (space) { case NEW_SPACE: return AllocateInNewSpace(object_size, origin, alignment); @@ -28,6 +29,9 @@ AllocationResult EvacuationAllocator::Allocate(AllocationSpace space, case CODE_SPACE: return compaction_spaces_.Get(CODE_SPACE) ->AllocateRaw(object_size, alignment, origin); + case SHARED_SPACE: + return compaction_spaces_.Get(SHARED_SPACE) + ->AllocateRaw(object_size, alignment, origin); default: UNREACHABLE(); } @@ -35,15 +39,19 @@ AllocationResult EvacuationAllocator::Allocate(AllocationSpace space, void EvacuationAllocator::FreeLast(AllocationSpace space, HeapObject object, int object_size) { + object_size = ALIGN_TO_ALLOCATION_ALIGNMENT(object_size); switch (space) { case NEW_SPACE: FreeLastInNewSpace(object, object_size); return; case OLD_SPACE: - FreeLastInOldSpace(object, object_size); + FreeLastInCompactionSpace(OLD_SPACE, object, object_size); return; case MAP_SPACE: - FreeLastInMapSpace(object, object_size); + FreeLastInCompactionSpace(MAP_SPACE, object, object_size); + return; + case SHARED_SPACE: + FreeLastInCompactionSpace(SHARED_SPACE, object, object_size); return; default: // Only new and old space supported. @@ -59,19 +67,11 @@ void EvacuationAllocator::FreeLastInNewSpace(HeapObject object, } } -void EvacuationAllocator::FreeLastInOldSpace(HeapObject object, - int object_size) { - if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object.address(), - object_size)) { - // We couldn't free the last object so we have to write a proper filler. 
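The ALIGN_TO_ALLOCATION_ALIGNMENT calls threaded through this allocator (and through factory.cc below) are the standard power-of-two round-up. A self-contained sketch, assuming an 8-byte alignment (in V8 the actual value is build-configuration dependent):

#include <cstddef>

constexpr std::size_t kObjectAlignment = 8;  // assumption for this sketch

constexpr std::size_t AlignToAllocation(std::size_t size) {
  static_assert((kObjectAlignment & (kObjectAlignment - 1)) == 0,
                "alignment must be a power of two");
  return (size + kObjectAlignment - 1) & ~(kObjectAlignment - 1);
}

// Rounding never shrinks and is idempotent:
static_assert(AlignToAllocation(1) == 8, "");
static_assert(AlignToAllocation(8) == 8, "");
static_assert(AlignToAllocation(13) == 16, "");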
- heap_->CreateFillerObjectAt(object.address(), object_size); - } -} - -void EvacuationAllocator::FreeLastInMapSpace(HeapObject object, - int object_size) { - if (!compaction_spaces_.Get(MAP_SPACE)->TryFreeLast(object.address(), - object_size)) { +void EvacuationAllocator::FreeLastInCompactionSpace(AllocationSpace space, + HeapObject object, + int object_size) { + if (!compaction_spaces_.Get(space)->TryFreeLast(object.address(), + object_size)) { // We couldn't free the last object so we have to write a proper filler. heap_->CreateFillerObjectAt(object.address(), object_size); } diff --git a/deps/v8/src/heap/evacuation-allocator.h b/deps/v8/src/heap/evacuation-allocator.h index 6dbeab1b2987fc..14f5cb0a1f86e8 100644 --- a/deps/v8/src/heap/evacuation-allocator.h +++ b/deps/v8/src/heap/evacuation-allocator.h @@ -39,6 +39,10 @@ class EvacuationAllocator { heap_->map_space()->MergeCompactionSpace( compaction_spaces_.Get(MAP_SPACE)); } + if (heap_->shared_space()) { + heap_->shared_space()->MergeCompactionSpace( + compaction_spaces_.Get(SHARED_SPACE)); + } // Give back remaining LAB space if this EvacuationAllocator's new space LAB // sits right next to new space allocation top. @@ -60,8 +64,8 @@ class EvacuationAllocator { inline AllocationResult AllocateInLAB(int object_size, AllocationAlignment alignment); inline void FreeLastInNewSpace(HeapObject object, int object_size); - inline void FreeLastInOldSpace(HeapObject object, int object_size); - inline void FreeLastInMapSpace(HeapObject object, int object_size); + inline void FreeLastInCompactionSpace(AllocationSpace space, + HeapObject object, int object_size); Heap* const heap_; NewSpace* const new_space_; diff --git a/deps/v8/src/heap/evacuation-verifier-inl.h b/deps/v8/src/heap/evacuation-verifier-inl.h new file mode 100644 index 00000000000000..cf1eee13517c7c --- /dev/null +++ b/deps/v8/src/heap/evacuation-verifier-inl.h @@ -0,0 +1,64 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_HEAP_EVACUATION_VERIFIER_INL_H_ +#define V8_HEAP_EVACUATION_VERIFIER_INL_H_ + +#include "src/heap/evacuation-verifier.h" +#include "src/heap/heap-inl.h" +#include "src/heap/mark-compact.h" + +namespace v8 { +namespace internal { + +#ifdef VERIFY_HEAP + +void FullEvacuationVerifier::VerifyHeapObjectImpl(HeapObject heap_object) { + if (!ShouldVerifyObject(heap_object)) return; + CHECK_IMPLIES(Heap::InYoungGeneration(heap_object), + Heap::InToPage(heap_object)); + CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(heap_object)); +} + +bool FullEvacuationVerifier::ShouldVerifyObject(HeapObject heap_object) { + const bool in_shared_heap = heap_object.InSharedWritableHeap(); + return heap_->isolate()->is_shared_heap_isolate() ? 
in_shared_heap + : !in_shared_heap; +} + +template <typename TSlot> +void FullEvacuationVerifier::VerifyPointersImpl(TSlot start, TSlot end) { + for (TSlot current = start; current < end; ++current) { + typename TSlot::TObject object = current.load(cage_base()); + HeapObject heap_object; + if (object.GetHeapObjectIfStrong(&heap_object)) { + VerifyHeapObjectImpl(heap_object); + } + } +} + +void YoungGenerationEvacuationVerifier::VerifyHeapObjectImpl( + HeapObject heap_object) { + CHECK_IMPLIES(Heap::InYoungGeneration(heap_object), + Heap::InToPage(heap_object)); +} + +template <typename TSlot> +void YoungGenerationEvacuationVerifier::VerifyPointersImpl(TSlot start, + TSlot end) { + for (TSlot current = start; current < end; ++current) { + typename TSlot::TObject object = current.load(cage_base()); + HeapObject heap_object; + if (object.GetHeapObject(&heap_object)) { + VerifyHeapObjectImpl(heap_object); + } + } +} + +#endif // VERIFY_HEAP + +} // namespace internal +} // namespace v8 + +#endif // V8_HEAP_EVACUATION_VERIFIER_INL_H_ diff --git a/deps/v8/src/heap/evacuation-verifier.cc b/deps/v8/src/heap/evacuation-verifier.cc new file mode 100644 index 00000000000000..2396e73f36f080 --- /dev/null +++ b/deps/v8/src/heap/evacuation-verifier.cc @@ -0,0 +1,179 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "src/codegen/assembler-inl.h" +#include "src/codegen/reloc-info.h" +#include "src/heap/evacuation-verifier-inl.h" +#include "src/objects/map-inl.h" + +namespace v8 { +namespace internal { + +#ifdef VERIFY_HEAP + +EvacuationVerifier::EvacuationVerifier(Heap* heap) + : ObjectVisitorWithCageBases(heap), heap_(heap) {} + +void EvacuationVerifier::VisitPointers(HeapObject host, ObjectSlot start, + ObjectSlot end) { + VerifyPointers(start, end); +} + +void EvacuationVerifier::VisitPointers(HeapObject host, MaybeObjectSlot start, + MaybeObjectSlot end) { + VerifyPointers(start, end); +} + +void EvacuationVerifier::VisitCodePointer(HeapObject host, + CodeObjectSlot slot) { + CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); + VerifyCodePointer(slot); +} + +void EvacuationVerifier::VisitRootPointers(Root root, const char* description, + FullObjectSlot start, + FullObjectSlot end) { + VerifyRootPointers(start, end); +} + +void EvacuationVerifier::VisitMapPointer(HeapObject object) { + VerifyMap(object.map(cage_base())); +} +void EvacuationVerifier::VerifyRoots() { + heap_->IterateRootsIncludingClients(this, + base::EnumSet<SkipRoot>{SkipRoot::kWeak}); +} + +void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) { + Address current = start; + while (current < end) { + HeapObject object = HeapObject::FromAddress(current); + if (!object.IsFreeSpaceOrFiller(cage_base())) { + object.Iterate(cage_base(), this); + } + current += ALIGN_TO_ALLOCATION_ALIGNMENT(object.Size(cage_base())); + } +} + +void EvacuationVerifier::VerifyEvacuation(NewSpace* space) { + if (!space) return; + if (v8_flags.minor_mc) { + VerifyEvacuation(PagedNewSpace::From(space)->paged_space()); + return; + } + PageRange range(space->first_allocatable_address(), space->top()); + for (auto it = range.begin(); it != range.end();) { + Page* page = *(it++); + Address current = page->area_start(); + Address limit = it != range.end() ? 
page->area_end() : space->top(); + CHECK(limit == space->top() || !page->Contains(space->top())); + VerifyEvacuationOnPage(current, limit); + } +} + +void EvacuationVerifier::VerifyEvacuation(PagedSpaceBase* space) { + for (Page* p : *space) { + if (p->IsEvacuationCandidate()) continue; + if (p->Contains(space->top())) { + CodePageMemoryModificationScope memory_modification_scope(p); + heap_->CreateFillerObjectAt( + space->top(), static_cast<int>(space->limit() - space->top())); + } + VerifyEvacuationOnPage(p->area_start(), p->area_end()); + } +} + +FullEvacuationVerifier::FullEvacuationVerifier(Heap* heap) + : EvacuationVerifier(heap) {} + +void FullEvacuationVerifier::Run() { + DCHECK(!heap_->sweeping_in_progress()); + VerifyRoots(); + VerifyEvacuation(heap_->new_space()); + VerifyEvacuation(heap_->old_space()); + VerifyEvacuation(heap_->code_space()); + if (heap_->shared_space()) VerifyEvacuation(heap_->shared_space()); + if (heap_->map_space()) VerifyEvacuation(heap_->map_space()); +} + +void FullEvacuationVerifier::VerifyMap(Map map) { VerifyHeapObjectImpl(map); } +void FullEvacuationVerifier::VerifyPointers(ObjectSlot start, ObjectSlot end) { + VerifyPointersImpl(start, end); +} +void FullEvacuationVerifier::VerifyPointers(MaybeObjectSlot start, + MaybeObjectSlot end) { + VerifyPointersImpl(start, end); +} +void FullEvacuationVerifier::VerifyCodePointer(CodeObjectSlot slot) { + CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); + Object maybe_code = slot.load(code_cage_base()); + HeapObject code; + // The slot might contain smi during CodeDataContainer creation, so skip it. + if (maybe_code.GetHeapObject(&code)) { + VerifyHeapObjectImpl(code); + } +} +void FullEvacuationVerifier::VisitCodeTarget(Code host, RelocInfo* rinfo) { + Code target = Code::GetCodeFromTargetAddress(rinfo->target_address()); + VerifyHeapObjectImpl(target); +} +void FullEvacuationVerifier::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) { + VerifyHeapObjectImpl(rinfo->target_object(cage_base())); +} +void FullEvacuationVerifier::VerifyRootPointers(FullObjectSlot start, + FullObjectSlot end) { + VerifyPointersImpl(start, end); +} + +YoungGenerationEvacuationVerifier::YoungGenerationEvacuationVerifier(Heap* heap) + : EvacuationVerifier(heap) {} + +void YoungGenerationEvacuationVerifier::YoungGenerationEvacuationVerifier:: + Run() { + DCHECK(!heap_->sweeping_in_progress()); + VerifyRoots(); + VerifyEvacuation(heap_->new_space()); + VerifyEvacuation(heap_->old_space()); + VerifyEvacuation(heap_->code_space()); + if (heap_->map_space()) VerifyEvacuation(heap_->map_space()); +} + +void YoungGenerationEvacuationVerifier::VerifyMap(Map map) { + VerifyHeapObjectImpl(map); +} +void YoungGenerationEvacuationVerifier::VerifyPointers(ObjectSlot start, + ObjectSlot end) { + VerifyPointersImpl(start, end); +} +void YoungGenerationEvacuationVerifier::VerifyPointers(MaybeObjectSlot start, + MaybeObjectSlot end) { + VerifyPointersImpl(start, end); +} +void YoungGenerationEvacuationVerifier::VerifyCodePointer(CodeObjectSlot slot) { + CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); + Object maybe_code = slot.load(code_cage_base()); + HeapObject code; + // The slot might contain smi during CodeDataContainer creation, so skip it. 
+ if (maybe_code.GetHeapObject(&code)) { + VerifyHeapObjectImpl(code); + } +} +void YoungGenerationEvacuationVerifier::VisitCodeTarget(Code host, + RelocInfo* rinfo) { + Code target = Code::GetCodeFromTargetAddress(rinfo->target_address()); + VerifyHeapObjectImpl(target); +} +void YoungGenerationEvacuationVerifier::VisitEmbeddedPointer(Code host, + RelocInfo* rinfo) { + VerifyHeapObjectImpl(rinfo->target_object(cage_base())); +} +void YoungGenerationEvacuationVerifier::VerifyRootPointers(FullObjectSlot start, + FullObjectSlot end) { + VerifyPointersImpl(start, end); +} + +#endif // VERIFY_HEAP + +} // namespace internal +} // namespace v8 diff --git a/deps/v8/src/heap/evacuation-verifier.h b/deps/v8/src/heap/evacuation-verifier.h new file mode 100644 index 00000000000000..3aa4702eaa2bbb --- /dev/null +++ b/deps/v8/src/heap/evacuation-verifier.h @@ -0,0 +1,104 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_HEAP_EVACUATION_VERIFIER_H_ +#define V8_HEAP_EVACUATION_VERIFIER_H_ + +#include "src/heap/new-spaces.h" +#include "src/heap/paged-spaces.h" +#include "src/objects/map.h" +#include "src/objects/visitors.h" + +namespace v8 { +namespace internal { + +#ifdef VERIFY_HEAP + +class EvacuationVerifier : public ObjectVisitorWithCageBases, + public RootVisitor { + public: + virtual void Run() = 0; + + void VisitPointers(HeapObject host, ObjectSlot start, + ObjectSlot end) override; + + void VisitPointers(HeapObject host, MaybeObjectSlot start, + MaybeObjectSlot end) override; + + void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override; + + void VisitRootPointers(Root root, const char* description, + FullObjectSlot start, FullObjectSlot end) override; + + void VisitMapPointer(HeapObject object) override; + + protected: + explicit EvacuationVerifier(Heap* heap); + + inline Heap* heap() { return heap_; } + + virtual void VerifyMap(Map map) = 0; + virtual void VerifyPointers(ObjectSlot start, ObjectSlot end) = 0; + virtual void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) = 0; + virtual void VerifyCodePointer(CodeObjectSlot slot) = 0; + virtual void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) = 0; + + void VerifyRoots(); + void VerifyEvacuationOnPage(Address start, Address end); + void VerifyEvacuation(NewSpace* new_space); + void VerifyEvacuation(PagedSpaceBase* paged_space); + + Heap* heap_; +}; + +class FullEvacuationVerifier : public EvacuationVerifier { + public: + explicit FullEvacuationVerifier(Heap* heap); + + void Run() override; + + protected: + V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object); + + V8_INLINE bool ShouldVerifyObject(HeapObject heap_object); + + template <typename TSlot> + void VerifyPointersImpl(TSlot start, TSlot end); + + void VerifyMap(Map map) override; + void VerifyPointers(ObjectSlot start, ObjectSlot end) override; + void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override; + void VerifyCodePointer(CodeObjectSlot slot) override; + void VisitCodeTarget(Code host, RelocInfo* rinfo) override; + void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override; + void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override; +}; + +class YoungGenerationEvacuationVerifier : public EvacuationVerifier { + public: + explicit YoungGenerationEvacuationVerifier(Heap* heap); + + void Run() override; + + protected: + V8_INLINE void VerifyHeapObjectImpl(HeapObject 
heap_object); + + template <typename TSlot> + void VerifyPointersImpl(TSlot start, TSlot end); + + void VerifyMap(Map map) override; + void VerifyPointers(ObjectSlot start, ObjectSlot end) override; + void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override; + void VerifyCodePointer(CodeObjectSlot slot) override; + void VisitCodeTarget(Code host, RelocInfo* rinfo) override; + void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override; + void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override; +}; + +#endif // VERIFY_HEAP + +} // namespace internal +} // namespace v8 + +#endif // V8_HEAP_EVACUATION_VERIFIER_H_ diff --git a/deps/v8/src/heap/factory-base.cc b/deps/v8/src/heap/factory-base.cc index 9533456935d345..0dec79f03456d0 100644 --- a/deps/v8/src/heap/factory-base.cc +++ b/deps/v8/src/heap/factory-base.cc @@ -216,7 +216,7 @@ Handle<ByteArray> FactoryBase<Impl>::NewByteArray(int length, UNREACHABLE(); } if (length == 0) return impl()->empty_byte_array(); - int size = ByteArray::SizeFor(length); + int size = ALIGN_TO_ALLOCATION_ALIGNMENT(ByteArray::SizeFor(length)); HeapObject result = AllocateRawWithImmortalMap( size, allocation, read_only_roots().byte_array_map()); DisallowGarbageCollection no_gc; diff --git a/deps/v8/src/heap/factory.cc b/deps/v8/src/heap/factory.cc index 7afbc9b6836e43..bcb2f6475ec357 100644 --- a/deps/v8/src/heap/factory.cc +++ b/deps/v8/src/heap/factory.cc @@ -351,7 +351,7 @@ HeapObject Factory::AllocateRawWithAllocationSite( int size = map->instance_size(); if (!allocation_site.is_null()) { DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL); - size += AllocationMemento::kSize; + size += ALIGN_TO_ALLOCATION_ALIGNMENT(AllocationMemento::kSize); } HeapObject result = allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>( size, allocation); @@ -360,8 +360,9 @@ HeapObject Factory::AllocateRawWithAllocationSite( : UPDATE_WRITE_BARRIER; result.set_map_after_allocation(*map, write_barrier_mode); if (!allocation_site.is_null()) { - AllocationMemento alloc_memento = AllocationMemento::unchecked_cast( - Object(result.ptr() + map->instance_size())); + int aligned_size = ALIGN_TO_ALLOCATION_ALIGNMENT(map->instance_size()); + AllocationMemento alloc_memento = + AllocationMemento::unchecked_cast(Object(result.ptr() + aligned_size)); InitializeAllocationMemento(alloc_memento, *allocation_site); } return result; @@ -774,6 +775,11 @@ MaybeHandle<String> NewStringFromUtf8Variant(Isolate* isolate, MaybeHandle<String> Factory::NewStringFromUtf8( const base::Vector<const uint8_t>& string, unibrow::Utf8Variant utf8_variant, AllocationType allocation) { + if (string.size() > kMaxInt) { + // The Utf8Decode can't handle longer inputs, and we couldn't create + // strings from them anyway. + THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String); + } auto peek_bytes = [&]() -> base::Vector<const uint8_t> { return string; }; return NewStringFromUtf8Variant(isolate(), peek_bytes, utf8_variant, allocation); @@ -792,6 +798,8 @@ MaybeHandle<String> Factory::NewStringFromUtf8( DCHECK_EQ(sizeof(uint8_t), array->type()->element_type().value_kind_size()); DCHECK_LE(start, end); DCHECK_LE(end, array->length()); + // {end - start} can never be more than what the Utf8Decoder can handle. 
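The string-factory guards added above use two complementary styles: a runtime length check where the input size is unbounded (NewStringFromUtf8 on a raw byte vector), and a static_assert where the container type already bounds the length (ByteArray, WasmArray), documenting that no runtime check is needed. A generic sketch of the pair; the names and the exception are illustrative, not V8's error machinery:

#include <cstdint>
#include <limits>
#include <stdexcept>
#include <vector>

constexpr int kMaxInt = std::numeric_limits<int>::max();

int CheckedLength(const std::vector<std::uint8_t>& bytes) {
  // Unbounded input: must be checked at runtime before narrowing to int.
  if (bytes.size() > static_cast<std::size_t>(kMaxInt)) {
    throw std::length_error("input too long to decode");
  }
  return static_cast<int>(bytes.size());
}

template <std::size_t kMaxLength>
struct BoundedArray {
  // Statically bounded input: the compiler proves narrowing is safe, so a
  // static_assert replaces the runtime branch.
  static_assert(kMaxLength <= static_cast<std::size_t>(kMaxInt),
                "length always fits in int");
  int length;
};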
+ static_assert(WasmArray::MaxLength(sizeof(uint8_t)) <= kMaxInt); auto peek_bytes = [&]() -> base::Vector<const uint8_t> { const uint8_t* contents = reinterpret_cast<const uint8_t*>(array->ElementAddress(0)); @@ -806,6 +814,8 @@ MaybeHandle<String> Factory::NewStringFromUtf8( unibrow::Utf8Variant utf8_variant, AllocationType allocation) { DCHECK_LE(start, end); DCHECK_LE(end, array->length()); + // {end - start} can never be more than what the Utf8Decoder can handle. + static_assert(ByteArray::kMaxLength <= kMaxInt); auto peek_bytes = [&]() -> base::Vector<const uint8_t> { const uint8_t* contents = reinterpret_cast<const uint8_t*>(array->GetDataStartAddress()); @@ -838,6 +848,8 @@ MaybeHandle<String> Factory::NewStringFromUtf16(Handle<WasmArray> array, DCHECK_EQ(sizeof(uint16_t), array->type()->element_type().value_kind_size()); DCHECK_LE(start, end); DCHECK_LE(end, array->length()); + // {end - start} can never be more than what the Utf8Decoder can handle. + static_assert(WasmArray::MaxLength(sizeof(uint16_t)) <= kMaxInt); auto peek_bytes = [&]() -> base::Vector<const uint16_t> { const uint16_t* contents = reinterpret_cast<const uint16_t*>(array->ElementAddress(0)); @@ -2036,7 +2048,7 @@ Handle<Map> Factory::NewMap(InstanceType type, int instance_size, DisallowGarbageCollection no_gc; Heap* roots = allocation_type == AllocationType::kMap ? isolate()->heap() - : isolate()->shared_isolate()->heap(); + : isolate()->shared_heap_isolate()->heap(); result.set_map_after_allocation(ReadOnlyRoots(roots).meta_map(), SKIP_WRITE_BARRIER); return handle(InitializeMap(Map::cast(result), type, instance_size, @@ -2119,10 +2131,12 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite( DCHECK(site.is_null() || AllocationSite::CanTrack(instance_type)); int object_size = map->instance_size(); - int adjusted_object_size = object_size; + int aligned_object_size = ALIGN_TO_ALLOCATION_ALIGNMENT(object_size); + int adjusted_object_size = aligned_object_size; if (!site.is_null()) { DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL); - adjusted_object_size += AllocationMemento::kSize; + adjusted_object_size += + ALIGN_TO_ALLOCATION_ALIGNMENT(AllocationMemento::kSize); } HeapObject raw_clone = allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>( @@ -2142,7 +2156,7 @@ Handle<JSObject> Factory::CopyJSObjectWithAllocationSite( } if (!site.is_null()) { AllocationMemento alloc_memento = AllocationMemento::unchecked_cast( - Object(raw_clone.ptr() + object_size)); + Object(raw_clone.ptr() + aligned_object_size)); InitializeAllocationMemento(alloc_memento, *site); } @@ -2716,6 +2730,10 @@ Handle<JSObject> Factory::NewJSObjectFromMap( InitializeJSObjectFromMap(js_obj, *empty_fixed_array(), *map); DCHECK(js_obj.HasFastElements() || + (isolate()->bootstrapper()->IsActive() || + *map == isolate() + ->raw_native_context() + .js_array_template_literal_object_map()) || js_obj.HasTypedArrayOrRabGsabTypedArrayElements() || js_obj.HasFastStringWrapperElements() || js_obj.HasFastArgumentsElements() || js_obj.HasDictionaryElements() || @@ -2788,7 +2806,9 @@ Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArrayBase> elements, AllocationType allocation) { Handle<JSArray> array = NewJSArrayWithUnverifiedElements( elements, elements_kind, length, allocation); +#ifdef ENABLE_SLOW_DCHECKS JSObject::ValidateElements(*array); +#endif return array; } @@ -2802,8 +2822,14 @@ Handle<JSArray> Factory::NewJSArrayWithUnverifiedElements( JSFunction array_function = native_context.array_function(); map = array_function.initial_map(); } - 
Handle<JSArray> array = Handle<JSArray>::cast( - NewJSObjectFromMap(handle(map, isolate()), allocation)); + return NewJSArrayWithUnverifiedElements(handle(map, isolate()), elements, + length, allocation); +} + +Handle<JSArray> Factory::NewJSArrayWithUnverifiedElements( + Handle<Map> map, Handle<FixedArrayBase> elements, int length, + AllocationType allocation) { + auto array = Handle<JSArray>::cast(NewJSObjectFromMap(map, allocation)); DisallowGarbageCollection no_gc; JSArray raw = *array; raw.set_elements(*elements); @@ -2811,6 +2837,23 @@ Handle<JSArray> Factory::NewJSArrayWithUnverifiedElements( return array; } +Handle<JSArray> Factory::NewJSArrayForTemplateLiteralArray( + Handle<FixedArray> cooked_strings, Handle<FixedArray> raw_strings) { + Handle<JSArray> raw_object = + NewJSArrayWithElements(raw_strings, PACKED_ELEMENTS, + raw_strings->length(), AllocationType::kOld); + JSObject::SetIntegrityLevel(raw_object, FROZEN, kThrowOnError).ToChecked(); + + Handle<NativeContext> native_context = isolate()->native_context(); + Handle<JSArray> template_object = NewJSArrayWithUnverifiedElements( + handle(native_context->js_array_template_literal_object_map(), isolate()), + cooked_strings, cooked_strings->length(), AllocationType::kOld); + TemplateLiteralObject::SetRaw(template_object, raw_object); + DCHECK_EQ(template_object->map(), + native_context->js_array_template_literal_object_map()); + return template_object; +} + void Factory::NewJSArrayStorage(Handle<JSArray> array, int length, int capacity, ArrayStorageAllocationMode mode) { DCHECK(capacity >= length); @@ -3011,13 +3054,14 @@ MaybeHandle<JSArrayBuffer> Factory::NewJSArrayBufferAndBackingStore( Handle<JSArrayBuffer> Factory::NewJSSharedArrayBuffer( std::shared_ptr<BackingStore> backing_store) { - DCHECK_IMPLIES(backing_store->is_resizable(), v8_flags.harmony_rab_gsab); + DCHECK_IMPLIES(backing_store->is_resizable_by_js(), + v8_flags.harmony_rab_gsab); Handle<Map> map( isolate()->native_context()->shared_array_buffer_fun().initial_map(), isolate()); auto result = Handle<JSArrayBuffer>::cast( NewJSObjectFromMap(map, AllocationType::kYoung)); - ResizableFlag resizable = backing_store->is_resizable() + ResizableFlag resizable = backing_store->is_resizable_by_js() ? ResizableFlag::kResizable : ResizableFlag::kNotResizable; result->Setup(SharedFlag::kShared, resizable, std::move(backing_store)); @@ -3133,7 +3177,8 @@ Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type, raw.set_length(length); raw.SetOffHeapDataPtr(isolate(), buffer->backing_store(), byte_offset); raw.set_is_length_tracking(false); - raw.set_is_backed_by_rab(!buffer->is_shared() && buffer->is_resizable()); + raw.set_is_backed_by_rab(!buffer->is_shared() && + buffer->is_resizable_by_js()); return typed_array; } @@ -3148,7 +3193,8 @@ Handle<JSDataView> Factory::NewJSDataView(Handle<JSArrayBuffer> buffer, isolate(), static_cast<uint8_t*>(buffer->backing_store()) + byte_offset); // TODO(v8:11111): Support creating length tracking DataViews via the API. 
obj->set_is_length_tracking(false); - obj->set_is_backed_by_rab(!buffer->is_shared() && buffer->is_resizable()); + obj->set_is_backed_by_rab(!buffer->is_shared() && + buffer->is_resizable_by_js()); return obj; } diff --git a/deps/v8/src/heap/factory.h b/deps/v8/src/heap/factory.h index 75676b4624cbea..6c9cc2d4d8ed1e 100644 --- a/deps/v8/src/heap/factory.h +++ b/deps/v8/src/heap/factory.h @@ -606,6 +606,9 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> { ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND, AllocationType allocation = AllocationType::kYoung); + Handle<JSArray> NewJSArrayForTemplateLiteralArray( + Handle<FixedArray> cooked_strings, Handle<FixedArray> raw_strings); + void NewJSArrayStorage( Handle<JSArray> array, int length, int capacity, ArrayStorageAllocationMode mode = DONT_INITIALIZE_ARRAY_ELEMENTS); @@ -1141,6 +1144,9 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> { Handle<JSArray> NewJSArrayWithUnverifiedElements( Handle<FixedArrayBase> elements, ElementsKind elements_kind, int length, AllocationType allocation = AllocationType::kYoung); + Handle<JSArray> NewJSArrayWithUnverifiedElements( + Handle<Map> map, Handle<FixedArrayBase> elements, int length, + AllocationType allocation = AllocationType::kYoung); // Creates the backing storage for a JSArray. This handle must be discarded // before returning the JSArray reference to code outside Factory, which might diff --git a/deps/v8/src/heap/gc-tracer-inl.h b/deps/v8/src/heap/gc-tracer-inl.h index 9dc46c7431dab9..248c3490fb2ed8 100644 --- a/deps/v8/src/heap/gc-tracer-inl.h +++ b/deps/v8/src/heap/gc-tracer-inl.h @@ -121,6 +121,10 @@ bool GCTracer::IsInObservablePause() const { return 0.0 < start_of_observable_pause_; } +bool GCTracer::IsInAtomicPause() const { + return current_.state == Event::State::ATOMIC; +} + bool GCTracer::IsConsistentWithCollector(GarbageCollector collector) const { return (collector == GarbageCollector::SCAVENGER && current_.type == Event::SCAVENGER) || diff --git a/deps/v8/src/heap/gc-tracer.cc b/deps/v8/src/heap/gc-tracer.cc index 47a97f91a118b0..b9dc605f5d0549 100644 --- a/deps/v8/src/heap/gc-tracer.cc +++ b/deps/v8/src/heap/gc-tracer.cc @@ -76,7 +76,7 @@ const char* GCTracer::Event::TypeName(bool short_name) const { return (short_name) ? "s" : "Scavenge"; case MARK_COMPACTOR: case INCREMENTAL_MARK_COMPACTOR: - return (short_name) ? "ms" : "Mark-sweep"; + return (short_name) ? "mc" : "Mark-Compact"; case MINOR_MARK_COMPACTOR: case INCREMENTAL_MINOR_MARK_COMPACTOR: return (short_name) ? 
"mmc" : "Minor Mark-Compact"; @@ -223,14 +223,6 @@ void GCTracer::ResetForTesting() { } } -void GCTracer::NotifyYoungGenerationHandling( - YoungGenerationHandling young_generation_handling) { - DCHECK_GE(1, start_counter_); - DCHECK_EQ(Event::SCAVENGER, current_.type); - heap_->isolate()->counters()->young_generation_handling()->AddSample( - static_cast<int>(young_generation_handling)); -} - void GCTracer::StartObservablePause() { DCHECK_EQ(0, start_counter_); start_counter_++; @@ -269,6 +261,8 @@ void GCTracer::StartCycle(GarbageCollector collector, DCHECK_IMPLIES(young_gc_while_full_gc_, Heap::IsYoungGenerationCollector(collector) && !Event::IsYoungGenerationEvent(current_.type)); + DCHECK_IMPLIES(collector != GarbageCollector::SCAVENGER, + !young_gc_while_full_gc_); Event::Type type; switch (collector) { @@ -468,6 +462,7 @@ void GCTracer::StopCycle(GarbageCollector collector) { // If a young generation GC interrupted an unfinished full GC cycle, restore // the event corresponding to the full GC cycle. if (young_gc_while_full_gc_) { + DCHECK_EQ(current_.type, Event::Type::SCAVENGER); std::swap(current_, previous_); young_gc_while_full_gc_ = false; } @@ -517,7 +512,7 @@ void GCTracer::NotifySweepingCompleted() { DCHECK((current_.type == Event::MARK_COMPACTOR || current_.type == Event::INCREMENTAL_MARK_COMPACTOR) && (current_.state == Event::State::SWEEPING || - (v8_flags.verify_heap && current_.state == Event::State::ATOMIC))); + current_.state == Event::State::ATOMIC)); } else { DCHECK(IsSweepingInProgress()); } @@ -762,14 +757,14 @@ void GCTracer::PrintNVP() const { "holes_size_after=%zu " "allocated=%zu " "promoted=%zu " - "semi_space_copied=%zu " + "new_space_survived=%zu " "nodes_died_in_new=%d " "nodes_copied_in_new=%d " "nodes_promoted=%d " "promotion_ratio=%.1f%% " "average_survival_ratio=%.1f%% " "promotion_rate=%.1f%% " - "semi_space_copy_rate=%.1f%% " + "new_space_survive_rate_=%.1f%% " "new_space_allocation_throughput=%.1f " "unmapper_chunks=%d\n", duration, spent_in_mutator, current_.TypeName(true), @@ -800,11 +795,11 @@ void GCTracer::PrintNVP() const { current_.end_object_size, current_.start_holes_size, current_.end_holes_size, allocated_since_last_gc, heap_->promoted_objects_size(), - heap_->semi_space_copied_object_size(), + heap_->new_space_surviving_object_size(), heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_, heap_->nodes_promoted_, heap_->promotion_ratio_, AverageSurvivalRatio(), heap_->promotion_rate_, - heap_->semi_space_copied_rate_, + heap_->new_space_surviving_rate_, NewSpaceAllocationThroughputInBytesPerMillisecond(), heap_->memory_allocator()->unmapper()->NumberOfChunks()); break; @@ -817,46 +812,92 @@ void GCTracer::PrintNVP() const { "minor_mc=%.2f " "time_to_safepoint=%.2f " "mark=%.2f " + "mark.incremental_roots=%.2f " + "mark.finish_incremental=%.2f " "mark.seed=%.2f " - "mark.roots=%.2f " - "mark.weak=%.2f " + "mark.closure_parallel=%.2f " + "mark.closure=%.2f " "mark.global_handles=%.2f " "clear=%.2f " "clear.string_table=%.2f " - "clear.weak_lists=%.2f " + "complete.sweep_array_buffers=%.2f " "evacuate=%.2f " + "evacuate.clean_up=%.2f " "evacuate.copy=%.2f " + "evacuate.prologue=%.2f " + "evacuate.epilogue=%.2f " + "evacuate.rebalance=%.2f " "evacuate.update_pointers=%.2f " "evacuate.update_pointers.slots=%.2f " + "evacuate.update_pointers.weak=%.2f " + "sweep=%.2f " + "sweep.new=%.2f " + "sweep.new_lo=%.2f " + "finish=%.2f " + "finish.sweep_array_buffers=%.2f " "background.mark=%.2f " + "background.sweep=%.2f " 
"background.evacuate.copy=%.2f " "background.evacuate.update_pointers=%.2f " "background.unmapper=%.2f " "unmapper=%.2f " - "update_marking_deque=%.2f " - "reset_liveness=%.2f\n", + "total_size_before=%zu " + "total_size_after=%zu " + "holes_size_before=%zu " + "holes_size_after=%zu " + "allocated=%zu " + "promoted=%zu " + "new_space_survived=%zu " + "nodes_died_in_new=%d " + "nodes_copied_in_new=%d " + "nodes_promoted=%d " + "promotion_ratio=%.1f%% " + "average_survival_ratio=%.1f%% " + "promotion_rate=%.1f%% " + "new_space_survive_rate_=%.1f%% " + "new_space_allocation_throughput=%.1f\n", duration, spent_in_mutator, "mmc", current_.reduce_memory, current_scope(Scope::MINOR_MC), current_scope(Scope::TIME_TO_SAFEPOINT), current_scope(Scope::MINOR_MC_MARK), - current_scope(Scope::MINOR_MC_MARK_SEED), current_scope(Scope::MINOR_MC_MARK_ROOTS), - current_scope(Scope::MINOR_MC_MARK_WEAK), + current_scope(Scope::MINOR_MC_MARK_FINISH_INCREMENTAL), + current_scope(Scope::MINOR_MC_MARK_SEED), + current_scope(Scope::MINOR_MC_MARK_CLOSURE_PARALLEL), + current_scope(Scope::MINOR_MC_MARK_CLOSURE), current_scope(Scope::MINOR_MC_MARK_GLOBAL_HANDLES), current_scope(Scope::MINOR_MC_CLEAR), current_scope(Scope::MINOR_MC_CLEAR_STRING_TABLE), - current_scope(Scope::MINOR_MC_CLEAR_WEAK_LISTS), + current_scope(Scope::MINOR_MC_COMPLETE_SWEEP_ARRAY_BUFFERS), current_scope(Scope::MINOR_MC_EVACUATE), + current_scope(Scope::MINOR_MC_EVACUATE_CLEAN_UP), current_scope(Scope::MINOR_MC_EVACUATE_COPY), + current_scope(Scope::MINOR_MC_EVACUATE_PROLOGUE), + current_scope(Scope::MINOR_MC_EVACUATE_EPILOGUE), + current_scope(Scope::MINOR_MC_EVACUATE_REBALANCE), current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS), current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS), + current_scope(Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_WEAK), + current_scope(Scope::MINOR_MC_SWEEP), + current_scope(Scope::MINOR_MC_SWEEP_NEW), + current_scope(Scope::MINOR_MC_SWEEP_NEW_LO), + current_scope(Scope::MINOR_MC_FINISH), + current_scope(Scope::MINOR_MC_FINISH_SWEEP_ARRAY_BUFFERS), current_scope(Scope::MINOR_MC_BACKGROUND_MARKING), + current_scope(Scope::MINOR_MC_BACKGROUND_SWEEPING), current_scope(Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY), current_scope(Scope::MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS), current_scope(Scope::BACKGROUND_UNMAPPER), - current_scope(Scope::UNMAPPER), - current_scope(Scope::MINOR_MC_MARKING_DEQUE), - current_scope(Scope::MINOR_MC_RESET_LIVENESS)); + current_scope(Scope::UNMAPPER), current_.start_object_size, + current_.end_object_size, current_.start_holes_size, + current_.end_holes_size, allocated_since_last_gc, + heap_->promoted_objects_size(), + heap_->new_space_surviving_object_size(), + heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_, + heap_->nodes_promoted_, heap_->promotion_ratio_, + AverageSurvivalRatio(), heap_->promotion_rate_, + heap_->new_space_surviving_rate_, + NewSpaceAllocationThroughputInBytesPerMillisecond()); break; case Event::MARK_COMPACTOR: case Event::INCREMENTAL_MARK_COMPACTOR: @@ -912,6 +953,8 @@ void GCTracer::PrintNVP() const { "sweep=%.1f " "sweep.code=%.1f " "sweep.map=%.1f " + "sweep.new=%.1f " + "sweep.new_lo=%.1f " "sweep.old=%.1f " "incremental=%.1f " "incremental.finalize=%.1f " @@ -939,14 +982,14 @@ void GCTracer::PrintNVP() const { "holes_size_after=%zu " "allocated=%zu " "promoted=%zu " - "semi_space_copied=%zu " + "new_space_survived=%zu " "nodes_died_in_new=%d " "nodes_copied_in_new=%d " "nodes_promoted=%d " "promotion_ratio=%.1f%% " 
"average_survival_ratio=%.1f%% " "promotion_rate=%.1f%% " - "semi_space_copy_rate=%.1f%% " + "new_space_survive_rate=%.1f%% " "new_space_allocation_throughput=%.1f " "unmapper_chunks=%d " "compaction_speed=%.f\n", @@ -996,6 +1039,8 @@ void GCTracer::PrintNVP() const { current_scope(Scope::MC_PROLOGUE), current_scope(Scope::MC_SWEEP), current_scope(Scope::MC_SWEEP_CODE), current_scope(Scope::MC_SWEEP_MAP), + current_scope(Scope::MC_SWEEP_NEW), + current_scope(Scope::MC_SWEEP_NEW_LO), current_scope(Scope::MC_SWEEP_OLD), current_scope(Scope::MC_INCREMENTAL), current_scope(Scope::MC_INCREMENTAL_FINALIZE), @@ -1021,11 +1066,11 @@ void GCTracer::PrintNVP() const { current_.end_object_size, current_.start_holes_size, current_.end_holes_size, allocated_since_last_gc, heap_->promoted_objects_size(), - heap_->semi_space_copied_object_size(), + heap_->new_space_surviving_object_size(), heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_, heap_->nodes_promoted_, heap_->promotion_ratio_, AverageSurvivalRatio(), heap_->promotion_rate_, - heap_->semi_space_copied_rate_, + heap_->new_space_surviving_rate_, NewSpaceAllocationThroughputInBytesPerMillisecond(), heap_->memory_allocator()->unmapper()->NumberOfChunks(), CompactionSpeedInBytesPerMillisecond()); @@ -1320,29 +1365,6 @@ void GCTracer::RecordGCPhasesHistograms(RecordGCPhasesInfo::Mode mode) { heap_->isolate()->counters()->gc_marking_sum()->AddSample( static_cast<int>(overall_marking_time)); - // Filter out samples where - // - we don't have high-resolution timers; - // - size of marked objects is very small; - // - marking time is rounded to 0; - constexpr size_t kMinObjectSizeForReportingThroughput = 1024 * 1024; - if (base::TimeTicks::IsHighResolution() && - heap_->SizeOfObjects() > kMinObjectSizeForReportingThroughput && - overall_marking_time > 0) { - const double overall_v8_marking_time = - overall_marking_time - - current_.scopes[Scope::MC_MARK_EMBEDDER_TRACING]; - if (overall_v8_marking_time > 0) { - const int main_thread_marking_throughput_mb_per_s = - static_cast<int>(static_cast<double>(heap_->SizeOfObjects()) / - overall_v8_marking_time * 1000 / 1024 / 1024); - heap_->isolate() - ->counters() - ->gc_main_thread_marking_throughput() - ->AddSample( - static_cast<int>(main_thread_marking_throughput_mb_per_s)); - } - } - DCHECK_EQ(Scope::LAST_TOP_MC_SCOPE, Scope::MC_SWEEP); } else if (mode == RecordGCPhasesInfo::Mode::Scavenger) { counters->gc_scavenger_scavenge_main()->AddSample( diff --git a/deps/v8/src/heap/gc-tracer.h b/deps/v8/src/heap/gc-tracer.h index 9be60cf7c5af58..586aa86bf1c739 100644 --- a/deps/v8/src/heap/gc-tracer.h +++ b/deps/v8/src/heap/gc-tracer.h @@ -11,7 +11,6 @@ #include "src/base/optional.h" #include "src/base/ring-buffer.h" #include "src/common/globals.h" -#include "src/heap/heap.h" #include "src/init/heap-symbols.h" #include "src/logging/counters.h" #include "testing/gtest/include/gtest/gtest_prod.h" // nogncheck @@ -120,8 +119,8 @@ class V8_EXPORT_PRIVATE GCTracer { MARK_COMPACTOR = 1, INCREMENTAL_MARK_COMPACTOR = 2, MINOR_MARK_COMPACTOR = 3, - START = 4, - INCREMENTAL_MINOR_MARK_COMPACTOR = 5, + INCREMENTAL_MINOR_MARK_COMPACTOR = 4, + START = 5, }; // Returns true if the event corresponds to a young generation GC. 
@@ -270,11 +269,9 @@ class V8_EXPORT_PRIVATE GCTracer { void NotifyYoungCppGCRunning(); void NotifyYoungCppGCCompleted(); - void NotifyYoungGenerationHandling( - YoungGenerationHandling young_generation_handling); - #ifdef DEBUG V8_INLINE bool IsInObservablePause() const; + V8_INLINE bool IsInAtomicPause() const; // Checks if the current event is consistent with a collector. V8_INLINE bool IsConsistentWithCollector(GarbageCollector collector) const; @@ -402,6 +399,10 @@ class V8_EXPORT_PRIVATE GCTracer { V8_INLINE WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats(); #endif // defined(V8_RUNTIME_CALL_STATS) + bool IsCurrentGCDueToAllocationFailure() const { + return current_.gc_reason == GarbageCollectionReason::kAllocationFailure; + } + private: FRIEND_TEST(GCTracer, AverageSpeed); FRIEND_TEST(GCTracerTest, AllocationThroughput); diff --git a/deps/v8/src/heap/global-handle-marking-visitor.cc b/deps/v8/src/heap/global-handle-marking-visitor.cc index fc0d669fce9aad..b466051380ec16 100644 --- a/deps/v8/src/heap/global-handle-marking-visitor.cc +++ b/deps/v8/src/heap/global-handle-marking-visitor.cc @@ -4,16 +4,16 @@ #include "src/heap/global-handle-marking-visitor.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/marking-worklist-inl.h" namespace v8 { namespace internal { GlobalHandleMarkingVisitor::GlobalHandleMarkingVisitor( - Heap& heap, MarkingState& marking_state, - MarkingWorklists::Local& local_marking_worklist) + Heap& heap, MarkingWorklists::Local& local_marking_worklist) : heap_(heap), - marking_state_(marking_state), + marking_state_(*heap_.marking_state()), local_marking_worklist_(local_marking_worklist), traced_node_bounds_( heap.isolate()->global_handles()->GetTracedNodeBounds()) {} diff --git a/deps/v8/src/heap/global-handle-marking-visitor.h b/deps/v8/src/heap/global-handle-marking-visitor.h index 1b2fbd9cbb862e..71e805810e9414 100644 --- a/deps/v8/src/heap/global-handle-marking-visitor.h +++ b/deps/v8/src/heap/global-handle-marking-visitor.h @@ -18,7 +18,7 @@ namespace internal { // which requires them to be kept alive. class GlobalHandleMarkingVisitor final : public ::heap::base::StackVisitor { public: - GlobalHandleMarkingVisitor(Heap&, MarkingState&, MarkingWorklists::Local&); + GlobalHandleMarkingVisitor(Heap&, MarkingWorklists::Local&); ~GlobalHandleMarkingVisitor() override = default; void VisitPointer(const void*) override; diff --git a/deps/v8/src/heap/heap-allocator-inl.h b/deps/v8/src/heap/heap-allocator-inl.h index 0abf92fb107219..06783b5ac699ec 100644 --- a/deps/v8/src/heap/heap-allocator-inl.h +++ b/deps/v8/src/heap/heap-allocator-inl.h @@ -225,6 +225,7 @@ V8_WARN_UNUSED_RESULT V8_INLINE HeapObject HeapAllocator::AllocateRawWith( AllocationAlignment alignment) { AllocationResult result; HeapObject object; + size = ALIGN_TO_ALLOCATION_ALIGNMENT(size); if (allocation == AllocationType::kYoung) { result = AllocateRaw<AllocationType::kYoung>(size, origin, alignment); if (result.To(&object)) { diff --git a/deps/v8/src/heap/heap-allocator.cc b/deps/v8/src/heap/heap-allocator.cc index c78098ef289695..8824d45bccd577 100644 --- a/deps/v8/src/heap/heap-allocator.cc +++ b/deps/v8/src/heap/heap-allocator.cc @@ -27,11 +27,11 @@ void HeapAllocator::Setup() { ? static_cast<PagedSpace*>(spaces_[MAP_SPACE]) : static_cast<PagedSpace*>(spaces_[OLD_SPACE]); - shared_old_allocator_ = heap_->shared_old_allocator_.get(); + shared_old_allocator_ = heap_->shared_space_allocator_.get(); shared_map_allocator_ = heap_->shared_map_allocator_ ? 
heap_->shared_map_allocator_.get() : shared_old_allocator_; - shared_lo_space_ = heap_->shared_lo_space(); + shared_lo_space_ = heap_->shared_lo_allocation_space(); } void HeapAllocator::SetReadOnlySpace(ReadOnlySpace* read_only_space) { @@ -90,7 +90,8 @@ AllocationResult HeapAllocator::AllocateRawWithLightRetrySlowPath( // Two GCs before returning failure. for (int i = 0; i < 2; i++) { if (IsSharedAllocationType(allocation)) { - heap_->CollectSharedGarbage(GarbageCollectionReason::kAllocationFailure); + heap_->CollectGarbageShared(heap_->main_thread_local_heap(), + GarbageCollectionReason::kAllocationFailure); } else { AllocationSpace space_to_gc = AllocationTypeToGCSpace(allocation); if (v8_flags.minor_mc && i > 0) { @@ -117,12 +118,13 @@ AllocationResult HeapAllocator::AllocateRawWithRetryOrFailSlowPath( if (!result.IsFailure()) return result; if (IsSharedAllocationType(allocation)) { - heap_->CollectSharedGarbage(GarbageCollectionReason::kLastResort); + heap_->CollectGarbageShared(heap_->main_thread_local_heap(), + GarbageCollectionReason::kLastResort); // We need always_allocate() to be true both on the client- and // server-isolate. It is used in both code paths. AlwaysAllocateScope shared_scope( - heap_->isolate()->shared_isolate()->heap()); + heap_->isolate()->shared_heap_isolate()->heap()); AlwaysAllocateScope client_scope(heap_); result = AllocateRaw(size, allocation, origin, alignment); } else { diff --git a/deps/v8/src/heap/heap-inl.h b/deps/v8/src/heap/heap-inl.h index 6991a6dca52b49..64c075f269588d 100644 --- a/deps/v8/src/heap/heap-inl.h +++ b/deps/v8/src/heap/heap-inl.h @@ -14,7 +14,6 @@ #include "src/base/atomicops.h" #include "src/base/platform/mutex.h" #include "src/base/platform/platform.h" -#include "src/base/sanitizer/msan.h" #include "src/common/assert-scope.h" #include "src/common/code-memory-access-inl.h" #include "src/execution/isolate-data.h" @@ -26,6 +25,7 @@ #include "src/heap/heap-write-barrier.h" #include "src/heap/heap.h" #include "src/heap/large-spaces.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/memory-allocator.h" #include "src/heap/memory-chunk-layout.h" #include "src/heap/memory-chunk.h" @@ -99,16 +99,15 @@ base::EnumSet<CodeFlushMode> Heap::GetCodeFlushMode(Isolate* isolate) { Isolate* Heap::isolate() const { return Isolate::FromHeap(this); } -#ifdef DEBUG bool Heap::IsMainThread() const { return isolate()->thread_id() == ThreadId::Current(); } bool Heap::IsSharedMainThread() const { - Isolate* shared_isolate = isolate()->shared_isolate(); - return shared_isolate && shared_isolate->thread_id() == ThreadId::Current(); + if (!isolate()->has_shared_heap()) return false; + Isolate* shared_heap_isolate = isolate()->shared_heap_isolate(); + return shared_heap_isolate->thread_id() == ThreadId::Current(); } -#endif int64_t Heap::external_memory() { return external_memory_.total(); } @@ -123,7 +122,7 @@ PagedSpace* Heap::space_for_maps() { ConcurrentAllocator* Heap::concurrent_allocator_for_maps() { return V8_LIKELY(shared_map_allocator_) ? 
shared_map_allocator_.get() - : shared_old_allocator_.get(); + : shared_space_allocator_.get(); } RootsTable& Heap::roots_table() { return isolate()->roots_table(); } @@ -171,11 +170,12 @@ void Heap::SetPendingOptimizeForTestBytecode(Object hash_table) { } PagedSpace* Heap::paged_space(int idx) { - DCHECK(idx == OLD_SPACE || idx == CODE_SPACE || idx == MAP_SPACE); - return static_cast<PagedSpace*>(space_[idx]); + DCHECK(idx == OLD_SPACE || idx == CODE_SPACE || idx == MAP_SPACE || + idx == SHARED_SPACE); + return static_cast<PagedSpace*>(space_[idx].get()); } -Space* Heap::space(int idx) { return space_[idx]; } +Space* Heap::space(int idx) { return space_[idx].get(); } Address* Heap::NewSpaceAllocationTopAddress() { return new_space_ ? new_space_->allocation_top_address() : nullptr; @@ -353,93 +353,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) { CopyTagged(dst, src, static_cast<size_t>(byte_size / kTaggedSize)); } -template <Heap::FindMementoMode mode> -AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject object) { - Address object_address = object.address(); - Address memento_address = object_address + object.SizeFromMap(map); - Address last_memento_word_address = memento_address + kTaggedSize; - // If the memento would be on another page, bail out immediately. - if (!Page::OnSamePage(object_address, last_memento_word_address)) { - return AllocationMemento(); - } - HeapObject candidate = HeapObject::FromAddress(memento_address); - ObjectSlot candidate_map_slot = candidate.map_slot(); - // This fast check may peek at an uninitialized word. However, the slow check - // below (memento_address == top) ensures that this is safe. Mark the word as - // initialized to silence MemorySanitizer warnings. - MSAN_MEMORY_IS_INITIALIZED(candidate_map_slot.address(), kTaggedSize); - if (!candidate_map_slot.contains_map_value( - ReadOnlyRoots(this).allocation_memento_map().ptr())) { - return AllocationMemento(); - } - - // Bail out if the memento is below the age mark, which can happen when - // mementos survived because a page got moved within new space. - Page* object_page = Page::FromAddress(object_address); - if (object_page->IsFlagSet(Page::NEW_SPACE_BELOW_AGE_MARK)) { - Address age_mark = - reinterpret_cast<SemiSpace*>(object_page->owner())->age_mark(); - if (!object_page->Contains(age_mark)) { - return AllocationMemento(); - } - // Do an exact check in the case where the age mark is on the same page. - if (object_address < age_mark) { - return AllocationMemento(); - } - } - - AllocationMemento memento_candidate = AllocationMemento::cast(candidate); - - // Depending on what the memento is used for, we might need to perform - // additional checks. - Address top; - switch (mode) { - case Heap::kForGC: - return memento_candidate; - case Heap::kForRuntime: - if (memento_candidate.is_null()) return AllocationMemento(); - // Either the object is the last object in the new space, or there is - // another object of at least word size (the header map word) following - // it, so suffices to compare ptr and top here. 
- top = NewSpaceTop(); - DCHECK(memento_address >= new_space()->limit() || - memento_address + AllocationMemento::kSize <= top); - if ((memento_address != top) && memento_candidate.IsValid()) { - return memento_candidate; - } - return AllocationMemento(); - default: - UNREACHABLE(); - } - UNREACHABLE(); -} - -void Heap::UpdateAllocationSite(Map map, HeapObject object, - PretenuringFeedbackMap* pretenuring_feedback) { - DCHECK_NE(pretenuring_feedback, &global_pretenuring_feedback_); -#ifdef DEBUG - BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(object); - DCHECK_IMPLIES(chunk->IsToPage(), - v8_flags.minor_mc || - chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION)); - DCHECK_IMPLIES(!chunk->InYoungGeneration(), - chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION)); -#endif - if (!v8_flags.allocation_site_pretenuring || - !AllocationSite::CanTrack(map.instance_type())) { - return; - } - AllocationMemento memento_candidate = - FindAllocationMemento<kForGC>(map, object); - if (memento_candidate.is_null()) return; - - // Entering cached feedback is used in the parallel case. We are not allowed - // to dereference the allocation site and rather have to postpone all checks - // till actually merging the data. - Address key = memento_candidate.GetAllocationSiteUnchecked(); - (*pretenuring_feedback)[AllocationSite::unchecked_cast(Object(key))]++; -} - bool Heap::IsPendingAllocationInternal(HeapObject object) { DCHECK(deserialization_complete()); @@ -485,6 +398,8 @@ bool Heap::IsPendingAllocationInternal(HeapObject object) { return addr == large_space->pending_object(); } + case SHARED_SPACE: + case SHARED_LO_SPACE: case RO_SPACE: UNREACHABLE(); } diff --git a/deps/v8/src/heap/heap-verifier.cc b/deps/v8/src/heap/heap-verifier.cc index 28061588c4a558..2c4e3fa8708d63 100644 --- a/deps/v8/src/heap/heap-verifier.cc +++ b/deps/v8/src/heap/heap-verifier.cc @@ -200,7 +200,6 @@ class SlotVerifyingVisitor : public ObjectVisitorWithCageBases { if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) { CHECK(InTypedSet(SlotType::kEmbeddedObjectFull, rinfo->pc()) || InTypedSet(SlotType::kEmbeddedObjectCompressed, rinfo->pc()) || - InTypedSet(SlotType::kEmbeddedObjectData, rinfo->pc()) || (rinfo->IsInConstantPool() && InTypedSet(SlotType::kConstPoolEmbeddedObjectCompressed, rinfo->constant_pool_entry_address())) || diff --git a/deps/v8/src/heap/heap-write-barrier-inl.h b/deps/v8/src/heap/heap-write-barrier-inl.h index 5423eaaadbdfe6..e56924cb9ce790 100644 --- a/deps/v8/src/heap/heap-write-barrier-inl.h +++ b/deps/v8/src/heap/heap-write-barrier-inl.h @@ -115,7 +115,7 @@ inline void CombinedWriteBarrierInternal(HeapObject host, HeapObjectSlot slot, } // Marking barrier: mark value & record slots when marking is on. 
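 // Marking is active for only a small fraction of total execution time, so the V8_UNLIKELY hint below (mapped to __builtin_expect on GCC/Clang via include/v8config.h) keeps the common no-marking path straight-line; illustrative rationale, assuming the standard meaning of V8_UNLIKELY.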
- if (is_marking) { + if (V8_UNLIKELY(is_marking)) { #ifdef V8_EXTERNAL_CODE_SPACE // CodePageHeaderModificationScope is not required because the only case // when a Code value is stored somewhere is during creation of a new Code @@ -259,7 +259,7 @@ base::Optional<Heap*> WriteBarrier::GetHeapIfMarking(HeapObject object) { if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return {}; heap_internals::MemoryChunk* chunk = heap_internals::MemoryChunk::FromHeapObject(object); - if (!chunk->IsMarking()) return {}; + if (V8_LIKELY(!chunk->IsMarking())) return {}; return chunk->GetHeap(); } diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc index d966d979c87c99..c607af988019ff 100644 --- a/deps/v8/src/heap/heap.cc +++ b/deps/v8/src/heap/heap.cc @@ -16,6 +16,7 @@ #include "src/base/bits.h" #include "src/base/flags.h" #include "src/base/logging.h" +#include "src/base/macros.h" #include "src/base/once.h" #include "src/base/platform/memory.h" #include "src/base/platform/mutex.h" @@ -33,6 +34,7 @@ #include "src/execution/microtask-queue.h" #include "src/execution/v8threads.h" #include "src/execution/vm-state-inl.h" +#include "src/flags/flags.h" #include "src/handles/global-handles-inl.h" #include "src/heap/array-buffer-sweeper.h" #include "src/heap/base/stack.h" @@ -46,6 +48,7 @@ #include "src/heap/concurrent-marking.h" #include "src/heap/cppgc-js/cpp-heap.h" #include "src/heap/embedder-tracing.h" +#include "src/heap/evacuation-verifier-inl.h" #include "src/heap/finalization-registry-cleanup-task.h" #include "src/heap/gc-idle-time-handler.h" #include "src/heap/gc-tracer-inl.h" @@ -62,6 +65,8 @@ #include "src/heap/mark-compact.h" #include "src/heap/marking-barrier-inl.h" #include "src/heap/marking-barrier.h" +#include "src/heap/marking-state-inl.h" +#include "src/heap/marking-state.h" #include "src/heap/memory-chunk-inl.h" #include "src/heap/memory-chunk-layout.h" #include "src/heap/memory-measurement.h" @@ -72,6 +77,7 @@ #include "src/heap/objects-visiting.h" #include "src/heap/paged-spaces-inl.h" #include "src/heap/parked-scope.h" +#include "src/heap/pretenuring-handler.h" #include "src/heap/read-only-heap.h" #include "src/heap/remembered-set.h" #include "src/heap/safepoint.h" @@ -199,7 +205,7 @@ class MinorMCTaskObserver final : public AllocationObserver { : AllocationObserver(step_size), heap_(heap) {} void Step(int bytes_allocated, Address, size_t) override { - if (v8_flags.concurrent_minor_mc) { + if (v8_flags.concurrent_minor_mc_marking) { if (heap_->incremental_marking()->IsMinorMarking()) { heap_->concurrent_marking()->RescheduleJobIfNeeded( GarbageCollector::MINOR_MARK_COMPACTOR); @@ -217,13 +223,16 @@ Heap::Heap() : isolate_(isolate()), heap_allocator_(this), memory_pressure_level_(MemoryPressureLevel::kNone), - global_pretenuring_feedback_(kInitialFeedbackCapacity), safepoint_(std::make_unique<IsolateSafepoint>(this)), external_string_table_(this), allocation_type_for_in_place_internalizable_strings_( isolate()->OwnsStringTables() ? AllocationType::kOld : AllocationType::kSharedOld), - collection_barrier_(new CollectionBarrier(this)) { + collection_barrier_(new CollectionBarrier(this)), + marking_state_(isolate_), + non_atomic_marking_state_(isolate_), + atomic_marking_state_(isolate_), + pretenuring_handler_(this) { // Ensure old_generation_size_ is a multiple of kPageSize. DCHECK_EQ(0, max_old_generation_size() & (Page::kPageSize - 1)); @@ -1066,8 +1075,8 @@ void Heap::GarbageCollectionPrologue( // Reset GC statistics. 
promoted_objects_size_ = 0; - previous_semi_space_copied_object_size_ = semi_space_copied_object_size_; - semi_space_copied_object_size_ = 0; + previous_new_space_surviving_object_size_ = new_space_surviving_object_size_; + new_space_surviving_object_size_ = 0; nodes_died_in_new_space_ = 0; nodes_copied_in_new_space_ = 0; nodes_promoted_ = 0; @@ -1082,9 +1091,9 @@ void Heap::GarbageCollectionPrologue( #endif // DEBUG if (new_space_ && new_space_->IsAtMaximumCapacity()) { - maximum_size_scavenges_++; + maximum_size_minor_gcs_++; } else { - maximum_size_scavenges_ = 0; + maximum_size_minor_gcs_ = 0; } memory_allocator()->unmapper()->PrepareForGC(); } @@ -1126,31 +1135,6 @@ size_t Heap::UsedGlobalHandlesSize() { return isolate_->global_handles()->UsedSize(); } -void Heap::MergeAllocationSitePretenuringFeedback( - const PretenuringFeedbackMap& local_pretenuring_feedback) { - PtrComprCageBase cage_base(isolate()); - AllocationSite site; - for (auto& site_and_count : local_pretenuring_feedback) { - site = site_and_count.first; - MapWord map_word = site.map_word(cage_base, kRelaxedLoad); - if (map_word.IsForwardingAddress()) { - site = AllocationSite::cast(map_word.ToForwardingAddress()); - } - - // We have not validated the allocation site yet, since we have not - // dereferenced the site during collecting information. - // This is an inlined check of AllocationMemento::IsValid. - if (!site.IsAllocationSite() || site.IsZombie()) continue; - - const int value = static_cast<int>(site_and_count.second); - DCHECK_LT(0, value); - if (site.IncrementMementoFoundCount(value)) { - // For sites in the global map the count is accessed through the site. - global_pretenuring_feedback_.insert(std::make_pair(site, 0)); - } - } -} - void Heap::AddAllocationObserversToAllSpaces( AllocationObserver* observer, AllocationObserver* new_space_observer) { DCHECK(observer && new_space_observer); @@ -1192,197 +1176,6 @@ void Heap::PublishPendingAllocations() { code_lo_space_->ResetPendingObject(); } -namespace { -inline bool MakePretenureDecision( - AllocationSite site, AllocationSite::PretenureDecision current_decision, - double ratio, bool maximum_size_scavenge) { - // Here we just allow state transitions from undecided or maybe tenure - // to don't tenure, maybe tenure, or tenure. - if ((current_decision == AllocationSite::kUndecided || - current_decision == AllocationSite::kMaybeTenure)) { - if (ratio >= AllocationSite::kPretenureRatio) { - // We just transition into tenure state when the semi-space was at - // maximum capacity. - if (maximum_size_scavenge) { - site.set_deopt_dependent_code(true); - site.set_pretenure_decision(AllocationSite::kTenure); - // Currently we just need to deopt when we make a state transition to - // tenure. - return true; - } - site.set_pretenure_decision(AllocationSite::kMaybeTenure); - } else { - site.set_pretenure_decision(AllocationSite::kDontTenure); - } - } - return false; -} - -// Clear feedback calculation fields until the next gc. -inline void ResetPretenuringFeedback(AllocationSite site) { - site.set_memento_found_count(0); - site.set_memento_create_count(0); -} - -inline bool DigestPretenuringFeedback(Isolate* isolate, AllocationSite site, - bool maximum_size_scavenge) { - bool deopt = false; - int create_count = site.memento_create_count(); - int found_count = site.memento_found_count(); - bool minimum_mementos_created = - create_count >= AllocationSite::kPretenureMinimumCreated; - double ratio = - minimum_mementos_created || v8_flags.trace_pretenuring_statistics - ? 
static_cast<double>(found_count) / create_count - : 0.0; - AllocationSite::PretenureDecision current_decision = - site.pretenure_decision(); - - if (minimum_mementos_created) { - deopt = MakePretenureDecision(site, current_decision, ratio, - maximum_size_scavenge); - } - - if (v8_flags.trace_pretenuring_statistics) { - PrintIsolate(isolate, - "pretenuring: AllocationSite(%p): (created, found, ratio) " - "(%d, %d, %f) %s => %s\n", - reinterpret_cast<void*>(site.ptr()), create_count, found_count, - ratio, site.PretenureDecisionName(current_decision), - site.PretenureDecisionName(site.pretenure_decision())); - } - - ResetPretenuringFeedback(site); - return deopt; -} - -bool PretenureAllocationSiteManually(Isolate* isolate, AllocationSite site) { - AllocationSite::PretenureDecision current_decision = - site.pretenure_decision(); - bool deopt = true; - if (current_decision == AllocationSite::kUndecided || - current_decision == AllocationSite::kMaybeTenure) { - site.set_deopt_dependent_code(true); - site.set_pretenure_decision(AllocationSite::kTenure); - } else { - deopt = false; - } - if (v8_flags.trace_pretenuring_statistics) { - PrintIsolate(isolate, - "pretenuring manually requested: AllocationSite(%p): " - "%s => %s\n", - reinterpret_cast<void*>(site.ptr()), - site.PretenureDecisionName(current_decision), - site.PretenureDecisionName(site.pretenure_decision())); - } - - ResetPretenuringFeedback(site); - return deopt; -} - -} // namespace - -void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite site) { - global_pretenuring_feedback_.erase(site); -} - -bool Heap::DeoptMaybeTenuredAllocationSites() { - return new_space_ && new_space_->IsAtMaximumCapacity() && - maximum_size_scavenges_ == 0; -} - -void Heap::ProcessPretenuringFeedback() { - bool trigger_deoptimization = false; - if (v8_flags.allocation_site_pretenuring) { - int tenure_decisions = 0; - int dont_tenure_decisions = 0; - int allocation_mementos_found = 0; - int allocation_sites = 0; - int active_allocation_sites = 0; - - AllocationSite site; - - // Step 1: Digest feedback for recorded allocation sites. - bool maximum_size_scavenge = MaximumSizeScavenge(); - for (auto& site_and_count : global_pretenuring_feedback_) { - allocation_sites++; - site = site_and_count.first; - // Count is always access through the site. - DCHECK_EQ(0, site_and_count.second); - int found_count = site.memento_found_count(); - // An entry in the storage does not imply that the count is > 0 because - // allocation sites might have been reset due to too many objects dying - // in old space. - if (found_count > 0) { - DCHECK(site.IsAllocationSite()); - active_allocation_sites++; - allocation_mementos_found += found_count; - if (DigestPretenuringFeedback(isolate_, site, maximum_size_scavenge)) { - trigger_deoptimization = true; - } - if (site.GetAllocationType() == AllocationType::kOld) { - tenure_decisions++; - } else { - dont_tenure_decisions++; - } - } - } - - // Step 2: Pretenure allocation sites for manual requests. - if (allocation_sites_to_pretenure_) { - while (!allocation_sites_to_pretenure_->empty()) { - auto pretenure_site = allocation_sites_to_pretenure_->Pop(); - if (PretenureAllocationSiteManually(isolate_, pretenure_site)) { - trigger_deoptimization = true; - } - } - allocation_sites_to_pretenure_.reset(); - } - - // Step 3: Deopt maybe tenured allocation sites if necessary. 
- bool deopt_maybe_tenured = DeoptMaybeTenuredAllocationSites(); - if (deopt_maybe_tenured) { - ForeachAllocationSite( - allocation_sites_list(), - [&allocation_sites, &trigger_deoptimization](AllocationSite site) { - DCHECK(site.IsAllocationSite()); - allocation_sites++; - if (site.IsMaybeTenure()) { - site.set_deopt_dependent_code(true); - trigger_deoptimization = true; - } - }); - } - - if (trigger_deoptimization) { - isolate_->stack_guard()->RequestDeoptMarkedAllocationSites(); - } - - if (v8_flags.trace_pretenuring_statistics && - (allocation_mementos_found > 0 || tenure_decisions > 0 || - dont_tenure_decisions > 0)) { - PrintIsolate(isolate(), - "pretenuring: deopt_maybe_tenured=%d visited_sites=%d " - "active_sites=%d " - "mementos=%d tenured=%d not_tenured=%d\n", - deopt_maybe_tenured ? 1 : 0, allocation_sites, - active_allocation_sites, allocation_mementos_found, - tenure_decisions, dont_tenure_decisions); - } - - global_pretenuring_feedback_.clear(); - global_pretenuring_feedback_.reserve(kInitialFeedbackCapacity); - } -} - -void Heap::PretenureAllocationSiteOnNextCollection(AllocationSite site) { - if (!allocation_sites_to_pretenure_) { - allocation_sites_to_pretenure_.reset( - new GlobalHandleVector<AllocationSite>(this)); - } - allocation_sites_to_pretenure_->Push(site); -} - void Heap::InvalidateCodeDeoptimizationData(Code code) { CodePageMemoryModificationScope modification_scope(code); code.set_deoptimization_data(ReadOnlyRoots(this).empty_fixed_array()); @@ -1485,6 +1278,10 @@ void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) { TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE); ReduceNewSpaceSize(); + if (!v8_flags.minor_mc) { + SemiSpaceNewSpace::From(new_space())->MakeAllPagesInFromSpaceIterable(); + } + #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB new_space()->ClearUnusedObjectStartBitmaps(); #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB @@ -1587,7 +1384,7 @@ size_t Heap::MinorMCTaskTriggerSize() const { } void Heap::StartMinorMCIncrementalMarkingIfNeeded() { - if (v8_flags.concurrent_minor_mc && !IsTearingDown() && + if (v8_flags.concurrent_minor_mc_marking && !IsTearingDown() && !incremental_marking()->IsMarking() && incremental_marking()->CanBeStarted() && V8_LIKELY(!v8_flags.gc_global) && (new_space()->Size() >= MinorMCTaskTriggerSize())) { @@ -2035,16 +1832,26 @@ void Heap::StartIncrementalMarking(int gc_flags, GarbageCollector collector) { DCHECK(incremental_marking()->IsStopped()); - // Sweeping needs to be completed such that markbits are all cleared before - // starting marking again. - CompleteSweepingFull(); + if (IsYoungGenerationCollector(collector)) { + CompleteSweepingYoung(collector); + } else { + // Sweeping needs to be completed such that markbits are all cleared before + // starting marking again. 
+ CompleteSweepingFull(); + } + base::Optional<GlobalSafepointScope> global_safepoint_scope; base::Optional<SafepointScope> safepoint_scope; { AllowGarbageCollection allow_shared_gc; IgnoreLocalGCRequests ignore_gc_requests(this); - safepoint_scope.emplace(this); + + if (isolate()->is_shared_heap_isolate()) { + global_safepoint_scope.emplace(isolate()); + } else { + safepoint_scope.emplace(this); + } } #ifdef DEBUG @@ -2062,11 +1869,13 @@ void Heap::StartIncrementalMarking(int gc_flags, } void Heap::CompleteSweepingFull() { - array_buffer_sweeper()->EnsureFinished(); - mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kUnifiedHeap); + { + TRACE_GC(tracer(), GCTracer::Scope::MC_COMPLETE_SWEEP_ARRAY_BUFFERS); + array_buffer_sweeper()->EnsureFinished(); + } + EnsureSweepingCompleted(SweepingForcedFinalizationMode::kUnifiedHeap); - DCHECK(!mark_compact_collector()->sweeping_in_progress()); + DCHECK(!sweeping_in_progress()); DCHECK_IMPLIES(cpp_heap(), !CppHeap::From(cpp_heap())->sweeper().IsSweepingInProgress()); DCHECK(!tracer()->IsSweepingInProgress()); @@ -2078,7 +1887,7 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached( // Do not start incremental marking while invoking GC callbacks. // Heap::CollectGarbage already decided which GC is going to be invoked. In // case it chose a young-gen GC, starting an incremental full GC during - // callbacks would break the seperate GC phases guarantee. + // callbacks would break the separate GC phases guarantee. return; } if (incremental_marking()->IsStopped()) { @@ -2230,12 +2039,26 @@ void Heap::CheckCollectionRequested() { #if V8_ENABLE_WEBASSEMBLY void Heap::EnsureWasmCanonicalRttsSize(int length) { + HandleScope scope(isolate()); + Handle<WeakArrayList> current_rtts = handle(wasm_canonical_rtts(), isolate_); if (length <= current_rtts->length()) return; - Handle<WeakArrayList> result = WeakArrayList::EnsureSpace( + Handle<WeakArrayList> new_rtts = WeakArrayList::EnsureSpace( isolate(), current_rtts, length, AllocationType::kOld); - result->set_length(length); - set_wasm_canonical_rtts(*result); + new_rtts->set_length(length); + set_wasm_canonical_rtts(*new_rtts); + + // Wrappers are indexed by canonical rtt length, and an additional boolean + // storing whether the corresponding function is imported or not. 
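+  // For instance, under this layout the wrapper for canonical index i would +  // sit at slot 2 * i for module-internal functions and at slot 2 * i + 1 for +  // imports; an illustrative reading of the factor of two below, the actual +  // slot assignment is made by the code that populates js_to_wasm_wrappers.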
+ int required_wrapper_length = 2 * length; + Handle<WeakArrayList> current_wrappers = + handle(js_to_wasm_wrappers(), isolate_); + if (required_wrapper_length <= current_wrappers->length()) return; + Handle<WeakArrayList> new_wrappers = + WeakArrayList::EnsureSpace(isolate(), current_wrappers, + required_wrapper_length, AllocationType::kOld); + new_wrappers->set_length(required_wrapper_length); + set_js_to_wasm_wrappers(*new_wrappers); } #endif @@ -2245,19 +2068,19 @@ void Heap::UpdateSurvivalStatistics(int start_new_space_size) { promotion_ratio_ = (static_cast<double>(promoted_objects_size_) / static_cast<double>(start_new_space_size) * 100); - if (previous_semi_space_copied_object_size_ > 0) { + if (previous_new_space_surviving_object_size_ > 0) { promotion_rate_ = (static_cast<double>(promoted_objects_size_) / - static_cast<double>(previous_semi_space_copied_object_size_) * 100); + static_cast<double>(previous_new_space_surviving_object_size_) * 100); } else { promotion_rate_ = 0; } - semi_space_copied_rate_ = - (static_cast<double>(semi_space_copied_object_size_) / + new_space_surviving_rate_ = + (static_cast<double>(new_space_surviving_object_size_) / static_cast<double>(start_new_space_size) * 100); - double survival_rate = promotion_ratio_ + semi_space_copied_rate_; + double survival_rate = promotion_ratio_ + new_space_surviving_rate_; tracer()->AddSurvivalRatio(survival_rate); } @@ -2319,18 +2142,31 @@ size_t Heap::PerformGarbageCollection( DCHECK(tracer()->IsConsistentWithCollector(collector)); TRACE_GC_EPOCH(tracer(), CollectorScopeId(collector), ThreadKind::kMain); - base::Optional<SafepointScope> safepoint_scope; + base::Optional<GlobalSafepointScope> global_safepoint_scope; + base::Optional<SafepointScope> isolate_safepoint_scope; { AllowGarbageCollection allow_shared_gc; IgnoreLocalGCRequests ignore_gc_requests(this); - safepoint_scope.emplace(this); + + if (isolate()->is_shared_heap_isolate()) { + global_safepoint_scope.emplace(isolate()); + } else { + isolate_safepoint_scope.emplace(this); + } } collection_barrier_->StopTimeToCollectionTimer(); HeapVerifier::VerifyHeapIfEnabled(this); + if (isolate()->is_shared_heap_isolate()) { + isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) { + if (client->is_shared_heap_isolate()) return; + HeapVerifier::VerifyHeapIfEnabled(client->heap()); + }); + } + tracer()->StartInSafepoint(); GarbageCollectionPrologueInSafepoint(); @@ -2349,7 +2185,7 @@ size_t Heap::PerformGarbageCollection( Scavenge(); } - ProcessPretenuringFeedback(); + pretenuring_handler_.ProcessPretenuringFeedback(); UpdateSurvivalStatistics(static_cast<int>(start_young_generation_size)); ConfigureInitialOldGenerationSize(); @@ -2395,8 +2231,9 @@ size_t Heap::PerformGarbageCollection( if (cpp_heap() && IsYoungGenerationCollector(collector)) { const bool with_stack = (gc_reason != GarbageCollectionReason::kTask); CppHeap::From(cpp_heap()) - ->RunMinorGC(with_stack ? CppHeap::StackState::kMayContainHeapPointers - : CppHeap::StackState::kNoHeapPointers); + ->RunMinorGCIfNeeded(with_stack + ? 
CppHeap::StackState::kMayContainHeapPointers + : CppHeap::StackState::kNoHeapPointers); } #endif // defined(CPPGC_YOUNG_GENERATION) @@ -2408,16 +2245,59 @@ size_t Heap::PerformGarbageCollection( HeapVerifier::VerifyHeapIfEnabled(this); + if (isolate()->is_shared_heap_isolate()) { + isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) { + if (client->is_shared_heap_isolate()) return; + HeapVerifier::VerifyHeapIfEnabled(client->heap()); + }); + } + return freed_global_handles; } -void Heap::CollectSharedGarbage(GarbageCollectionReason gc_reason) { +bool Heap::CollectGarbageShared(LocalHeap* local_heap, + GarbageCollectionReason gc_reason) { CHECK(deserialization_complete()); - DCHECK(!IsShared()); - DCHECK_NOT_NULL(isolate()->shared_isolate()); + DCHECK(isolate()->has_shared_heap()); + + if (v8_flags.shared_space) { + Isolate* shared_space_isolate = isolate()->shared_space_isolate(); + return shared_space_isolate->heap()->CollectGarbageFromAnyThread(local_heap, + gc_reason); + + } else { + DCHECK(!IsShared()); + DCHECK_NOT_NULL(isolate()->shared_isolate()); + + isolate()->shared_isolate()->heap()->PerformSharedGarbageCollection( + isolate(), gc_reason); + return true; + } +} + +bool Heap::CollectGarbageFromAnyThread(LocalHeap* local_heap, + GarbageCollectionReason gc_reason) { + DCHECK(local_heap->IsRunning()); + + if (isolate() == local_heap->heap()->isolate() && + local_heap->is_main_thread()) { + CollectAllGarbage(current_gc_flags_, gc_reason, current_gc_callback_flags_); + return true; + } else { + if (!collection_barrier_->TryRequestGC()) return false; + + const LocalHeap::ThreadState old_state = + main_thread_local_heap()->state_.SetCollectionRequested(); - isolate()->shared_isolate()->heap()->PerformSharedGarbageCollection( - isolate(), gc_reason); + if (old_state.IsRunning()) { + const bool performed_gc = + collection_barrier_->AwaitCollectionBackground(local_heap); + return performed_gc; + } else { + DCHECK(old_state.IsParked()); + return false; + } + } } void Heap::PerformSharedGarbageCollection(Isolate* initiator, @@ -2486,19 +2366,29 @@ void Heap::CompleteSweepingYoung(GarbageCollector collector) { array_buffer_sweeper()->EnsureFinished(); } - // If sweeping is in progress and there are no sweeper tasks running, finish - // the sweeping here, to avoid having to pause and resume during the young - // generation GC. - mark_compact_collector()->FinishSweepingIfOutOfWork(); + if (v8_flags.minor_mc) { + DCHECK(v8_flags.separate_gc_phases); + // Do not interleave sweeping. + EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only); + } else { + // If sweeping is in progress and there are no sweeper tasks running, finish + // the sweeping here, to avoid having to pause and resume during the young + // generation GC. + FinishSweepingIfOutOfWork(); + } #if defined(CPPGC_YOUNG_GENERATION) // Always complete sweeping if young generation is enabled. 
- if (cpp_heap()) CppHeap::From(cpp_heap())->FinishSweepingIfRunning(); + if (cpp_heap()) { + if (auto* iheap = CppHeap::From(cpp_heap()); + iheap->generational_gc_supported()) + iheap->FinishSweepingIfRunning(); + } #endif // defined(CPPGC_YOUNG_GENERATION) } -void Heap::EnsureSweepingCompleted(HeapObject object) { - if (!mark_compact_collector()->sweeping_in_progress()) return; +void Heap::EnsureSweepingCompletedForObject(HeapObject object) { + if (!sweeping_in_progress()) return; BasicMemoryChunk* basic_chunk = BasicMemoryChunk::FromHeapObject(object); if (basic_chunk->InReadOnlySpace()) return; @@ -2510,7 +2400,7 @@ void Heap::EnsureSweepingCompleted(HeapObject object) { DCHECK(!chunk->IsLargePage()); Page* page = Page::cast(chunk); - mark_compact_collector()->EnsurePageIsSwept(page); + sweeper()->EnsurePageIsSwept(page); } void Heap::RecomputeLimits(GarbageCollector collector) { @@ -2700,9 +2590,6 @@ void Heap::Scavenge() { "[IncrementalMarking] Scavenge during marking.\n"); } - tracer()->NotifyYoungGenerationHandling( - YoungGenerationHandling::kRegularScavenge); - TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE); base::MutexGuard guard(relocation_mutex()); // Young generation garbage collection is orthogonal from full GC marking. It @@ -3043,7 +2930,8 @@ void Heap::ResetAllAllocationSitesDependentCode(AllocationType allocation) { site.ResetPretenureDecision(); site.set_deopt_dependent_code(true); marked = true; - RemoveAllocationSitePretenuringFeedback(site); + pretenuring_handler_ + .RemoveAllocationSitePretenuringFeedback(site); return; } }); @@ -3114,6 +3002,7 @@ static_assert(!USE_ALLOCATION_ALIGNMENT_BOOL || (HeapNumber::kValueOffset & kDoubleAlignmentMask) == kTaggedSize); int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) { + if (V8_COMPRESS_POINTERS_8GB_BOOL) return 0; switch (alignment) { case kTaggedAligned: return 0; @@ -3127,10 +3016,12 @@ int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) { // static int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) { + if (V8_COMPRESS_POINTERS_8GB_BOOL) return 0; if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0) return kTaggedSize; - if (alignment == kDoubleUnaligned && (address & kDoubleAlignmentMask) == 0) + if (alignment == kDoubleUnaligned && (address & kDoubleAlignmentMask) == 0) { return kDoubleSize - kTaggedSize; // No fill if double is always aligned. + } return 0; } @@ -3229,6 +3120,12 @@ namespace { void CreateFillerObjectAtImpl(Heap* heap, Address addr, int size, ClearFreedMemoryMode clear_memory_mode) { if (size == 0) return; + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(addr, kObjectAlignment8GbHeap)); + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(size, kObjectAlignment8GbHeap)); + // TODO(v8:13070): Filler sizes are irrelevant for 8GB+ heaps. Adding them + // should be avoided in this mode. 
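+  // Illustration, assuming kObjectAlignment8GbHeap == 8: allocation sizes are +  // pre-rounded via ALIGN_TO_ALLOCATION_ALIGNMENT (see heap-allocator-inl.h +  // earlier in this patch), so both DCHECKs hold by construction and +  // double-alignment fillers are never needed in this mode.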
HeapObject filler = HeapObject::FromAddress(addr); ReadOnlyRoots roots(heap); if (size == kTaggedSize) { @@ -3569,6 +3466,7 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim, if (MayContainRecordedSlots(object)) { MemoryChunk* chunk = MemoryChunk::FromHeapObject(object); DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object)); + DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_SHARED>(object)); DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object)); } #endif @@ -3587,9 +3485,9 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim, // Clear the mark bits of the black area that belongs now to the filler. // This is an optimization. The sweeper will release black fillers anyway. if (incremental_marking()->black_allocation() && - incremental_marking()->marking_state()->IsBlackOrGrey(filler)) { + marking_state()->IsBlackOrGrey(filler)) { Page* page = Page::FromAddress(new_end); - incremental_marking()->marking_state()->bitmap(page)->ClearRange( + marking_state()->bitmap(page)->ClearRange( page->AddressToMarkbitIndex(new_end), page->AddressToMarkbitIndex(new_end + bytes_to_trim)); } @@ -3613,8 +3511,7 @@ void Heap::CreateFillerForArray(T object, int elements_to_trim, } void Heap::MakeHeapIterable() { - mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only); safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) { local_heap->MakeLinearAllocationAreaIterable(); @@ -3626,6 +3523,9 @@ void Heap::MakeHeapIterable() { space->MakeLinearAllocationAreaIterable(); } + if (v8_flags.shared_space && shared_space_allocator_) { + shared_space_allocator_->MakeLinearAllocationAreaIterable(); + } if (new_space()) new_space()->MakeLinearAllocationAreaIterable(); } @@ -3633,17 +3533,26 @@ void Heap::FreeLinearAllocationAreas() { safepoint()->IterateLocalHeaps( [](LocalHeap* local_heap) { local_heap->FreeLinearAllocationArea(); }); + if (isolate()->is_shared_space_isolate()) { + isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) { + client->heap()->FreeSharedLinearAllocationAreas(); + }); + } + PagedSpaceIterator spaces(this); for (PagedSpace* space = spaces.Next(); space != nullptr; space = spaces.Next()) { space->FreeLinearAllocationArea(); } + if (v8_flags.shared_space && shared_space_allocator_) { + shared_space_allocator_->FreeLinearAllocationArea(); + } if (new_space()) new_space()->FreeLinearAllocationArea(); } void Heap::FreeSharedLinearAllocationAreas() { - if (!isolate()->shared_isolate()) return; + if (!isolate()->has_shared_heap()) return; safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) { local_heap->FreeSharedLinearAllocationArea(); }); @@ -3651,12 +3560,34 @@ void Heap::FreeSharedLinearAllocationAreas() { } void Heap::FreeMainThreadSharedLinearAllocationAreas() { - if (!isolate()->shared_isolate()) return; - shared_old_allocator_->FreeLinearAllocationArea(); + if (!isolate()->has_shared_heap()) return; + shared_space_allocator_->FreeLinearAllocationArea(); if (shared_map_allocator_) shared_map_allocator_->FreeLinearAllocationArea(); main_thread_local_heap()->FreeSharedLinearAllocationArea(); } +void Heap::MarkSharedLinearAllocationAreasBlack() { + DCHECK(v8_flags.shared_space); + if (shared_space_allocator_) { + shared_space_allocator_->MarkLinearAllocationAreaBlack(); + } + safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) { + 
local_heap->MarkSharedLinearAllocationAreaBlack(); + }); + main_thread_local_heap()->MarkSharedLinearAllocationAreaBlack(); +} + +void Heap::UnmarkSharedLinearAllocationAreas() { + DCHECK(v8_flags.shared_space); + if (shared_space_allocator_) { + shared_space_allocator_->UnmarkLinearAllocationArea(); + } + safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) { + local_heap->UnmarkSharedLinearAllocationArea(); + }); + main_thread_local_heap()->UnmarkSharedLinearAllocationArea(); +} + namespace { double ComputeMutatorUtilizationImpl(double mutator_speed, double gc_speed) { @@ -3789,19 +3720,25 @@ void Heap::ActivateMemoryReducerIfNeeded() { } } -void Heap::ReduceNewSpaceSize() { +bool Heap::ShouldReduceNewSpaceSize() const { static const size_t kLowAllocationThroughput = 1000; + + if (v8_flags.predictable) return false; + const double allocation_throughput = - tracer()->CurrentAllocationThroughputInBytesPerMillisecond(); + tracer_->CurrentAllocationThroughputInBytesPerMillisecond(); - if (v8_flags.predictable) return; + return ShouldReduceMemory() || + ((allocation_throughput != 0) && + (allocation_throughput < kLowAllocationThroughput)); +} - if (ShouldReduceMemory() || - ((allocation_throughput != 0) && - (allocation_throughput < kLowAllocationThroughput))) { - new_space_->Shrink(); - new_lo_space_->SetCapacity(new_space_->Capacity()); - } +void Heap::ReduceNewSpaceSize() { + if (!ShouldReduceNewSpaceSize()) return; + + // MinorMC shrinks new space as part of sweeping. + if (!v8_flags.minor_mc) new_space_->Shrink(); + new_lo_space_->SetCapacity(new_space_->Capacity()); } size_t Heap::NewSpaceSize() { return new_space() ? new_space()->Size() : 0; } @@ -3874,22 +3811,46 @@ void Heap::NotifyObjectLayoutChange( #endif } -void Heap::NotifyObjectSizeChange(HeapObject object, int old_size, int new_size, - ClearRecordedSlots clear_recorded_slots) { +void Heap::NotifyObjectSizeChange( + HeapObject object, int old_size, int new_size, + ClearRecordedSlots clear_recorded_slots, + enum UpdateInvalidatedObjectSize update_invalidated_object_size) { + old_size = ALIGN_TO_ALLOCATION_ALIGNMENT(old_size); + new_size = ALIGN_TO_ALLOCATION_ALIGNMENT(new_size); DCHECK_LE(new_size, old_size); if (new_size == old_size) return; - UpdateInvalidatedObjectSize(object, new_size); + const bool is_main_thread = LocalHeap::Current() == nullptr; - const bool is_background = LocalHeap::Current() != nullptr; - DCHECK_IMPLIES(is_background, + DCHECK_IMPLIES(!is_main_thread, clear_recorded_slots == ClearRecordedSlots::kNo); + DCHECK_IMPLIES(!is_main_thread, update_invalidated_object_size == + UpdateInvalidatedObjectSize::kNo); + + if (update_invalidated_object_size == UpdateInvalidatedObjectSize::kYes) { + UpdateInvalidatedObjectSize(object, new_size); + } else { + DCHECK_EQ(update_invalidated_object_size, UpdateInvalidatedObjectSize::kNo); - const VerifyNoSlotsRecorded verify_no_slots_recorded = - is_background ? VerifyNoSlotsRecorded::kNo : VerifyNoSlotsRecorded::kYes; +#if DEBUG + if (is_main_thread) { + // When running on the main thread we can actually DCHECK that this object + // wasn't recorded in the invalidated_slots map yet.
+ MemoryChunk* chunk = MemoryChunk::FromHeapObject(object); + DCHECK(!chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_NEW>(object)); + DCHECK( + !chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_SHARED>(object)); + DCHECK_IMPLIES( + incremental_marking()->IsCompacting(), + !chunk->RegisteredObjectWithInvalidatedSlots<OLD_TO_OLD>(object)); + } +#endif + } + + const auto verify_no_slots_recorded = + is_main_thread ? VerifyNoSlotsRecorded::kYes : VerifyNoSlotsRecorded::kNo; - const ClearFreedMemoryMode clear_memory_mode = - ClearFreedMemoryMode::kDontClearFreedMemory; + const auto clear_memory_mode = ClearFreedMemoryMode::kDontClearFreedMemory; const Address filler = object.address() + new_size; const int filler_size = old_size - new_size; @@ -3900,7 +3861,12 @@ void Heap::NotifyObjectSizeChange(HeapObject object, int old_size, int new_size, void Heap::UpdateInvalidatedObjectSize(HeapObject object, int new_size) { if (!MayContainRecordedSlots(object)) return; - if (incremental_marking()->IsCompacting()) { + // Updating invalidated_slots is unsynchronized and thus needs to happen on the + // main thread. + DCHECK_NULL(LocalHeap::Current()); + DCHECK_EQ(isolate()->thread_id(), ThreadId::Current()); + + if (incremental_marking()->IsCompacting() || gc_state() == MARK_COMPACT) { MemoryChunk::FromHeapObject(object) ->UpdateInvalidatedObjectSize<OLD_TO_OLD>(object, new_size); } @@ -4298,12 +4264,16 @@ bool Heap::Contains(HeapObject value) const { if (memory_allocator()->IsOutsideAllocatedSpace(value.address())) { return false; } - return HasBeenSetUp() && - ((new_space_ && new_space_->Contains(value)) || - old_space_->Contains(value) || code_space_->Contains(value) || - (map_space_ && map_space_->Contains(value)) || - lo_space_->Contains(value) || code_lo_space_->Contains(value) || - (new_lo_space_ && new_lo_space_->Contains(value))); + + if (!HasBeenSetUp()) return false; + + return (new_space_ && new_space_->Contains(value)) || + old_space_->Contains(value) || code_space_->Contains(value) || + (map_space_ && map_space_->Contains(value)) || + (shared_space_ && shared_space_->Contains(value)) || + lo_space_->Contains(value) || code_lo_space_->Contains(value) || + (new_lo_space_ && new_lo_space_->Contains(value)) || + (shared_lo_space_ && shared_lo_space_->Contains(value)); } bool Heap::ContainsCode(HeapObject value) const { @@ -4319,9 +4289,14 @@ bool Heap::ContainsCode(HeapObject value) const { } bool Heap::SharedHeapContains(HeapObject value) const { - if (shared_old_space_) - return shared_old_space_->Contains(value) || - (shared_map_space_ && shared_map_space_->Contains(value)); + if (shared_allocation_space_) { + if (shared_allocation_space_->Contains(value)) return true; + if (shared_lo_allocation_space_->Contains(value)) return true; + if (shared_map_allocation_space_ && + shared_map_allocation_space_->Contains(value)) + return true; + } + return false; } @@ -4352,19 +4327,27 @@ bool Heap::InSpace(HeapObject value, AllocationSpace space) const { case MAP_SPACE: DCHECK(map_space_); return map_space_->Contains(value); + case SHARED_SPACE: + return shared_space_->Contains(value); case LO_SPACE: return lo_space_->Contains(value); case CODE_LO_SPACE: return code_lo_space_->Contains(value); case NEW_LO_SPACE: return new_lo_space_->Contains(value); + case SHARED_LO_SPACE: + return shared_lo_space_->Contains(value); case RO_SPACE: return ReadOnlyHeap::Contains(value); } UNREACHABLE(); } -bool Heap::IsShared() { return isolate()->is_shared(); } +bool Heap::IsShared() const { return
isolate()->is_shared(); } + +bool Heap::ShouldMarkSharedHeap() const { + return isolate()->is_shared() || isolate()->is_shared_space_isolate(); +} bool Heap::InSpaceSlow(Address addr, AllocationSpace space) const { if (memory_allocator()->IsOutsideAllocatedSpace(addr)) { @@ -4382,12 +4365,16 @@ bool Heap::InSpaceSlow(Address addr, AllocationSpace space) const { case MAP_SPACE: DCHECK(map_space_); return map_space_->ContainsSlow(addr); + case SHARED_SPACE: + return shared_space_->ContainsSlow(addr); case LO_SPACE: return lo_space_->ContainsSlow(addr); case CODE_LO_SPACE: return code_lo_space_->ContainsSlow(addr); case NEW_LO_SPACE: return new_lo_space_->ContainsSlow(addr); + case SHARED_LO_SPACE: + return shared_lo_space_->ContainsSlow(addr); case RO_SPACE: return read_only_space_->ContainsSlow(addr); } @@ -4400,9 +4387,11 @@ bool Heap::IsValidAllocationSpace(AllocationSpace space) { case OLD_SPACE: case CODE_SPACE: case MAP_SPACE: + case SHARED_SPACE: case LO_SPACE: case NEW_LO_SPACE: case CODE_LO_SPACE: + case SHARED_LO_SPACE: case RO_SPACE: return true; default: @@ -4426,7 +4415,7 @@ void Heap::VerifyCountersBeforeConcurrentSweeping() { // We need to refine the counters on pages that are already swept and have // not been moved over to the actual space. Otherwise, the AccountingStats // are just an over approximation. - space->RefillFreeList(mark_compact_collector()->sweeper()); + space->RefillFreeList(); space->VerifyCountersBeforeConcurrentSweeping(); } } @@ -4689,13 +4678,15 @@ void Heap::IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) { SerializerDeserializer::IterateStartupObjectCache(isolate_, v); v->Synchronize(VisitorSynchronization::kStartupObjectCache); - // When shared_isolate() is null, isolate_ is either an unshared (instead of - // a client) Isolate or the shared Isolate. In both cases isolate_ owns its - // shared heap object cache and should iterate it. + // Iterate over shared heap object cache when the isolate owns this data + // structure. Isolates which own the shared heap object cache are: + // * Shared isolate + // * Shared space/main isolate + // * All isolates which do not use the shared heap feature. // - // When shared_isolate() is not null, isolate_ is a client Isolate, does not - // own its shared heap object cache, and should not iterate it. - if (isolate_->shared_isolate() == nullptr) { + // However, worker/client isolates do not own the shared heap object cache + // and should not iterate it. 
+ if (isolate_->is_shared_heap_isolate() || !isolate_->has_shared_heap()) { SerializerDeserializer::IterateSharedHeapObjectCache(isolate_, v); v->Synchronize(VisitorSynchronization::kSharedHeapObjectCache); } @@ -4706,13 +4697,56 @@ void Heap::IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) { } } +class ClientRootVisitor : public RootVisitor { + public: + explicit ClientRootVisitor(RootVisitor* actual_visitor) + : actual_visitor_(actual_visitor) {} + + void VisitRootPointers(Root root, const char* description, + FullObjectSlot start, FullObjectSlot end) final { + for (FullObjectSlot p = start; p < end; ++p) { + MaybeForwardSlot(root, description, p); + } + } + + void VisitRootPointers(Root root, const char* description, + OffHeapObjectSlot start, OffHeapObjectSlot end) final { + actual_visitor_->VisitRootPointers(root, description, start, end); + } + + void VisitRunningCode(FullObjectSlot slot) final { +#if DEBUG + HeapObject object = HeapObject::cast(*slot); + DCHECK(!object.InSharedWritableHeap()); +#endif + } + + void Synchronize(VisitorSynchronization::SyncTag tag) final { + actual_visitor_->Synchronize(tag); + } + + private: + void MaybeForwardSlot(Root root, const char* description, + FullObjectSlot slot) { + Object object = *slot; + if (!object.IsHeapObject()) return; + HeapObject heap_object = HeapObject::cast(object); + if (heap_object.InSharedWritableHeap()) { + actual_visitor_->VisitRootPointer(root, description, slot); + } + } + + RootVisitor* const actual_visitor_; +}; + void Heap::IterateRootsIncludingClients(RootVisitor* v, base::EnumSet<SkipRoot> options) { IterateRoots(v, options); - if (isolate()->is_shared()) { + if (isolate()->is_shared_heap_isolate()) { + ClientRootVisitor client_root_visitor(v); isolate()->global_safepoint()->IterateClientIsolates( - [v, options](Isolate* client) { + [v = &client_root_visitor, options](Isolate* client) { client->heap()->IterateRoots(v, options); }); } @@ -4720,9 +4754,12 @@ void Heap::IterateRootsIncludingClients(RootVisitor* v, void Heap::IterateRootsFromStackIncludingClient(RootVisitor* v) { IterateStackRoots(v); - if (isolate()->is_shared()) { + if (isolate()->is_shared_heap_isolate()) { + ClientRootVisitor client_root_visitor(v); isolate()->global_safepoint()->IterateClientIsolates( - [v](Isolate* client) { client->heap()->IterateStackRoots(v); }); + [v = &client_root_visitor](Isolate* client) { + client->heap()->IterateStackRoots(v); + }); } } @@ -4915,7 +4952,7 @@ void Heap::ConfigureHeap(const v8::ResourceConstraints& constraints) { DCHECK(kMaxRegularHeapObjectSize >= (JSArray::kHeaderSize + FixedArray::SizeFor(JSArray::kInitialMaxFastElementArray) + - AllocationMemento::kSize)); + ALIGN_TO_ALLOCATION_ALIGNMENT(AllocationMemento::kSize))); code_range_size_ = constraints.code_range_size_in_bytes(); @@ -5344,6 +5381,8 @@ void Heap::SetUp(LocalHeap* main_thread_local_heap) { memory_allocator_.reset( new MemoryAllocator(isolate_, code_page_allocator, MaxReserved())); + sweeper_.reset(new Sweeper(this)); + mark_compact_collector_.reset(new MarkCompactCollector(this)); scavenger_collector_.reset(new ScavengerCollector(this)); @@ -5359,10 +5398,6 @@ void Heap::SetUp(LocalHeap* main_thread_local_heap) { concurrent_marking_.reset(new ConcurrentMarking(this, nullptr)); } - for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) { - space_[i] = nullptr; - } - // Set up layout tracing callback. 
if (V8_UNLIKELY(v8_flags.trace_gc_heap_layout)) { v8::GCType gc_type = kGCTypeMarkSweepCompact; @@ -5381,7 +5416,7 @@ void Heap::SetUpFromReadOnlyHeap(ReadOnlyHeap* ro_heap) { DCHECK_NOT_NULL(ro_heap); DCHECK_IMPLIES(read_only_space_ != nullptr, read_only_space_ == ro_heap->read_only_space()); - space_[RO_SPACE] = nullptr; + DCHECK_NULL(space_[RO_SPACE].get()); read_only_space_ = ro_heap->read_only_space(); heap_allocator_.SetReadOnlySpace(read_only_space_); } @@ -5424,24 +5459,50 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info, const bool has_young_gen = !v8_flags.single_generation && !IsShared(); if (has_young_gen) { if (v8_flags.minor_mc) { - space_[NEW_SPACE] = new_space_ = - new PagedNewSpace(this, initial_semispace_size_, max_semi_space_size_, - new_allocation_info); + space_[NEW_SPACE] = std::make_unique<PagedNewSpace>( + this, initial_semispace_size_, max_semi_space_size_, + new_allocation_info); } else { - space_[NEW_SPACE] = new_space_ = - new SemiSpaceNewSpace(this, initial_semispace_size_, - max_semi_space_size_, new_allocation_info); + space_[NEW_SPACE] = std::make_unique<SemiSpaceNewSpace>( + this, initial_semispace_size_, max_semi_space_size_, + new_allocation_info); } - space_[NEW_LO_SPACE] = new_lo_space_ = - new NewLargeObjectSpace(this, NewSpaceCapacity()); + new_space_ = static_cast<NewSpace*>(space_[NEW_SPACE].get()); + + space_[NEW_LO_SPACE] = + std::make_unique<NewLargeObjectSpace>(this, NewSpaceCapacity()); + new_lo_space_ = + static_cast<NewLargeObjectSpace*>(space_[NEW_LO_SPACE].get()); } - space_[OLD_SPACE] = old_space_ = new OldSpace(this, old_allocation_info); - space_[CODE_SPACE] = code_space_ = new CodeSpace(this); + + space_[OLD_SPACE] = std::make_unique<OldSpace>(this, old_allocation_info); + old_space_ = static_cast<OldSpace*>(space_[OLD_SPACE].get()); + + space_[CODE_SPACE] = std::make_unique<CodeSpace>(this); + code_space_ = static_cast<CodeSpace*>(space_[CODE_SPACE].get()); + if (v8_flags.use_map_space) { - space_[MAP_SPACE] = map_space_ = new MapSpace(this); + space_[MAP_SPACE] = std::make_unique<MapSpace>(this); + map_space_ = static_cast<MapSpace*>(space_[MAP_SPACE].get()); + } + + if (isolate()->is_shared_space_isolate()) { + space_[SHARED_SPACE] = std::make_unique<SharedSpace>(this); + shared_space_ = static_cast<SharedSpace*>(space_[SHARED_SPACE].get()); + } + + space_[LO_SPACE] = std::make_unique<OldLargeObjectSpace>(this); + lo_space_ = static_cast<OldLargeObjectSpace*>(space_[LO_SPACE].get()); + + space_[CODE_LO_SPACE] = std::make_unique<CodeLargeObjectSpace>(this); + code_lo_space_ = + static_cast<CodeLargeObjectSpace*>(space_[CODE_LO_SPACE].get()); + + if (isolate()->is_shared_space_isolate()) { + space_[SHARED_LO_SPACE] = std::make_unique<SharedLargeObjectSpace>(this); + shared_lo_space_ = + static_cast<SharedLargeObjectSpace*>(space_[SHARED_LO_SPACE].get()); } - space_[LO_SPACE] = lo_space_ = new OldLargeObjectSpace(this); - space_[CODE_LO_SPACE] = code_lo_space_ = new CodeLargeObjectSpace(this); for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount); i++) { @@ -5475,12 +5536,12 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info, } if (new_space()) { - if (v8_flags.concurrent_minor_mc) { + if (v8_flags.concurrent_minor_mc_marking) { // TODO(v8:13012): Atomic MinorMC should not use ScavengeJob. Instead, we // should schedule MinorMC tasks at a soft limit, which are used by atomic // MinorMC, and to finalize concurrent MinorMC. 
The condition - // v8_flags.concurrent_minor_mc can then be changed to v8_flags.minor_mc - // (here and at the RemoveAllocationObserver call site). + // v8_flags.concurrent_minor_mc_marking can then be changed to + // v8_flags.minor_mc (here and at the RemoveAllocationObserver call site). minor_mc_task_observer_.reset( new MinorMCTaskObserver(this, MinorMCTaskObserver::kStepSize)); new_space()->AddAllocationObserver(minor_mc_task_observer_.get()); @@ -5516,19 +5577,33 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info, } #endif // V8_HEAP_USE_PKU_JIT_WRITE_PROTECT - if (isolate()->shared_isolate()) { + if (isolate()->shared_space_isolate()) { + Heap* heap = isolate()->shared_space_isolate()->heap(); + + shared_space_allocator_ = std::make_unique<ConcurrentAllocator>( + main_thread_local_heap(), heap->shared_space_); + + DCHECK_NULL(shared_map_allocator_.get()); + + shared_allocation_space_ = heap->shared_space_; + shared_lo_allocation_space_ = heap->shared_lo_space_; + DCHECK(!v8_flags.use_map_space); + DCHECK_NULL(shared_map_allocation_space_); + + } else if (isolate()->shared_isolate()) { Heap* shared_heap = isolate()->shared_isolate()->heap(); - shared_old_space_ = shared_heap->old_space(); - shared_lo_space_ = shared_heap->lo_space(); - shared_old_allocator_.reset( - new ConcurrentAllocator(main_thread_local_heap(), shared_old_space_)); + shared_space_allocator_ = std::make_unique<ConcurrentAllocator>( + main_thread_local_heap(), shared_heap->old_space()); if (shared_heap->map_space()) { - shared_map_space_ = shared_heap->map_space(); - shared_map_allocator_.reset( - new ConcurrentAllocator(main_thread_local_heap(), shared_map_space_)); + shared_map_allocator_ = std::make_unique<ConcurrentAllocator>( + main_thread_local_heap(), shared_heap->map_space()); } + + shared_allocation_space_ = shared_heap->old_space(); + shared_lo_allocation_space_ = shared_heap->lo_space(); + shared_map_allocation_space_ = shared_heap->map_space(); } main_thread_local_heap()->SetUpMainThread(); @@ -5668,8 +5743,8 @@ void Heap::AttachCppHeap(v8::CppHeap* cpp_heap) { void Heap::DetachCppHeap() { CppHeap::From(cpp_heap_)->DetachIsolate(); - cpp_heap_ = nullptr; local_embedder_heap_tracer()->SetCppHeap(nullptr); + cpp_heap_ = nullptr; } const cppgc::EmbedderStackState* Heap::overriden_stack_state() const { @@ -5738,6 +5813,17 @@ void Heap::StartTearDown() { } } +void Heap::TearDownWithSharedHeap() { + DCHECK_EQ(gc_state(), TEAR_DOWN); + + // Assert that there are no background threads left and no executable memory + // chunks are unprotected. + safepoint()->AssertMainThreadIsOnlyThread(); + + // Might use the external pointer which might be in the shared heap. 
+ external_string_table_.TearDown(); +} + void Heap::TearDown() { DCHECK_EQ(gc_state(), TEAR_DOWN); @@ -5763,9 +5849,11 @@ void Heap::TearDown() { } if (new_space()) { - if (v8_flags.concurrent_minor_mc) { + if (minor_mc_task_observer_) { + DCHECK_NULL(scavenge_task_observer_); new_space()->RemoveAllocationObserver(minor_mc_task_observer_.get()); } else { + DCHECK_NOT_NULL(scavenge_task_observer_); new_space()->RemoveAllocationObserver(scavenge_task_observer_.get()); } } @@ -5804,6 +5892,9 @@ void Heap::TearDown() { minor_mark_compact_collector_.reset(); } + sweeper_->TearDown(); + sweeper_.reset(); + scavenger_collector_.reset(); array_buffer_sweeper_.reset(); incremental_marking_.reset(); @@ -5830,16 +5921,11 @@ void Heap::TearDown() { cpp_heap_ = nullptr; } - external_string_table_.TearDown(); - tracer_.reset(); - allocation_sites_to_pretenure_.reset(); - - shared_old_space_ = nullptr; - shared_old_allocator_.reset(); + pretenuring_handler_.reset(); - shared_map_space_ = nullptr; + shared_space_allocator_.reset(); shared_map_allocator_.reset(); { @@ -5847,8 +5933,7 @@ void Heap::TearDown() { "Deletion of CODE_SPACE and CODE_LO_SPACE requires write access to " "Code page headers"); for (int i = FIRST_MUTABLE_SPACE; i <= LAST_MUTABLE_SPACE; i++) { - delete space_[i]; - space_[i] = nullptr; + space_[i].reset(); } } @@ -6060,6 +6145,7 @@ void Heap::ClearRecordedSlot(HeapObject object, ObjectSlot slot) { // No need to update old-to-old here since that remembered set is gone // after a full GC and not re-recorded until sweeping is finished. RememberedSet<OLD_TO_NEW>::Remove(page, slot.address()); + RememberedSet<OLD_TO_SHARED>::Remove(page, slot.address()); } } #endif @@ -6090,6 +6176,7 @@ void Heap::VerifySlotRangeHasNoRecordedSlots(Address start, Address end) { #ifndef V8_DISABLE_WRITE_BARRIERS Page* page = Page::FromAddress(start); RememberedSet<OLD_TO_NEW>::CheckNoneInRange(page, start, end); + RememberedSet<OLD_TO_SHARED>::CheckNoneInRange(page, start, end); #endif } #endif @@ -6104,6 +6191,8 @@ void Heap::ClearRecordedSlotRange(Address start, Address end) { if (!page->SweepingDone()) { RememberedSet<OLD_TO_NEW>::RemoveRange(page, start, end, SlotSet::KEEP_EMPTY_BUCKETS); + RememberedSet<OLD_TO_SHARED>::RemoveRange(page, start, end, + SlotSet::KEEP_EMPTY_BUCKETS); } } #endif @@ -6567,7 +6656,7 @@ void Heap::RemoveDirtyFinalizationRegistriesOnContext(NativeContext context) { set_dirty_js_finalization_registries_list_tail(prev); } -void Heap::KeepDuringJob(Handle<JSReceiver> target) { +void Heap::KeepDuringJob(Handle<HeapObject> target) { DCHECK(weak_refs_keep_during_job().IsUndefined() || weak_refs_keep_during_job().IsOrderedHashSet()); Handle<OrderedHashSet> table; @@ -6773,9 +6862,12 @@ bool Heap::AllowedToBeMigrated(Map map, HeapObject obj, AllocationSpace dst) { return dst == CODE_SPACE && type == CODE_TYPE; case MAP_SPACE: return dst == MAP_SPACE && type == MAP_TYPE; + case SHARED_SPACE: + return dst == SHARED_SPACE; case LO_SPACE: case CODE_LO_SPACE: case NEW_LO_SPACE: + case SHARED_LO_SPACE: case RO_SPACE: return false; } @@ -7214,6 +7306,69 @@ void Heap::set_allocation_timeout(int allocation_timeout) { } #endif // V8_ENABLE_ALLOCATION_TIMEOUT +void Heap::FinishSweepingIfOutOfWork() { + if (sweeper()->sweeping_in_progress() && v8_flags.concurrent_sweeping && + !sweeper()->AreSweeperTasksRunning()) { + // At this point we know that all concurrent sweeping tasks have run + // out of work and quit: all pages are swept. The main thread still needs + // to complete sweeping though. 
+ EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only); + } + if (cpp_heap()) { + // Ensure that sweeping is also completed for the C++ managed heap, if one + // exists and it's out of work. + CppHeap::From(cpp_heap())->FinishSweepingIfOutOfWork(); + } +} + +void Heap::EnsureSweepingCompleted(SweepingForcedFinalizationMode mode) { + if (sweeper()->sweeping_in_progress()) { + TRACE_GC_EPOCH(tracer(), GCTracer::Scope::MC_COMPLETE_SWEEPING, + ThreadKind::kMain); + + sweeper()->EnsureCompleted(); + old_space()->RefillFreeList(); + { + CodePageHeaderModificationScope rwx_write_scope( + "Updating per-page stats stored in page headers requires write " + "access to Code page headers"); + code_space()->RefillFreeList(); + } + if (shared_space()) { + shared_space()->RefillFreeList(); + } + if (map_space()) { + map_space()->RefillFreeList(); + map_space()->SortFreeList(); + } + + tracer()->NotifySweepingCompleted(); + +#ifdef VERIFY_HEAP + if (v8_flags.verify_heap && !evacuation()) { + FullEvacuationVerifier verifier(this); + verifier.Run(); + } +#endif + } + + if (mode == SweepingForcedFinalizationMode::kUnifiedHeap && cpp_heap()) { + // Ensure that sweeping is also completed for the C++ managed heap, if one + // exists. + CppHeap::From(cpp_heap())->FinishSweepingIfRunning(); + DCHECK(!CppHeap::From(cpp_heap())->sweeper().IsSweepingInProgress()); + } + + DCHECK_IMPLIES( + mode == SweepingForcedFinalizationMode::kUnifiedHeap || !cpp_heap(), + !tracer()->IsSweepingInProgress()); +} + +void Heap::DrainSweepingWorklistForSpace(AllocationSpace space) { + if (!sweeper()->sweeping_in_progress()) return; + sweeper()->DrainSweepingWorklistForSpace(space); +} + EmbedderStackStateScope::EmbedderStackStateScope(Heap* heap, Origin origin, StackState stack_state) : local_tracer_(heap->local_embedder_heap_tracer()), diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h index 6e270f246df648..0cf23b5ef4d128 100644 --- a/deps/v8/src/heap/heap.h +++ b/deps/v8/src/heap/heap.h @@ -31,6 +31,9 @@ #include "src/heap/base/stack.h" #include "src/heap/gc-callbacks.h" #include "src/heap/heap-allocator.h" +#include "src/heap/marking-state.h" +#include "src/heap/pretenuring-handler.h" +#include "src/heap/sweeper.h" #include "src/init/heap-symbols.h" #include "src/objects/allocation-site.h" #include "src/objects/fixed-array.h" @@ -98,8 +101,6 @@ class CppHeap; class GCIdleTimeHandler; class GCIdleTimeHeapState; class GCTracer; -template <typename T> -class GlobalHandleVector; class IsolateSafepoint; class HeapObjectAllocationTracker; class HeapObjectsFilter; @@ -127,7 +128,9 @@ class SafepointScope; class ScavengeJob; class Scavenger; class ScavengerCollector; +class SharedLargeObjectSpace; class SharedReadOnlySpace; +class SharedSpace; class Space; class StressScavengeObserver; class TimedHistogram; @@ -140,6 +143,8 @@ enum ArrayStorageAllocationMode { enum class ClearRecordedSlots { kYes, kNo }; +enum class UpdateInvalidatedObjectSize { kYes, kNo }; + enum class InvalidateRecordedSlots { kYes, kNo }; enum class ClearFreedMemoryMode { kClearFreedMemory, kDontClearFreedMemory }; @@ -148,46 +153,6 @@ enum ExternalBackingStoreType { kArrayBuffer, kExternalString, kNumTypes }; enum class RetainingPathOption { kDefault, kTrackEphemeronPath }; -// These values are persisted to logs. Entries should not be renumbered and -// numeric values should never be reused. If you add new items here, update -// src/tools/metrics/histograms/enums.xml in chromium. 
-enum class GarbageCollectionReason : int { - kUnknown = 0, - kAllocationFailure = 1, - kAllocationLimit = 2, - kContextDisposal = 3, - kCountersExtension = 4, - kDebugger = 5, - kDeserializer = 6, - kExternalMemoryPressure = 7, - kFinalizeMarkingViaStackGuard = 8, - kFinalizeMarkingViaTask = 9, - kFullHashtable = 10, - kHeapProfiler = 11, - kTask = 12, - kLastResort = 13, - kLowMemoryNotification = 14, - kMakeHeapIterable = 15, - kMemoryPressure = 16, - kMemoryReducer = 17, - kRuntime = 18, - kSamplingProfiler = 19, - kSnapshotCreator = 20, - kTesting = 21, - kExternalFinalize = 22, - kGlobalAllocationLimit = 23, - kMeasureMemory = 24, - kBackgroundAllocationFailure = 25, - kFinalizeMinorMC = 26, - kCppHeapAllocationFailure = 27, - - kLastReason = kCppHeapAllocationFailure, -}; - -static_assert(kGarbageCollectionReasonMaxValue == - static_cast<int>(GarbageCollectionReason::kLastReason), - "The value of kGarbageCollectionReasonMaxValue is inconsistent."); - enum class YoungGenerationHandling { kRegularScavenge = 0, kFastPromotionDuringScavenge = 1, @@ -254,7 +219,6 @@ class Heap { // and the key of the entry is in new-space. Such keys do not appear in the // usual OLD_TO_NEW remembered set. EphemeronRememberedSet ephemeron_remembered_set_; - enum FindMementoMode { kForRuntime, kForGC }; enum class HeapGrowingMode { kSlow, kConservative, kMinimal, kDefault }; @@ -334,9 +298,6 @@ class Heap { std::atomic<int64_t> low_since_mark_compact_{0}; }; - using PretenuringFeedbackMap = - std::unordered_map<AllocationSite, size_t, Object::Hasher>; - // Taking this mutex prevents the GC from entering a phase that relocates // object references. base::Mutex* relocation_mutex() { return &relocation_mutex_; } @@ -695,11 +656,6 @@ class Heap { bool IsGCWithStack() const; - // If an object has an AllocationMemento trailing it, return it, otherwise - // return a null AllocationMemento. - template <FindMementoMode mode> - inline AllocationMemento FindAllocationMemento(Map map, HeapObject object); - // Performs GC after background allocation failure. void CollectGarbageForBackground(LocalHeap* local_heap); @@ -818,9 +774,9 @@ class Heap { } #if V8_ENABLE_WEBASSEMBLY - // TODO(manoskouk): Inline this if STRONG_MUTABLE_MOVABLE_ROOT_LIST setters - // become public. - void EnsureWasmCanonicalRttsSize(int length); + // TODO(manoskouk): Consider inlining/moving this if + // STRONG_MUTABLE_MOVABLE_ROOT_LIST setters become public. + V8_EXPORT_PRIVATE void EnsureWasmCanonicalRttsSize(int length); #endif // =========================================================================== @@ -861,6 +817,9 @@ class Heap { // Sets the TearDown state, so no new GC tasks get posted. void StartTearDown(); + // Destroys all data that might require the shared heap. + void TearDownWithSharedHeap(); + // Destroys all memory allocated by the heap. 
void TearDown(); @@ -876,16 +835,23 @@ NewSpace* new_space() const { return new_space_; } inline PagedNewSpace* paged_new_space() const; OldSpace* old_space() const { return old_space_; } - OldSpace* shared_old_space() const { return shared_old_space_; } CodeSpace* code_space() const { return code_space_; } + SharedSpace* shared_space() const { return shared_space_; } MapSpace* map_space() const { return map_space_; } inline PagedSpace* space_for_maps(); OldLargeObjectSpace* lo_space() const { return lo_space_; } - OldLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; } CodeLargeObjectSpace* code_lo_space() const { return code_lo_space_; } + SharedLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; } NewLargeObjectSpace* new_lo_space() const { return new_lo_space_; } ReadOnlySpace* read_only_space() const { return read_only_space_; } + PagedSpace* shared_allocation_space() const { + return shared_allocation_space_; + } + OldLargeObjectSpace* shared_lo_allocation_space() const { + return shared_lo_allocation_space_; + } + inline PagedSpace* paged_space(int idx); inline Space* space(int idx); @@ -904,13 +870,11 @@ class Heap { inline Isolate* isolate() const; -#ifdef DEBUG // Check if we run on the isolate's main thread. inline bool IsMainThread() const; // Check if we run on the current main thread of the shared isolate during // shared GC. inline bool IsSharedMainThread() const; -#endif MarkCompactCollector* mark_compact_collector() { return mark_compact_collector_.get(); } @@ -920,6 +884,8 @@ class Heap { return minor_mark_compact_collector_.get(); } + Sweeper* sweeper() { return sweeper_.get(); } + ArrayBufferSweeper* array_buffer_sweeper() { return array_buffer_sweeper_.get(); } @@ -991,7 +957,7 @@ class Heap { return is_finalization_registry_cleanup_task_posted_; } - V8_EXPORT_PRIVATE void KeepDuringJob(Handle<JSReceiver> target); + V8_EXPORT_PRIVATE void KeepDuringJob(Handle<HeapObject> target); void ClearKeptObjects(); // =========================================================================== @@ -1030,8 +996,14 @@ class Heap { const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); // Performs a garbage collection operation for the shared heap. - V8_EXPORT_PRIVATE void CollectSharedGarbage( - GarbageCollectionReason gc_reason); + V8_EXPORT_PRIVATE bool CollectGarbageShared( + LocalHeap* local_heap, GarbageCollectionReason gc_reason); + + // Requests garbage collection from some other thread. + V8_EXPORT_PRIVATE bool CollectGarbageFromAnyThread( + LocalHeap* local_heap, + GarbageCollectionReason gc_reason = + GarbageCollectionReason::kBackgroundAllocationFailure); // Reports an external memory pressure event, either performs a major GC or // completes incremental marking in order to free external resources. @@ -1125,7 +1097,7 @@ void CompleteSweepingYoung(GarbageCollector collector); // Ensures that sweeping is finished for that object's page. - void EnsureSweepingCompleted(HeapObject object); + void EnsureSweepingCompletedForObject(HeapObject object); IncrementalMarking* incremental_marking() const { return incremental_marking_.get(); } @@ -1151,8 +1123,11 @@ // The runtime uses this function to inform the GC of object size changes. The // GC will fill this area with a filler object and might clear recorded slots // in that area.
- void NotifyObjectSizeChange(HeapObject, int old_size, int new_size, - ClearRecordedSlots clear_recorded_slots); + void NotifyObjectSizeChange( + HeapObject, int old_size, int new_size, + ClearRecordedSlots clear_recorded_slots, + UpdateInvalidatedObjectSize update_invalidated_object_size = + UpdateInvalidatedObjectSize::kYes); // =========================================================================== // Deoptimization support API. =============================================== @@ -1169,8 +1144,6 @@ class Heap { void DeoptMarkedAllocationSites(); - bool DeoptMaybeTenuredAllocationSites(); - // =========================================================================== // Embedder heap tracer support. ============================================= // =========================================================================== @@ -1202,7 +1175,7 @@ class Heap { V8_EXPORT_PRIVATE void SetStackStart(void* stack_start); - ::heap::base::Stack& stack(); + V8_EXPORT_PRIVATE ::heap::base::Stack& stack(); // =========================================================================== // Embedder roots optimizations. ============================================= @@ -1274,7 +1247,8 @@ class Heap { V8_EXPORT_PRIVATE bool InSpace(HeapObject value, AllocationSpace space) const; // Returns true when this heap is shared. - V8_EXPORT_PRIVATE bool IsShared(); + V8_EXPORT_PRIVATE bool IsShared() const; + V8_EXPORT_PRIVATE bool ShouldMarkSharedHeap() const; // Slow methods that can be used for verification as they can also be used // with off-heap Addresses. @@ -1393,15 +1367,15 @@ class Heap { } inline size_t promoted_objects_size() { return promoted_objects_size_; } - inline void IncrementSemiSpaceCopiedObjectSize(size_t object_size) { - semi_space_copied_object_size_ += object_size; + inline void IncrementNewSpaceSurvivingObjectSize(size_t object_size) { + new_space_surviving_object_size_ += object_size; } - inline size_t semi_space_copied_object_size() { - return semi_space_copied_object_size_; + inline size_t new_space_surviving_object_size() { + return new_space_surviving_object_size_; } inline size_t SurvivedYoungObjectSize() { - return promoted_objects_size_ + semi_space_copied_object_size_; + return promoted_objects_size_ + new_space_surviving_object_size_; } inline void IncrementNodesDiedInNewSpace(int count) { @@ -1413,7 +1387,6 @@ class Heap { inline void IncrementNodesPromoted() { nodes_promoted_++; } inline void IncrementYoungSurvivorsCounter(size_t survived) { - survived_last_scavenge_ = survived; survived_since_last_expansion_ += survived; } @@ -1531,27 +1504,6 @@ class Heap { V8_EXPORT_PRIVATE void* AllocateExternalBackingStore( const std::function<void*(size_t)>& allocate, size_t byte_length); - // =========================================================================== - // Allocation site tracking. ================================================= - // =========================================================================== - - // Updates the AllocationSite of a given {object}. The entry (including the - // count) is cached on the local pretenuring feedback. - inline void UpdateAllocationSite( - Map map, HeapObject object, PretenuringFeedbackMap* pretenuring_feedback); - - // Merges local pretenuring feedback into the global one. Note that this - // method needs to be called after evacuation, as allocation sites may be - // evacuated and this method resolves forward pointers accordingly. 
- void MergeAllocationSitePretenuringFeedback( - const PretenuringFeedbackMap& local_pretenuring_feedback); - - // Adds an allocation site to the list of sites to be pretenured during the - // next collection. Added allocation sites are pretenured independent of - // their feedback. - V8_EXPORT_PRIVATE void PretenureAllocationSiteOnNextCollection( - AllocationSite site); - // =========================================================================== // Allocation tracking. ====================================================== // =========================================================================== @@ -1627,6 +1579,28 @@ class Heap { // it supports a forwarded map. Fails if the map is not the code map. Map GcSafeMapOfCodeSpaceObject(HeapObject object); + // =========================================================================== + // Sweeping. ================================================================= + // =========================================================================== + + bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); } + + void FinishSweepingIfOutOfWork(); + + enum class SweepingForcedFinalizationMode { kUnifiedHeap, kV8Only }; + + // Ensures that sweeping is finished. + // + // Note: Can only be called safely from main thread. + V8_EXPORT_PRIVATE void EnsureSweepingCompleted( + SweepingForcedFinalizationMode mode); + + void DrainSweepingWorklistForSpace(AllocationSpace space); + + void set_evacuation(bool evacuation) { evacuation_ = evacuation; } + + bool evacuation() const { return evacuation_; } + // ============================================================================= #ifdef V8_ENABLE_ALLOCATION_TIMEOUT @@ -1683,6 +1657,16 @@ class Heap { return (current_gc_flags_ & kReduceMemoryFootprintMask) != 0; } + MarkingState* marking_state() { return &marking_state_; } + + NonAtomicMarkingState* non_atomic_marking_state() { + return &non_atomic_marking_state_; + } + + AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; } + + PretenturingHandler* pretenuring_handler() { return &pretenuring_handler_; } + private: class AllocationTrackerForDebugging; @@ -1763,8 +1747,6 @@ class Heap { static const int kMaxMarkCompactsInIdleRound = 7; - static const int kInitialFeedbackCapacity = 256; - Heap(); ~Heap(); @@ -1802,6 +1784,10 @@ class Heap { // Free all shared LABs of main thread. void FreeMainThreadSharedLinearAllocationAreas(); + // Enables/Disables black allocation in shared LABs. + void MarkSharedLinearAllocationAreasBlack(); + void UnmarkSharedLinearAllocationAreas(); + // Performs garbage collection in a safepoint. // Returns the number of freed global handles. size_t PerformGarbageCollection( @@ -1869,6 +1855,7 @@ class Heap { bool HasLowOldGenerationAllocationRate(); bool HasLowEmbedderAllocationRate(); + bool ShouldReduceNewSpaceSize() const; void ReduceNewSpaceSize(); GCIdleTimeHeapState ComputeHeapState(); @@ -1900,18 +1887,6 @@ class Heap { void InvokeIncrementalMarkingPrologueCallbacks(); void InvokeIncrementalMarkingEpilogueCallbacks(); - // =========================================================================== - // Pretenuring. ============================================================== - // =========================================================================== - - // Pretenuring decisions are made based on feedback collected during new space - // evacuation. Note that between feedback collection and calling this method - // object in old space must not move. 
- void ProcessPretenuringFeedback(); - - // Removes an entry from the global pretenuring storage. - void RemoveAllocationSitePretenuringFeedback(AllocationSite site); - // =========================================================================== // Actual GC. ================================================================ // =========================================================================== @@ -1962,7 +1937,7 @@ class Heap { void UpdateTotalGCTime(double duration); - bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; } + bool MaximumSizeMinorGC() { return maximum_size_minor_gcs_ > 0; } bool IsIneffectiveMarkCompact(size_t old_generation_size, double mutator_utilization); @@ -2173,9 +2148,6 @@ class Heap { // scavenge since last new space expansion. size_t survived_since_last_expansion_ = 0; - // ... and since the last scavenge. - size_t survived_last_scavenge_ = 0; - // This is not the depth of nested AlwaysAllocateScope's but rather a single // count, as scopes can be acquired from multiple tasks (read: threads). std::atomic<size_t> always_allocate_scope_count_{0}; @@ -2190,24 +2162,30 @@ class Heap { // For keeping track of context disposals. int contexts_disposed_ = 0; + // Spaces owned by this heap through space_. NewSpace* new_space_ = nullptr; OldSpace* old_space_ = nullptr; CodeSpace* code_space_ = nullptr; MapSpace* map_space_ = nullptr; + SharedSpace* shared_space_ = nullptr; OldLargeObjectSpace* lo_space_ = nullptr; CodeLargeObjectSpace* code_lo_space_ = nullptr; NewLargeObjectSpace* new_lo_space_ = nullptr; + SharedLargeObjectSpace* shared_lo_space_ = nullptr; ReadOnlySpace* read_only_space_ = nullptr; - OldSpace* shared_old_space_ = nullptr; - OldLargeObjectSpace* shared_lo_space_ = nullptr; - MapSpace* shared_map_space_ = nullptr; + // Either pointer to owned shared spaces or pointer to unowned shared spaces + // in another isolate. + PagedSpace* shared_allocation_space_ = nullptr; + OldLargeObjectSpace* shared_lo_allocation_space_ = nullptr; + PagedSpace* shared_map_allocation_space_ = nullptr; - std::unique_ptr<ConcurrentAllocator> shared_old_allocator_; + // Allocators for the shared spaces. + std::unique_ptr<ConcurrentAllocator> shared_space_allocator_; std::unique_ptr<ConcurrentAllocator> shared_map_allocator_; // Map from the space id to the space. - Space* space_[LAST_SPACE + 1]; + std::unique_ptr<Space> space_[LAST_SPACE + 1]; LocalHeap* main_thread_local_heap_ = nullptr; @@ -2284,9 +2262,9 @@ class Heap { size_t promoted_objects_size_ = 0; double promotion_ratio_ = 0.0; double promotion_rate_ = 0.0; - size_t semi_space_copied_object_size_ = 0; - size_t previous_semi_space_copied_object_size_ = 0; - double semi_space_copied_rate_ = 0.0; + size_t new_space_surviving_object_size_ = 0; + size_t previous_new_space_surviving_object_size_ = 0; + double new_space_surviving_rate_ = 0.0; int nodes_died_in_new_space_ = 0; int nodes_copied_in_new_space_ = 0; int nodes_promoted_ = 0; @@ -2295,7 +2273,7 @@ class Heap { // tenure state. When we switched to the maximum new space size we deoptimize // the code that belongs to the allocation site and derive the lifetime // of the allocation site. - unsigned int maximum_size_scavenges_ = 0; + unsigned int maximum_size_minor_gcs_ = 0; // Total time spent in GC. 
double total_gc_time_ms_ = 0.0; @@ -2304,6 +2282,7 @@ class Heap { double last_gc_time_ = 0.0; std::unique_ptr<GCTracer> tracer_; + std::unique_ptr<Sweeper> sweeper_; std::unique_ptr<MarkCompactCollector> mark_compact_collector_; std::unique_ptr<MinorMarkCompactCollector> minor_mark_compact_collector_; std::unique_ptr<ScavengerCollector> scavenger_collector_; @@ -2359,16 +2338,6 @@ class Heap { // The size of global memory after the last MarkCompact GC. size_t global_memory_at_last_gc_ = 0; - // The feedback storage is used to store allocation sites (keys) and how often - // they have been visited (values) by finding a memento behind an object. The - // storage is only alive temporary during a GC. The invariant is that all - // pointers in this map are already fixed, i.e., they do not point to - // forwarding pointers. - PretenuringFeedbackMap global_pretenuring_feedback_; - - std::unique_ptr<GlobalHandleVector<AllocationSite>> - allocation_sites_to_pretenure_; - char trace_ring_buffer_[kTraceRingBufferSize]; // If it's not full then the data is from 0 to ring_buffer_end_. If it's @@ -2433,12 +2402,22 @@ class Heap { bool is_finalization_registry_cleanup_task_posted_ = false; + bool evacuation_ = false; + std::unique_ptr<third_party_heap::Heap> tp_heap_; + MarkingState marking_state_; + NonAtomicMarkingState non_atomic_marking_state_; + AtomicMarkingState atomic_marking_state_; + + PretenturingHandler pretenuring_handler_; + // Classes in "heap" can be friends. friend class AlwaysAllocateScope; friend class ArrayBufferCollector; friend class ArrayBufferSweeper; + friend class CollectorBase; + friend class ConcurrentAllocator; friend class ConcurrentMarking; friend class EvacuateVisitorBase; friend class GCCallbacksScope; @@ -2468,6 +2447,7 @@ class Heap { friend class ObjectStatsCollector; friend class Page; friend class PagedSpaceBase; + friend class PretenturingHandler; friend class ReadOnlyRoots; friend class Scavenger; friend class ScavengerCollector; @@ -2887,6 +2867,18 @@ class V8_NODISCARD CppClassNamesAsHeapObjectNameScope final { std::unique_ptr<cppgc::internal::ClassNameAsHeapObjectNameScope> scope_; }; +class V8_NODISCARD EvacuationScope { + public: + explicit EvacuationScope(Heap* heap) : heap_(heap) { + heap_->set_evacuation(true); + } + + ~EvacuationScope() { heap_->set_evacuation(false); } + + private: + Heap* const heap_; +}; + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/heap/incremental-marking-inl.h b/deps/v8/src/heap/incremental-marking-inl.h index 797026352d6732..cc413f2b82c385 100644 --- a/deps/v8/src/heap/incremental-marking-inl.h +++ b/deps/v8/src/heap/incremental-marking-inl.h @@ -8,6 +8,7 @@ #include "src/execution/isolate.h" #include "src/heap/heap-inl.h" #include "src/heap/incremental-marking.h" +#include "src/heap/marking-state-inl.h" namespace v8 { namespace internal { diff --git a/deps/v8/src/heap/incremental-marking.cc b/deps/v8/src/heap/incremental-marking.cc index 780bf08da7eba8..6f633f073365f0 100644 --- a/deps/v8/src/heap/incremental-marking.cc +++ b/deps/v8/src/heap/incremental-marking.cc @@ -58,9 +58,8 @@ IncrementalMarking::IncrementalMarking(Heap* heap, WeakObjects* weak_objects) incremental_marking_job_(heap), new_generation_observer_(this, kYoungGenerationAllocatedThreshold), old_generation_observer_(this, kOldGenerationAllocatedThreshold), - marking_state_(heap->isolate()), - atomic_marking_state_(heap->isolate()), - non_atomic_marking_state_(heap->isolate()) {} + marking_state_(heap->marking_state()), + 
atomic_marking_state_(heap->atomic_marking_state()) {} void IncrementalMarking::MarkBlackAndVisitObjectDueToLayoutChange( HeapObject obj) { @@ -131,7 +130,7 @@ bool IncrementalMarking::IsBelowActivationThresholds() const { void IncrementalMarking::Start(GarbageCollector garbage_collector, GarbageCollectionReason gc_reason) { - DCHECK(!major_collector_->sweeping_in_progress()); + DCHECK(!heap_->sweeping_in_progress()); DCHECK(!heap_->IsShared()); if (v8_flags.trace_incremental_marking) { @@ -160,15 +159,22 @@ void IncrementalMarking::Start(GarbageCollector garbage_collector, Counters* counters = heap_->isolate()->counters(); - counters->incremental_marking_reason()->AddSample( - static_cast<int>(gc_reason)); + const bool is_major = garbage_collector == GarbageCollector::MARK_COMPACTOR; + if (is_major) { + // Reasons are only reported for major GCs + counters->incremental_marking_reason()->AddSample( + static_cast<int>(gc_reason)); + } NestedTimedHistogramScope incremental_marking_scope( - counters->gc_incremental_marking_start()); - TRACE_EVENT1( - "v8", "V8.GCIncrementalMarkingStart", "epoch", - heap_->tracer()->CurrentEpoch(GCTracer::Scope::MC_INCREMENTAL_START)); - TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_INCREMENTAL_START, - ThreadKind::kMain); + is_major ? counters->gc_incremental_marking_start() + : counters->gc_minor_incremental_marking_start()); + const auto scope_id = is_major ? GCTracer::Scope::MC_INCREMENTAL_START + : GCTracer::Scope::MINOR_MC_INCREMENTAL_START; + TRACE_EVENT1("v8", + is_major ? "V8.GCIncrementalMarkingStart" + : "V8.GCMinorIncrementalMarkingStart", + "epoch", heap_->tracer()->CurrentEpoch(scope_id)); + TRACE_GC_EPOCH(heap()->tracer(), scope_id, ThreadKind::kMain); heap_->tracer()->NotifyIncrementalMarkingStart(); start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs(); @@ -181,7 +187,7 @@ void IncrementalMarking::Start(GarbageCollector garbage_collector, schedule_update_time_ms_ = start_time_ms_; bytes_marked_concurrently_ = 0; - if (garbage_collector == GarbageCollector::MARK_COMPACTOR) { + if (is_major) { current_collector_ = CurrentCollector::kMajorMC; StartMarkingMajor(); heap_->AddAllocationObserversToAllSpaces(&old_generation_observer_, @@ -266,7 +272,20 @@ void IncrementalMarking::MarkRoots() { heap()->isolate()->global_handles()->IterateYoungStrongAndDependentRoots( &visitor); - // TODO(v8:13012): Do PageMarkingItem processing. 
+ + std::vector<PageMarkingItem> marking_items; + RememberedSet<OLD_TO_NEW>::IterateMemoryChunks( + heap_, [&marking_items](MemoryChunk* chunk) { + marking_items.emplace_back(chunk); + }); + + V8::GetCurrentPlatform() + ->CreateJob( + v8::TaskPriority::kUserBlocking, + std::make_unique<YoungGenerationMarkingJob>( + heap_->isolate(), heap_, minor_collector_->marking_worklists(), + std::move(marking_items), YoungMarkingJobType::kIncremental)) + ->Join(); } } @@ -389,6 +408,13 @@ void IncrementalMarking::StartBlackAllocation() { "Marking Code objects requires write access to the Code page header"); heap()->code_space()->MarkLinearAllocationAreaBlack(); } + if (heap()->isolate()->is_shared_heap_isolate()) { + DCHECK_EQ(heap()->shared_space()->top(), kNullAddress); + heap()->isolate()->global_safepoint()->IterateClientIsolates( + [](Isolate* client) { + client->heap()->MarkSharedLinearAllocationAreasBlack(); + }); + } heap()->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) { local_heap->MarkLinearAllocationAreaBlack(); }); @@ -407,6 +433,13 @@ void IncrementalMarking::PauseBlackAllocation() { "Marking Code objects requires write access to the Code page header"); heap()->code_space()->UnmarkLinearAllocationArea(); } + if (heap()->isolate()->is_shared_heap_isolate()) { + DCHECK_EQ(heap()->shared_space()->top(), kNullAddress); + heap()->isolate()->global_safepoint()->IterateClientIsolates( + [](Isolate* client) { + client->heap()->UnmarkSharedLinearAllocationAreas(); + }); + } heap()->safepoint()->IterateLocalHeaps( [](LocalHeap* local_heap) { local_heap->UnmarkLinearAllocationArea(); }); if (v8_flags.trace_incremental_marking) { @@ -433,14 +466,13 @@ void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() { Map filler_map = ReadOnlyRoots(heap_).one_pointer_filler_map(); - MarkingState* minor_marking_state = - heap()->minor_mark_compact_collector()->marking_state(); + MarkingState* marking_state = heap()->marking_state(); major_collector_->local_marking_worklists()->Publish(); MarkingBarrier::PublishAll(heap()); PtrComprCageBase cage_base(heap_->isolate()); - major_collector_->marking_worklists()->Update([this, minor_marking_state, - cage_base, filler_map]( + major_collector_->marking_worklists()->Update([this, marking_state, cage_base, + filler_map]( HeapObject obj, HeapObject* out) -> bool { DCHECK(obj.IsHeapObject()); @@ -458,7 +490,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() { } HeapObject dest = map_word.ToForwardingAddress(); USE(this); - DCHECK_IMPLIES(marking_state()->IsWhite(obj), obj.IsFreeSpaceOrFiller()); + DCHECK_IMPLIES(marking_state->IsWhite(obj), obj.IsFreeSpaceOrFiller()); if (dest.InSharedHeap()) { // Object got promoted into the shared heap. Drop it from the client // heap marking worklist. @@ -476,7 +508,7 @@ void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() { DCHECK_IMPLIES( v8_flags.minor_mc, !obj.map_word(cage_base, kRelaxedLoad).IsForwardingAddress()); - if (minor_marking_state->IsWhite(obj)) { + if (marking_state->IsWhite(obj)) { return false; } // Either a large object or an object marked by the minor @@ -488,13 +520,13 @@ void IncrementalMarking::UpdateMarkingWorklistAfterYoungGenGC() { // Only applicable during minor MC garbage collections. 
if (!Heap::IsLargeObject(obj) && Page::FromHeapObject(obj)->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)) { - if (minor_marking_state->IsWhite(obj)) { + if (marking_state->IsWhite(obj)) { return false; } *out = obj; return true; } - DCHECK_IMPLIES(marking_state()->IsWhite(obj), + DCHECK_IMPLIES(marking_state->IsWhite(obj), obj.IsFreeSpaceOrFiller(cage_base)); // Skip one word filler objects that appear on the // stack when we perform in place array shift. @@ -730,6 +762,7 @@ void IncrementalMarking::AdvanceAndFinalizeIfComplete() { } void IncrementalMarking::AdvanceAndFinalizeIfNecessary() { + if (!IsMajorMarking()) return; DCHECK(!heap_->always_allocate()); AdvanceOnAllocation(); @@ -746,7 +779,7 @@ void IncrementalMarking::AdvanceForTesting(double max_step_size_in_ms) { void IncrementalMarking::AdvanceOnAllocation() { DCHECK_EQ(heap_->gc_state(), Heap::NOT_IN_GC); DCHECK(v8_flags.incremental_marking); - DCHECK(IsMarking()); + DCHECK(IsMajorMarking()); // Code using an AlwaysAllocateScope assumes that the GC state does not // change; that implies that no marking steps must be performed. diff --git a/deps/v8/src/heap/incremental-marking.h b/deps/v8/src/heap/incremental-marking.h index de99330123cc9f..a3fb20a0af1b83 100644 --- a/deps/v8/src/heap/incremental-marking.h +++ b/deps/v8/src/heap/incremental-marking.h @@ -86,12 +86,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final { IncrementalMarking(Heap* heap, WeakObjects* weak_objects); - MarkingState* marking_state() { return &marking_state_; } - AtomicMarkingState* atomic_marking_state() { return &atomic_marking_state_; } - NonAtomicMarkingState* non_atomic_marking_state() { - return &non_atomic_marking_state_; - } - void NotifyLeftTrimming(HeapObject from, HeapObject to); bool IsStopped() const { return !IsMarking(); } @@ -169,6 +163,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking final { } private: + MarkingState* marking_state() { return marking_state_; } + AtomicMarkingState* atomic_marking_state() { return atomic_marking_state_; } + class IncrementalMarkingRootMarkingVisitor; class Observer : public AllocationObserver { @@ -270,9 +267,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking final { Observer new_generation_observer_; Observer old_generation_observer_; - MarkingState marking_state_; - AtomicMarkingState atomic_marking_state_; - NonAtomicMarkingState non_atomic_marking_state_; + MarkingState* const marking_state_; + AtomicMarkingState* const atomic_marking_state_; base::Mutex background_live_bytes_mutex_; std::unordered_map<MemoryChunk*, intptr_t> background_live_bytes_; diff --git a/deps/v8/src/heap/invalidated-slots-inl.h b/deps/v8/src/heap/invalidated-slots-inl.h index 5c776ff3fb3df6..3ab25a0c7ef21f 100644 --- a/deps/v8/src/heap/invalidated-slots-inl.h +++ b/deps/v8/src/heap/invalidated-slots-inl.h @@ -7,6 +7,7 @@ #include "src/base/logging.h" #include "src/heap/invalidated-slots.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/spaces.h" #include "src/objects/heap-object.h" #include "src/objects/objects-inl.h" diff --git a/deps/v8/src/heap/invalidated-slots.cc b/deps/v8/src/heap/invalidated-slots.cc index 696b7f318aa8d5..d17394cc2c9725 100644 --- a/deps/v8/src/heap/invalidated-slots.cc +++ b/deps/v8/src/heap/invalidated-slots.cc @@ -34,7 +34,10 @@ InvalidatedSlotsFilter InvalidatedSlotsFilter::OldToShared( InvalidatedSlotsFilter::InvalidatedSlotsFilter( MemoryChunk* chunk, InvalidatedSlots* invalidated_slots, - RememberedSetType remembered_set_type, LivenessCheck liveness_check) { + RememberedSetType 
remembered_set_type, LivenessCheck liveness_check) + : marking_state_(liveness_check == LivenessCheck::kYes + ? chunk->heap()->non_atomic_marking_state() + : nullptr) { USE(remembered_set_type); invalidated_slots = invalidated_slots ? invalidated_slots : &empty_; @@ -42,14 +45,6 @@ InvalidatedSlotsFilter::InvalidatedSlotsFilter( iterator_end_ = invalidated_slots->end(); sentinel_ = chunk->area_end(); - if (liveness_check == LivenessCheck::kYes) { - marking_state_ = - chunk->heap()->mark_compact_collector()->non_atomic_marking_state(); - } else { - DCHECK_EQ(LivenessCheck::kNo, liveness_check); - marking_state_ = nullptr; - } - // Invoke NextInvalidatedObject twice, to initialize // invalidated_start_ to the first invalidated object and // next_invalidated_object_ to the second one. diff --git a/deps/v8/src/heap/invalidated-slots.h b/deps/v8/src/heap/invalidated-slots.h index cace4202c6ddb5..1215664575bcad 100644 --- a/deps/v8/src/heap/invalidated-slots.h +++ b/deps/v8/src/heap/invalidated-slots.h @@ -64,7 +64,7 @@ class V8_EXPORT_PRIVATE InvalidatedSlotsFilter { Address sentinel_; InvalidatedObjectInfo current_{kNullAddress, 0, false}; InvalidatedObjectInfo next_{kNullAddress, 0, false}; - NonAtomicMarkingState* marking_state_; + NonAtomicMarkingState* const marking_state_; InvalidatedSlots empty_; #ifdef DEBUG Address last_slot_; diff --git a/deps/v8/src/heap/large-spaces.cc b/deps/v8/src/heap/large-spaces.cc index 74c621e81f1066..20697a2fd466bd 100644 --- a/deps/v8/src/heap/large-spaces.cc +++ b/deps/v8/src/heap/large-spaces.cc @@ -11,6 +11,7 @@ #include "src/heap/combined-heap.h" #include "src/heap/incremental-marking.h" #include "src/heap/list.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/marking.h" #include "src/heap/memory-allocator.h" #include "src/heap/memory-chunk-inl.h" @@ -132,6 +133,7 @@ AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size) { AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size, Executability executable) { + object_size = ALIGN_TO_ALLOCATION_ALIGNMENT(object_size); DCHECK(!v8_flags.enable_third_party_heap); // Check if we want to force a GC before growing the old space further. // If so, fail the allocation. @@ -150,11 +152,10 @@ AllocationResult OldLargeObjectSpace::AllocateRaw(int object_size, heap()->GCFlagsForIncrementalMarking(), kGCCallbackScheduleIdleGarbageCollection); if (heap()->incremental_marking()->black_allocation()) { - heap()->incremental_marking()->marking_state()->WhiteToBlack(object); + heap()->marking_state()->WhiteToBlack(object); } - DCHECK_IMPLIES( - heap()->incremental_marking()->black_allocation(), - heap()->incremental_marking()->marking_state()->IsBlack(object)); + DCHECK_IMPLIES(heap()->incremental_marking()->black_allocation(), + heap()->marking_state()->IsBlack(object)); page->InitializationMemoryFence(); heap()->NotifyOldGenerationExpansion(identity(), page); AdvanceAndInvokeAllocationObservers(object.address(), @@ -169,6 +170,7 @@ AllocationResult OldLargeObjectSpace::AllocateRawBackground( AllocationResult OldLargeObjectSpace::AllocateRawBackground( LocalHeap* local_heap, int object_size, Executability executable) { + object_size = ALIGN_TO_ALLOCATION_ALIGNMENT(object_size); DCHECK(!v8_flags.enable_third_party_heap); // Check if we want to force a GC before growing the old space further. // If so, fail the allocation. 
@@ -183,11 +185,10 @@ AllocationResult OldLargeObjectSpace::AllocateRawBackground( HeapObject object = page->GetObject(); heap()->StartIncrementalMarkingIfAllocationLimitIsReachedBackground(); if (heap()->incremental_marking()->black_allocation()) { - heap()->incremental_marking()->marking_state()->WhiteToBlack(object); + heap()->marking_state()->WhiteToBlack(object); } - DCHECK_IMPLIES( - heap()->incremental_marking()->black_allocation(), - heap()->incremental_marking()->marking_state()->IsBlack(object)); + DCHECK_IMPLIES(heap()->incremental_marking()->black_allocation(), + heap()->marking_state()->IsBlack(object)); page->InitializationMemoryFence(); if (identity() == CODE_LO_SPACE) { heap()->isolate()->AddCodeMemoryChunk(page); @@ -478,6 +479,7 @@ NewLargeObjectSpace::NewLargeObjectSpace(Heap* heap, size_t capacity) capacity_(capacity) {} AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) { + object_size = ALIGN_TO_ALLOCATION_ALIGNMENT(object_size); DCHECK(!v8_flags.enable_third_party_heap); // Do not allocate more objects if promoting the existing object would exceed // the old generation capacity. @@ -501,10 +503,7 @@ AllocationResult NewLargeObjectSpace::AllocateRaw(int object_size) { page->SetFlag(MemoryChunk::TO_PAGE); UpdatePendingObject(result); if (v8_flags.minor_mc) { - heap() - ->minor_mark_compact_collector() - ->non_atomic_marking_state() - ->ClearLiveness(page); + heap()->non_atomic_marking_state()->ClearLiveness(page); } page->InitializationMemoryFence(); DCHECK(page->IsLargePage()); @@ -582,5 +581,15 @@ void CodeLargeObjectSpace::RemovePage(LargePage* page) { OldLargeObjectSpace::RemovePage(page); } +SharedLargeObjectSpace::SharedLargeObjectSpace(Heap* heap) + : OldLargeObjectSpace(heap, SHARED_LO_SPACE) {} + +AllocationResult SharedLargeObjectSpace::AllocateRawBackground( + LocalHeap* local_heap, int object_size) { + DCHECK(!v8_flags.enable_third_party_heap); + return OldLargeObjectSpace::AllocateRawBackground(local_heap, object_size, + NOT_EXECUTABLE); +} + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/heap/large-spaces.h b/deps/v8/src/heap/large-spaces.h index 70c55833e18ded..576c672fff49c0 100644 --- a/deps/v8/src/heap/large-spaces.h +++ b/deps/v8/src/heap/large-spaces.h @@ -190,6 +190,14 @@ class OldLargeObjectSpace : public LargeObjectSpace { LocalHeap* local_heap, int object_size, Executability executable); }; +class SharedLargeObjectSpace : public OldLargeObjectSpace { + public: + explicit SharedLargeObjectSpace(Heap* heap); + + V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT AllocationResult + AllocateRawBackground(LocalHeap* local_heap, int object_size); +}; + class NewLargeObjectSpace : public LargeObjectSpace { public: NewLargeObjectSpace(Heap* heap, size_t capacity); diff --git a/deps/v8/src/heap/linear-allocation-area.h b/deps/v8/src/heap/linear-allocation-area.h index 2b9b3a9132d84b..873dd31f7f2ca6 100644 --- a/deps/v8/src/heap/linear-allocation-area.h +++ b/deps/v8/src/heap/linear-allocation-area.h @@ -98,7 +98,11 @@ class LinearAllocationArea final { #ifdef DEBUG SLOW_DCHECK(start_ <= top_); SLOW_DCHECK(top_ <= limit_); - SLOW_DCHECK(top_ == kNullAddress || (top_ & kHeapObjectTagMask) == 0); + if (V8_COMPRESS_POINTERS_8GB_BOOL) { + SLOW_DCHECK(IsAligned(top_, kObjectAlignment8GbHeap)); + } else { + SLOW_DCHECK(IsAligned(top_, kObjectAlignment)); + } #endif // DEBUG } diff --git a/deps/v8/src/heap/local-heap-inl.h b/deps/v8/src/heap/local-heap-inl.h index cb6210fd571f43..401b7a4903ba2a 100644 --- 
a/deps/v8/src/heap/local-heap-inl.h +++ b/deps/v8/src/heap/local-heap-inl.h @@ -64,7 +64,8 @@ AllocationResult LocalHeap::AllocateRaw(int size_in_bytes, AllocationType type, DCHECK_EQ(type, AllocationType::kSharedOld); if (large_object) { - return heap()->code_lo_space()->AllocateRawBackground(this, size_in_bytes); + return heap()->shared_lo_allocation_space()->AllocateRawBackground( + this, size_in_bytes); } else { return shared_old_space_allocator()->AllocateRaw(size_in_bytes, alignment, origin); @@ -74,6 +75,7 @@ AllocationResult LocalHeap::AllocateRaw(int size_in_bytes, AllocationType type, Address LocalHeap::AllocateRawOrFail(int object_size, AllocationType type, AllocationOrigin origin, AllocationAlignment alignment) { + object_size = ALIGN_TO_ALLOCATION_ALIGNMENT(object_size); DCHECK(!v8_flags.enable_third_party_heap); AllocationResult result = AllocateRaw(object_size, type, origin, alignment); HeapObject object; diff --git a/deps/v8/src/heap/local-heap.cc b/deps/v8/src/heap/local-heap.cc index 611537708bb6dc..1541683ee59d7b 100644 --- a/deps/v8/src/heap/local-heap.cc +++ b/deps/v8/src/heap/local-heap.cc @@ -85,6 +85,7 @@ LocalHeap::~LocalHeap() { heap_->safepoint()->RemoveLocalHeap(this, [this] { FreeLinearAllocationArea(); + FreeSharedLinearAllocationArea(); if (!is_main_thread()) { CodePageHeaderModificationScope rwx_write_scope( @@ -120,9 +121,9 @@ void LocalHeap::SetUp() { std::make_unique<ConcurrentAllocator>(this, heap_->code_space()); DCHECK_NULL(shared_old_space_allocator_); - if (heap_->isolate()->shared_isolate()) { - shared_old_space_allocator_ = - std::make_unique<ConcurrentAllocator>(this, heap_->shared_old_space()); + if (heap_->isolate()->has_shared_heap()) { + shared_old_space_allocator_ = std::make_unique<ConcurrentAllocator>( + this, heap_->shared_allocation_space()); } DCHECK_NULL(marking_barrier_); @@ -347,7 +348,9 @@ void LocalHeap::FreeLinearAllocationArea() { } void LocalHeap::FreeSharedLinearAllocationArea() { - shared_old_space_allocator_->FreeLinearAllocationArea(); + if (shared_old_space_allocator_) { + shared_old_space_allocator_->FreeLinearAllocationArea(); + } } void LocalHeap::MakeLinearAllocationAreaIterable() { @@ -365,26 +368,15 @@ void LocalHeap::UnmarkLinearAllocationArea() { code_space_allocator_->UnmarkLinearAllocationArea(); } -bool LocalHeap::TryPerformCollection() { - if (is_main_thread()) { - heap_->CollectGarbageForBackground(this); - return true; - } else { - DCHECK(IsRunning()); - if (!heap_->collection_barrier_->TryRequestGC()) return false; - - LocalHeap* main_thread = heap_->main_thread_local_heap(); - - const ThreadState old_state = main_thread->state_.SetCollectionRequested(); +void LocalHeap::MarkSharedLinearAllocationAreaBlack() { + if (shared_old_space_allocator_) { + shared_old_space_allocator_->MarkLinearAllocationAreaBlack(); + } +} - if (old_state.IsRunning()) { - const bool performed_gc = - heap_->collection_barrier_->AwaitCollectionBackground(this); - return performed_gc; - } else { - DCHECK(old_state.IsParked()); - return false; - } +void LocalHeap::UnmarkSharedLinearAllocationArea() { + if (shared_old_space_allocator_) { + shared_old_space_allocator_->UnmarkLinearAllocationArea(); } } @@ -395,21 +387,34 @@ Address LocalHeap::PerformCollectionAndAllocateAgain( CHECK(!main_thread_parked_); allocation_failed_ = true; static const int kMaxNumberOfRetries = 3; + int failed_allocations = 0; + int parked_allocations = 0; for (int i = 0; i < kMaxNumberOfRetries; i++) { - if (!TryPerformCollection()) { + if 
(!heap_->CollectGarbageFromAnyThread(this)) { main_thread_parked_ = true; + parked_allocations++; } AllocationResult result = AllocateRaw(object_size, type, origin, alignment); - if (!result.IsFailure()) { + if (result.IsFailure()) { + failed_allocations++; + } else { allocation_failed_ = false; main_thread_parked_ = false; return result.ToObjectChecked().address(); } } + if (v8_flags.trace_gc) { + heap_->isolate()->PrintWithTimestamp( + "Background allocation failure: " + "allocations=%d" + "allocations.parked=%d", + failed_allocations, parked_allocations); + } + heap_->FatalProcessOutOfMemory("LocalHeap: allocation failed"); } @@ -433,9 +438,11 @@ void LocalHeap::InvokeGCEpilogueCallbacksInSafepoint(GCType gc_type, void LocalHeap::NotifyObjectSizeChange( HeapObject object, int old_size, int new_size, - ClearRecordedSlots clear_recorded_slots) { + ClearRecordedSlots clear_recorded_slots, + UpdateInvalidatedObjectSize update_invalidated_object_size) { heap()->NotifyObjectSizeChange(object, old_size, new_size, - clear_recorded_slots); + clear_recorded_slots, + update_invalidated_object_size); } } // namespace internal diff --git a/deps/v8/src/heap/local-heap.h b/deps/v8/src/heap/local-heap.h index a4f0e49b0792fb..4e6437669a69a2 100644 --- a/deps/v8/src/heap/local-heap.h +++ b/deps/v8/src/heap/local-heap.h @@ -115,6 +115,11 @@ class V8_EXPORT_PRIVATE LocalHeap { void MarkLinearAllocationAreaBlack(); void UnmarkLinearAllocationArea(); + // Mark/Unmark linear allocation areas in shared heap black. Used for black + // allocation. + void MarkSharedLinearAllocationAreaBlack(); + void UnmarkSharedLinearAllocationArea(); + // Give up linear allocation areas. Used for mark-compact GC. void FreeLinearAllocationArea(); @@ -149,8 +154,11 @@ class V8_EXPORT_PRIVATE LocalHeap { AllocationOrigin origin = AllocationOrigin::kRuntime, AllocationAlignment alignment = kTaggedAligned); - void NotifyObjectSizeChange(HeapObject object, int old_size, int new_size, - ClearRecordedSlots clear_recorded_slots); + void NotifyObjectSizeChange( + HeapObject object, int old_size, int new_size, + ClearRecordedSlots clear_recorded_slots, + UpdateInvalidatedObjectSize update_invalidated_object_size = + UpdateInvalidatedObjectSize::kYes); bool is_main_thread() const { return is_main_thread_; } bool deserialization_complete() const { @@ -158,9 +166,6 @@ class V8_EXPORT_PRIVATE LocalHeap { } ReadOnlySpace* read_only_space() { return heap_->read_only_space(); } - // Requests GC and blocks until the collection finishes. - bool TryPerformCollection(); - // Adds a callback that is invoked with the given |data| after each GC. // The callback is invoked on the main thread before any background thread // resumes. 
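The rewritten PerformCollectionAndAllocateAgain above drops LocalHeap::TryPerformCollection in favour of Heap::CollectGarbageFromAnyThread, counts failed and parked allocation attempts, and escalates to FatalProcessOutOfMemory once the retry budget is spent. A standalone sketch of that control flow, with std::function callbacks as hypothetical stand-ins for the GC request and the raw allocation:

#include <cstdio>
#include <cstdlib>
#include <functional>

bool AllocateWithRetries(const std::function<bool()>& collect_garbage,
                         const std::function<bool()>& allocate_raw) {
  constexpr int kMaxNumberOfRetries = 3;
  int failed_allocations = 0;
  int parked_allocations = 0;
  for (int i = 0; i < kMaxNumberOfRetries; i++) {
    // A declined GC request means this thread parked and waited instead.
    if (!collect_garbage()) parked_allocations++;
    if (allocate_raw()) return true;  // Success: hand the object back.
    failed_allocations++;
  }
  // Out of retries: report the counters, then treat it as a fatal OOM.
  std::fprintf(stderr,
               "Background allocation failure: allocations=%d "
               "allocations.parked=%d\n",
               failed_allocations, parked_allocations);
  std::abort();
}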
The callback must not allocate or make any other calls that diff --git a/deps/v8/src/heap/mark-compact-inl.h b/deps/v8/src/heap/mark-compact-inl.h index 2ce0abfe9a4453..1a14b388a0b662 100644 --- a/deps/v8/src/heap/mark-compact-inl.h +++ b/deps/v8/src/heap/mark-compact-inl.h @@ -9,7 +9,9 @@ #include "src/codegen/assembler-inl.h" #include "src/heap/heap-inl.h" #include "src/heap/incremental-marking.h" +#include "src/heap/index-generator.h" #include "src/heap/mark-compact.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/marking-worklist-inl.h" #include "src/heap/marking-worklist.h" #include "src/heap/objects-visiting-inl.h" @@ -42,7 +44,7 @@ void MarkCompactCollector::MarkRootObject(Root root, HeapObject obj) { void MinorMarkCompactCollector::MarkRootObject(HeapObject obj) { if (Heap::InYoungGeneration(obj) && - non_atomic_marking_state_.WhiteToGrey(obj)) { + non_atomic_marking_state()->WhiteToGrey(obj)) { local_marking_worklists_->Push(obj); } } @@ -88,6 +90,16 @@ void MarkCompactCollector::AddTransitionArray(TransitionArray array) { local_weak_objects()->transition_arrays_local.Push(array); } +bool MarkCompactCollector::ShouldMarkObject(HeapObject object) const { + if (V8_LIKELY(!uses_shared_heap_)) return true; + if (v8_flags.shared_space) { + if (is_shared_heap_isolate_) return true; + return !object.InSharedHeap(); + } else { + return is_shared_heap_isolate_ == object.InSharedHeap(); + } +} + template <typename MarkingState> template <typename T, typename TBodyDescriptor> int MainMarkingVisitor<MarkingState>::VisitJSObjectSubclass(Map map, T object) { @@ -199,8 +211,9 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() { // Map might be forwarded during GC. DCHECK(MarkCompactCollector::IsMapOrForwarded(map)); size = black_object.SizeFromMap(map); - CHECK_LE(addr + size, chunk_->area_end()); - Address end = addr + size - kTaggedSize; + int aligned_size = ALIGN_TO_ALLOCATION_ALIGNMENT(size); + CHECK_LE(addr + aligned_size, chunk_->area_end()); + Address end = addr + aligned_size - kTaggedSize; // One word filler objects do not borrow the second mark bit. We have // to jump over the advancing and clearing part. // Note that we know that we are at a one word filler when @@ -231,7 +244,8 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() { map = Map::cast(map_object); DCHECK(map.IsMap(cage_base)); size = object.SizeFromMap(map); - CHECK_LE(addr + size, chunk_->area_end()); + CHECK_LE(addr + ALIGN_TO_ALLOCATION_ALIGNMENT(size), + chunk_->area_end()); } // We found a live object. 
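The new MarkCompactCollector::ShouldMarkObject helper above replaces the old is_shared_heap_ equality test used by the root and body marking visitors. Restated as a standalone predicate, with all four booleans as hypothetical inputs standing in for the collector fields and v8_flags.shared_space:

// Returns whether a collector configured by the first three inputs should
// mark an object whose placement is given by the last input.
bool ShouldMarkObjectModel(bool uses_shared_heap, bool shared_space_enabled,
                           bool is_shared_heap_isolate,
                           bool object_in_shared_heap) {
  // Fast path: no shared heap anywhere, every object is marked.
  if (!uses_shared_heap) return true;
  if (shared_space_enabled) {
    // The shared-space isolate marks everything; client isolates skip
    // objects living in the shared heap.
    if (is_shared_heap_isolate) return true;
    return !object_in_shared_heap;
  }
  // Legacy shared-isolate mode: each side marks only its own objects.
  return is_shared_heap_isolate == object_in_shared_heap;
}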
@@ -281,6 +295,64 @@ typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() { Isolate* CollectorBase::isolate() { return heap()->isolate(); } +class YoungGenerationMarkingTask; + +class PageMarkingItem : public ParallelWorkItem { + public: + explicit PageMarkingItem(MemoryChunk* chunk) : chunk_(chunk) {} + ~PageMarkingItem() = default; + + void Process(YoungGenerationMarkingTask* task); + + private: + inline Heap* heap() { return chunk_->heap(); } + + void MarkUntypedPointers(YoungGenerationMarkingTask* task); + + void MarkTypedPointers(YoungGenerationMarkingTask* task); + + template <typename TSlot> + V8_INLINE SlotCallbackResult + CheckAndMarkObject(YoungGenerationMarkingTask* task, TSlot slot); + + MemoryChunk* chunk_; +}; + +enum class YoungMarkingJobType { kAtomic, kIncremental }; + +class YoungGenerationMarkingJob : public v8::JobTask { + public: + YoungGenerationMarkingJob(Isolate* isolate, Heap* heap, + MarkingWorklists* global_worklists, + std::vector<PageMarkingItem> marking_items, + YoungMarkingJobType young_marking_job_type) + : isolate_(isolate), + heap_(heap), + global_worklists_(global_worklists), + marking_items_(std::move(marking_items)), + remaining_marking_items_(marking_items_.size()), + generator_(marking_items_.size()), + young_marking_job_type_(young_marking_job_type) {} + + void Run(JobDelegate* delegate) override; + size_t GetMaxConcurrency(size_t worker_count) const override; + bool incremental() const { + return young_marking_job_type_ == YoungMarkingJobType::kIncremental; + } + + private: + void ProcessItems(JobDelegate* delegate); + void ProcessMarkingItems(YoungGenerationMarkingTask* task); + + Isolate* isolate_; + Heap* heap_; + MarkingWorklists* global_worklists_; + std::vector<PageMarkingItem> marking_items_; + std::atomic_size_t remaining_marking_items_{0}; + IndexGenerator generator_; + YoungMarkingJobType young_marking_job_type_; +}; + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/heap/mark-compact.cc b/deps/v8/src/heap/mark-compact.cc index 5fd704b6229d38..180aabb5f5e5d6 100644 --- a/deps/v8/src/heap/mark-compact.cc +++ b/deps/v8/src/heap/mark-compact.cc @@ -19,12 +19,14 @@ #include "src/execution/isolate-utils-inl.h" #include "src/execution/isolate-utils.h" #include "src/execution/vm-state-inl.h" +#include "src/flags/flags.h" #include "src/handles/global-handles.h" #include "src/heap/array-buffer-sweeper.h" #include "src/heap/basic-memory-chunk.h" #include "src/heap/code-object-registry.h" #include "src/heap/concurrent-allocator.h" #include "src/heap/evacuation-allocator-inl.h" +#include "src/heap/evacuation-verifier-inl.h" #include "src/heap/gc-tracer-inl.h" #include "src/heap/gc-tracer.h" #include "src/heap/global-handle-marking-visitor.h" @@ -36,6 +38,7 @@ #include "src/heap/large-spaces.h" #include "src/heap/mark-compact-inl.h" #include "src/heap/marking-barrier.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/marking-visitor-inl.h" #include "src/heap/marking-visitor.h" #include "src/heap/memory-chunk-layout.h" @@ -46,6 +49,8 @@ #include "src/heap/object-stats.h" #include "src/heap/objects-visiting-inl.h" #include "src/heap/parallel-work-item.h" +#include "src/heap/pretenuring-handler-inl.h" +#include "src/heap/pretenuring-handler.h" #include "src/heap/read-only-heap.h" #include "src/heap/read-only-spaces.h" #include "src/heap/remembered-set.h" @@ -218,8 +223,7 @@ class FullMarkingVerifier : public MarkingVerifier { public: explicit FullMarkingVerifier(Heap* heap) : MarkingVerifier(heap), - 
marking_state_( - heap->mark_compact_collector()->non_atomic_marking_state()) {} + marking_state_(heap->non_atomic_marking_state()) {} void Run() override { VerifyRoots(); @@ -227,9 +231,11 @@ class FullMarkingVerifier : public MarkingVerifier { VerifyMarking(heap_->new_lo_space()); VerifyMarking(heap_->old_space()); VerifyMarking(heap_->code_space()); + if (heap_->shared_space()) VerifyMarking(heap_->shared_space()); if (heap_->map_space()) VerifyMarking(heap_->map_space()); VerifyMarking(heap_->lo_space()); VerifyMarking(heap_->code_lo_space()); + if (heap_->shared_lo_space()) VerifyMarking(heap_->shared_lo_space()); } protected: @@ -281,9 +287,7 @@ class FullMarkingVerifier : public MarkingVerifier { private: V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) { - if (heap_->IsShared() != - BasicMemoryChunk::FromHeapObject(heap_object)->InSharedHeap()) - return; + if (!ShouldVerifyObject(heap_object)) return; if (heap_->MustBeInSharedOldSpace(heap_object)) { CHECK(heap_->SharedHeapContains(heap_object)); @@ -292,6 +296,12 @@ class FullMarkingVerifier : public MarkingVerifier { CHECK(marking_state_->IsBlack(heap_object)); } + V8_INLINE bool ShouldVerifyObject(HeapObject heap_object) { + const bool in_shared_heap = heap_object.InSharedWritableHeap(); + return heap_->isolate()->is_shared_heap_isolate() ? in_shared_heap + : !in_shared_heap; + } + template <typename TSlot> V8_INLINE void VerifyPointersImpl(TSlot start, TSlot end) { for (TSlot slot = start; slot < end; ++slot) { @@ -303,170 +313,15 @@ class FullMarkingVerifier : public MarkingVerifier { } } - NonAtomicMarkingState* marking_state_; -}; - -class EvacuationVerifier : public ObjectVisitorWithCageBases, - public RootVisitor { - public: - virtual void Run() = 0; - - void VisitPointers(HeapObject host, ObjectSlot start, - ObjectSlot end) override { - VerifyPointers(start, end); - } - - void VisitPointers(HeapObject host, MaybeObjectSlot start, - MaybeObjectSlot end) override { - VerifyPointers(start, end); - } - - void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override { - CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); - VerifyCodePointer(slot); - } - - void VisitRootPointers(Root root, const char* description, - FullObjectSlot start, FullObjectSlot end) override { - VerifyRootPointers(start, end); - } - - void VisitMapPointer(HeapObject object) override { - VerifyMap(object.map(cage_base())); - } - - protected: - explicit EvacuationVerifier(Heap* heap) - : ObjectVisitorWithCageBases(heap), heap_(heap) {} - - inline Heap* heap() { return heap_; } - - virtual void VerifyMap(Map map) = 0; - virtual void VerifyPointers(ObjectSlot start, ObjectSlot end) = 0; - virtual void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) = 0; - virtual void VerifyCodePointer(CodeObjectSlot slot) = 0; - virtual void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) = 0; - - void VerifyRoots(); - void VerifyEvacuationOnPage(Address start, Address end); - void VerifyEvacuation(NewSpace* new_space); - void VerifyEvacuation(PagedSpaceBase* paged_space); - - Heap* heap_; -}; - -void EvacuationVerifier::VerifyRoots() { - heap_->IterateRootsIncludingClients(this, - base::EnumSet<SkipRoot>{SkipRoot::kWeak}); -} - -void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) { - Address current = start; - while (current < end) { - HeapObject object = HeapObject::FromAddress(current); - if (!object.IsFreeSpaceOrFiller(cage_base())) { - object.Iterate(cage_base(), this); - } - current += object.Size(cage_base()); - } -} 
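The EvacuationVerifier implementation being removed here now lives in the new evacuation-verifier.* files listed at the top of this diff. Its page walk is worth restating: objects laid out back to back are visited by advancing a cursor by each object's size, skipping free space and fillers. A minimal standalone model, with FakeObject as a hypothetical stand-in for HeapObject:

#include <cstddef>
#include <cstdio>
#include <vector>

struct FakeObject {
  size_t size;     // Object size in bytes; must be non-zero.
  bool is_filler;  // Models IsFreeSpaceOrFiller().
};

// Walk a "page" of contiguous objects, mirroring VerifyEvacuationOnPage:
// the cursor advances by the size of whatever object sits at it.
void WalkPage(const std::vector<FakeObject>& objects) {
  size_t area_end = 0;
  for (const FakeObject& o : objects) area_end += o.size;
  size_t current = 0;  // Models area_start().
  size_t index = 0;
  while (current < area_end) {
    const FakeObject& object = objects[index++];
    if (!object.is_filler) {
      std::printf("visit object at offset %zu (size %zu)\n", current,
                  object.size);
    }
    current += object.size;  // Mirrors current += object.Size(cage_base()).
  }
}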
- -void EvacuationVerifier::VerifyEvacuation(NewSpace* space) { - if (!space) return; - if (v8_flags.minor_mc) { - VerifyEvacuation(PagedNewSpace::From(space)->paged_space()); - return; - } - PageRange range(space->first_allocatable_address(), space->top()); - for (auto it = range.begin(); it != range.end();) { - Page* page = *(it++); - Address current = page->area_start(); - Address limit = it != range.end() ? page->area_end() : space->top(); - CHECK(limit == space->top() || !page->Contains(space->top())); - VerifyEvacuationOnPage(current, limit); - } -} - -void EvacuationVerifier::VerifyEvacuation(PagedSpaceBase* space) { - for (Page* p : *space) { - if (p->IsEvacuationCandidate()) continue; - if (p->Contains(space->top())) { - CodePageMemoryModificationScope memory_modification_scope(p); - heap_->CreateFillerObjectAt( - space->top(), static_cast<int>(space->limit() - space->top())); - } - VerifyEvacuationOnPage(p->area_start(), p->area_end()); - } -} - -class FullEvacuationVerifier : public EvacuationVerifier { - public: - explicit FullEvacuationVerifier(Heap* heap) : EvacuationVerifier(heap) {} - - void Run() override { - DCHECK(!heap_->mark_compact_collector()->sweeping_in_progress()); - VerifyRoots(); - VerifyEvacuation(heap_->new_space()); - VerifyEvacuation(heap_->old_space()); - VerifyEvacuation(heap_->code_space()); - if (heap_->map_space()) VerifyEvacuation(heap_->map_space()); - } - - protected: - V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) { - if (heap_->IsShared() != - BasicMemoryChunk::FromHeapObject(heap_object)->InSharedHeap()) - return; - - CHECK_IMPLIES(Heap::InYoungGeneration(heap_object), - Heap::InToPage(heap_object)); - CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(heap_object)); - } - - template <typename TSlot> - void VerifyPointersImpl(TSlot start, TSlot end) { - for (TSlot current = start; current < end; ++current) { - typename TSlot::TObject object = current.load(cage_base()); - HeapObject heap_object; - if (object.GetHeapObjectIfStrong(&heap_object)) { - VerifyHeapObjectImpl(heap_object); - } - } - } - void VerifyMap(Map map) override { VerifyHeapObjectImpl(map); } - void VerifyPointers(ObjectSlot start, ObjectSlot end) override { - VerifyPointersImpl(start, end); - } - void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override { - VerifyPointersImpl(start, end); - } - void VerifyCodePointer(CodeObjectSlot slot) override { - CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); - Object maybe_code = slot.load(code_cage_base()); - HeapObject code; - // The slot might contain smi during CodeDataContainer creation, so skip it. 
- if (maybe_code.GetHeapObject(&code)) { - VerifyHeapObjectImpl(code); - } - } - void VisitCodeTarget(Code host, RelocInfo* rinfo) override { - Code target = Code::GetCodeFromTargetAddress(rinfo->target_address()); - VerifyHeapObjectImpl(target); - } - void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override { - VerifyHeapObjectImpl(rinfo->target_object(cage_base())); - } - void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override { - VerifyPointersImpl(start, end); - } + NonAtomicMarkingState* const marking_state_; }; } // namespace #endif // VERIFY_HEAP -// ============================================================================= -// MarkCompactCollectorBase, MinorMarkCompactCollector, MarkCompactCollector -// ============================================================================= +// ================================================================== +// CollectorBase, MinorMarkCompactCollector, MarkCompactCollector +// ================================================================== namespace { @@ -492,8 +347,8 @@ int NumberOfParallelCompactionTasks(Heap* heap) { CollectorBase::CollectorBase(Heap* heap, GarbageCollector collector) : heap_(heap), garbage_collector_(collector), - marking_state_(heap->isolate()), - non_atomic_marking_state_(heap->isolate()) { + marking_state_(heap_->marking_state()), + non_atomic_marking_state_(heap_->non_atomic_marking_state()) { DCHECK_NE(GarbageCollector::SCAVENGER, garbage_collector_); } @@ -501,16 +356,120 @@ bool CollectorBase::IsMajorMC() { return !heap_->IsYoungGenerationCollector(garbage_collector_); } +void CollectorBase::StartSweepSpace(PagedSpace* space) { + DCHECK_NE(NEW_SPACE, space->identity()); + space->ClearAllocatorState(); + + int will_be_swept = 0; + bool unused_page_present = false; + + Sweeper* sweeper = heap()->sweeper(); + + // Loop needs to support deletion if live bytes == 0 for a page. + for (auto it = space->begin(); it != space->end();) { + Page* p = *(it++); + DCHECK(p->SweepingDone()); + + if (p->IsEvacuationCandidate()) { + DCHECK_NE(NEW_SPACE, space->identity()); + // Will be processed in Evacuate. + continue; + } + + // One unused page is kept, all further are released before sweeping them. + if (non_atomic_marking_state()->live_bytes(p) == 0) { + if (unused_page_present) { + if (v8_flags.gc_verbose) { + PrintIsolate(isolate(), "sweeping: released page: %p", + static_cast<void*>(p)); + } + space->ReleasePage(p); + continue; + } + unused_page_present = true; + } + + sweeper->AddPage(space->identity(), p, Sweeper::REGULAR); + will_be_swept++; + } + + if (v8_flags.gc_verbose) { + PrintIsolate(isolate(), "sweeping: space=%s initialized_for_sweeping=%d", + space->name(), will_be_swept); + } +} + +void CollectorBase::StartSweepNewSpace() { + PagedSpaceForNewSpace* paged_space = heap()->paged_new_space()->paged_space(); + paged_space->ClearAllocatorState(); + + int will_be_swept = 0; + + if (heap()->ShouldReduceNewSpaceSize()) { + paged_space->StartShrinking(); + is_new_space_shrinking_ = true; + } + + Sweeper* sweeper = heap()->sweeper(); + + for (auto it = paged_space->begin(); it != paged_space->end();) { + Page* p = *(it++); + DCHECK(p->SweepingDone()); + + if (non_atomic_marking_state()->live_bytes(p) > 0) { + // Non-empty pages will be evacuated/promoted. 
+ continue; + } + + if (is_new_space_shrinking_ && paged_space->ShouldReleasePage()) { + paged_space->ReleasePage(p); + } else { + sweeper->AddNewSpacePage(p); + } + will_be_swept++; + } + + if (v8_flags.gc_verbose) { + PrintIsolate(isolate(), "sweeping: space=%s initialized_for_sweeping=%d", + paged_space->name(), will_be_swept); + } +} + +void CollectorBase::SweepLargeSpace(LargeObjectSpace* space) { + auto* marking_state = heap()->non_atomic_marking_state(); + PtrComprCageBase cage_base(heap()->isolate()); + size_t surviving_object_size = 0; + for (auto it = space->begin(); it != space->end();) { + LargePage* current = *(it++); + HeapObject object = current->GetObject(); + DCHECK(!marking_state->IsGrey(object)); + if (!marking_state->IsBlack(object)) { + // Object is dead and page can be released. + space->RemovePage(current); + heap()->memory_allocator()->Free(MemoryAllocator::FreeMode::kConcurrently, + current); + + continue; + } + Marking::MarkWhite(non_atomic_marking_state()->MarkBitFrom(object)); + current->ProgressBar().ResetIfEnabled(); + non_atomic_marking_state()->SetLiveBytes(current, 0); + surviving_object_size += static_cast<size_t>(object.Size(cage_base)); + } + space->set_objects_size(surviving_object_size); +} + MarkCompactCollector::MarkCompactCollector(Heap* heap) : CollectorBase(heap, GarbageCollector::MARK_COMPACTOR), #ifdef DEBUG state_(IDLE), #endif - is_shared_heap_(heap->IsShared()), - sweeper_(new Sweeper(heap, non_atomic_marking_state())) { + uses_shared_heap_(isolate()->has_shared_heap() || isolate()->is_shared()), + is_shared_heap_isolate_(isolate()->is_shared_heap_isolate()), + sweeper_(heap_->sweeper()) { } -MarkCompactCollector::~MarkCompactCollector() { delete sweeper_; } +MarkCompactCollector::~MarkCompactCollector() = default; void MarkCompactCollector::SetUp() { DCHECK_EQ(0, strcmp(Marking::kWhiteBitPattern, "00")); @@ -529,7 +488,6 @@ void MarkCompactCollector::TearDown() { local_weak_objects()->Publish(); weak_objects()->Clear(); } - sweeper()->TearDown(); } // static @@ -585,6 +543,10 @@ bool MarkCompactCollector::StartCompaction(StartCompactionMode mode) { CollectEvacuationCandidates(heap()->map_space()); } + if (heap()->shared_space()) { + CollectEvacuationCandidates(heap()->shared_space()); + } + if (v8_flags.compact_code_space && (!heap()->IsGCWithStack() || v8_flags.compact_code_space_with_stack)) { CollectEvacuationCandidates(heap()->code_space()); @@ -702,74 +664,6 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() { #endif // VERIFY_HEAP -void MarkCompactCollector::FinishSweepingIfOutOfWork() { - if (sweeper()->sweeping_in_progress() && v8_flags.concurrent_sweeping && - !sweeper()->AreSweeperTasksRunning()) { - // At this point we know that all concurrent sweeping tasks have run - // out of work and quit: all pages are swept. The main thread still needs - // to complete sweeping though. - EnsureSweepingCompleted(SweepingForcedFinalizationMode::kV8Only); - } - if (heap()->cpp_heap()) { - // Ensure that sweeping is also completed for the C++ managed heap, if one - // exists and it's out of work. 
- CppHeap::From(heap()->cpp_heap())->FinishSweepingIfOutOfWork(); - } -} - -void MarkCompactCollector::EnsureSweepingCompleted( - SweepingForcedFinalizationMode mode) { - if (sweeper()->sweeping_in_progress()) { - TRACE_GC_EPOCH(heap()->tracer(), GCTracer::Scope::MC_COMPLETE_SWEEPING, - ThreadKind::kMain); - - sweeper()->EnsureCompleted(); - heap()->old_space()->RefillFreeList(sweeper()); - { - CodePageHeaderModificationScope rwx_write_scope( - "Updating per-page stats stored in page headers requires write " - "access to Code page headers"); - heap()->code_space()->RefillFreeList(sweeper()); - } - if (heap()->map_space()) { - heap()->map_space()->RefillFreeList(sweeper()); - heap()->map_space()->SortFreeList(); - } - - heap()->tracer()->NotifySweepingCompleted(); - -#ifdef VERIFY_HEAP - if (v8_flags.verify_heap && !evacuation()) { - FullEvacuationVerifier verifier(heap()); - verifier.Run(); - } -#endif - } - - if (mode == SweepingForcedFinalizationMode::kUnifiedHeap && - heap()->cpp_heap()) { - // Ensure that sweeping is also completed for the C++ managed heap, if one - // exists. - CppHeap::From(heap()->cpp_heap())->FinishSweepingIfRunning(); - DCHECK( - !CppHeap::From(heap()->cpp_heap())->sweeper().IsSweepingInProgress()); - } - - DCHECK_IMPLIES(mode == SweepingForcedFinalizationMode::kUnifiedHeap || - !heap()->cpp_heap(), - !heap()->tracer()->IsSweepingInProgress()); -} - -void MarkCompactCollector::EnsurePageIsSwept(Page* page) { - sweeper()->EnsurePageIsSwept(page); -} - -void MarkCompactCollector::DrainSweepingWorklistForSpace( - AllocationSpace space) { - if (!sweeper()->sweeping_in_progress()) return; - sweeper()->DrainSweepingWorklistForSpace(space); -} - void MarkCompactCollector::ComputeEvacuationHeuristics( size_t area_size, int* target_fragmentation_percent, size_t* max_evacuated_bytes) { @@ -820,7 +714,7 @@ void MarkCompactCollector::ComputeEvacuationHeuristics( void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE || - space->identity() == MAP_SPACE); + space->identity() == MAP_SPACE || space->identity() == SHARED_SPACE); int number_of_pages = space->CountTotalPages(); size_t area_size = space->AreaSize(); @@ -854,7 +748,7 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) { CodePageHeaderModificationScope rwx_write_scope( "Modification of Code page header flags requires write access"); - DCHECK(!sweeping_in_progress()); + DCHECK(!sweeper()->sweeping_in_progress()); Page* owner_of_linear_allocation_area = space->top() == space->limit() ? nullptr @@ -1002,7 +896,7 @@ void MarkCompactCollector::Prepare() { state_ = PREPARE_GC; #endif - DCHECK(!sweeping_in_progress()); + DCHECK(!sweeper()->sweeping_in_progress()); // Unmapper tasks needs to be stopped during the GC, otherwise pages queued // for freeing might get unmapped during the GC. 
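CollectorBase::StartSweepSpace above applies a simple page policy while queuing sweeping work: evacuation candidates are skipped, exactly one fully-empty page is kept on hand, and every further empty page is released back to the allocator before the rest go to the sweeper. The same policy as a standalone planning function over hypothetical PageInfo records:

#include <cstddef>
#include <vector>

struct PageInfo {
  size_t live_bytes;
  bool evacuation_candidate;
};

enum class PageAction { kSkip, kRelease, kSweep };

std::vector<PageAction> PlanSweep(const std::vector<PageInfo>& pages) {
  std::vector<PageAction> plan;
  bool unused_page_present = false;
  for (const PageInfo& page : pages) {
    if (page.evacuation_candidate) {
      plan.push_back(PageAction::kSkip);  // Handled during evacuation.
      continue;
    }
    if (page.live_bytes == 0) {
      if (unused_page_present) {
        plan.push_back(PageAction::kRelease);  // Second+ empty page.
        continue;
      }
      unused_page_present = true;  // First empty page is kept and swept.
    }
    plan.push_back(PageAction::kSweep);
  }
  return plan;
}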
@@ -1031,22 +925,8 @@ void MarkCompactCollector::Prepare() { heap_->FreeLinearAllocationAreas(); - PagedSpaceIterator spaces(heap()); - for (PagedSpace* space = spaces.Next(); space != nullptr; - space = spaces.Next()) { - space->PrepareForMarkCompact(); - } - - // All objects are guaranteed to be initialized in atomic pause - if (heap()->new_lo_space()) { - heap()->new_lo_space()->ResetPendingObject(); - } - NewSpace* new_space = heap()->new_space(); if (new_space) { - if (v8_flags.minor_mc) { - PagedNewSpace::From(new_space)->paged_space()->PrepareForMarkCompact(); - } DCHECK_EQ(new_space->top(), new_space->original_top_acquire()); } } @@ -1081,6 +961,7 @@ void MarkCompactCollector::VerifyMarking() { heap()->old_space()->VerifyLiveBytes(); if (heap()->map_space()) heap()->map_space()->VerifyLiveBytes(); heap()->code_space()->VerifyLiveBytes(); + if (heap()->shared_space()) heap()->shared_space()->VerifyLiveBytes(); if (v8_flags.minor_mc && heap()->paged_new_space()) heap()->paged_new_space()->paged_space()->VerifyLiveBytes(); } @@ -1105,16 +986,34 @@ void ShrinkPagesToObjectSizes(Heap* heap, OldLargeObjectSpace* space) { } // namespace void MarkCompactCollector::Finish() { + { + TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_SWEEP); + if (heap()->new_lo_space()) { + TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_SWEEP_NEW_LO); + SweepLargeSpace(heap()->new_lo_space()); + } + + if (v8_flags.minor_mc && heap()->new_space()) { + // Keep new space sweeping atomic. + GCTracer::Scope sweep_scope(heap()->tracer(), + GCTracer::Scope::MC_SWEEP_FINISH_NEW, + ThreadKind::kMain); + sweeper()->ParallelSweepSpace(NEW_SPACE, + Sweeper::SweepingMode::kEagerDuringGC, 0); + heap()->paged_new_space()->paged_space()->RefillFreeList(); + } + +#ifdef DEBUG + heap()->VerifyCountersBeforeConcurrentSweeping(); +#endif + } + TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_FINISH); heap()->isolate()->global_handles()->ClearListOfYoungNodes(); SweepArrayBufferExtensions(); -#ifdef DEBUG - heap()->VerifyCountersBeforeConcurrentSweeping(); -#endif - marking_visitor_.reset(); local_marking_worklists_.reset(); marking_worklists_.ReleaseContextWorklists(); @@ -1126,23 +1025,6 @@ void MarkCompactCollector::Finish() { local_weak_objects_.reset(); weak_objects_.next_ephemerons.Clear(); - if (heap()->new_lo_space()) { - GCTracer::Scope sweep_scope(heap()->tracer(), - GCTracer::Scope::MC_FINISH_SWEEP_NEW_LO, - ThreadKind::kMain); - SweepLargeSpace(heap()->new_lo_space()); - } - - if (v8_flags.minor_mc && heap()->new_space()) { - // Keep new space sweeping atomic. 
- GCTracer::Scope sweep_scope(heap()->tracer(), - GCTracer::Scope::MC_FINISH_SWEEP_NEW, - ThreadKind::kMain); - sweeper()->ParallelSweepSpace(NEW_SPACE, - Sweeper::SweepingMode::kEagerDuringGC, 0); - heap()->paged_new_space()->paged_space()->RefillFreeList(sweeper()); - } - sweeper()->StartSweeperTasks(); // Ensure unmapper tasks are stopped such that queued pages aren't freed @@ -1175,7 +1057,7 @@ void MarkCompactCollector::SweepArrayBufferExtensions() { class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor { public: explicit RootMarkingVisitor(MarkCompactCollector* collector) - : collector_(collector), is_shared_heap_(collector->is_shared_heap()) {} + : collector_(collector) {} void VisitRootPointer(Root root, const char* description, FullObjectSlot p) final { @@ -1231,14 +1113,11 @@ class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor { Object object = *p; if (!object.IsHeapObject()) return; HeapObject heap_object = HeapObject::cast(object); - BasicMemoryChunk* target_page = - BasicMemoryChunk::FromHeapObject(heap_object); - if (is_shared_heap_ != target_page->InSharedHeap()) return; + if (!collector_->ShouldMarkObject(heap_object)) return; collector_->MarkRootObject(root, heap_object); } MarkCompactCollector* const collector_; - const bool is_shared_heap_; }; // This visitor is used to visit the body of special objects held alive by @@ -1300,12 +1179,7 @@ class MarkCompactCollector::CustomRootBodyMarkingVisitor final V8_INLINE void MarkObject(HeapObject host, Object object) { if (!object.IsHeapObject()) return; HeapObject heap_object = HeapObject::cast(object); - // We use this visitor both in client and shared GCs. The client GC should - // not mark objects in the shared heap. In shared GCs we are marking each - // client's top stack frame, so it is actually legal to encounter references - // into the client heap here in a shared GC. We need to bail out in these - // cases as well. - if (collector_->is_shared_heap() != heap_object.InSharedHeap()) return; + if (!collector_->ShouldMarkObject(heap_object)) return; collector_->MarkObject(host, heap_object); } @@ -1394,7 +1268,7 @@ class InternalizedStringTableCleaner final : public RootVisitor { OffHeapObjectSlot end) override { DCHECK_EQ(root, Root::kStringTable); // Visit all HeapObject pointers in [start, end). - auto* marking_state = heap_->mark_compact_collector()->marking_state(); + auto* marking_state = heap_->marking_state(); Isolate* isolate = heap_->isolate(); for (OffHeapObjectSlot p = start; p < end; ++p) { Object o = p.load(isolate); @@ -1424,8 +1298,7 @@ class ExternalStringTableCleaner : public RootVisitor { void VisitRootPointers(Root root, const char* description, FullObjectSlot start, FullObjectSlot end) override { // Visit all HeapObject pointers in [start, end). 
- NonAtomicMarkingState* marking_state = - heap_->mark_compact_collector()->non_atomic_marking_state(); + NonAtomicMarkingState* marking_state = heap_->non_atomic_marking_state(); Object the_hole = ReadOnlyRoots(heap_).the_hole_value(); for (FullObjectSlot p = start; p < end; ++p) { Object o = *p; @@ -1552,10 +1425,9 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer { class RecordMigratedSlotVisitor : public ObjectVisitorWithCageBases { public: explicit RecordMigratedSlotVisitor( - MarkCompactCollector* collector, - EphemeronRememberedSet* ephemeron_remembered_set) - : ObjectVisitorWithCageBases(collector->isolate()), - collector_(collector), + Heap* heap, EphemeronRememberedSet* ephemeron_remembered_set) + : ObjectVisitorWithCageBases(heap->isolate()), + heap_(heap), ephemeron_remembered_set_(ephemeron_remembered_set) {} inline void VisitPointer(HeapObject host, ObjectSlot p) final { @@ -1623,7 +1495,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitorWithCageBases { // the old-to-new remembered set. DCHECK(!Heap::InYoungGeneration(target)); DCHECK(!target.InSharedWritableHeap()); - collector_->RecordRelocSlot(host, rinfo, target); + heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, target); } inline void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override { @@ -1632,12 +1504,11 @@ class RecordMigratedSlotVisitor : public ObjectVisitorWithCageBases { HeapObject object = rinfo->target_object(cage_base()); GenerationalBarrierForCode(host, rinfo, object); WriteBarrier::Shared(host, rinfo, object); - collector_->RecordRelocSlot(host, rinfo, object); + heap_->mark_compact_collector()->RecordRelocSlot(host, rinfo, object); } // Entries that are skipped for recording. inline void VisitExternalReference(Code host, RelocInfo* rinfo) final {} - inline void VisitRuntimeEntry(Code host, RelocInfo* rinfo) final {} inline void VisitInternalReference(Code host, RelocInfo* rinfo) final {} inline void VisitExternalPointer(HeapObject host, ExternalPointerSlot slot, ExternalPointerTag tag) final {} @@ -1667,14 +1538,14 @@ class RecordMigratedSlotVisitor : public ObjectVisitorWithCageBases { RememberedSet<OLD_TO_OLD>::Insert<AccessMode::NON_ATOMIC>( MemoryChunk::FromHeapObject(host), slot); } - } else if (p->InSharedHeap() && !collector_->is_shared_heap()) { + } else if (p->InSharedHeap() && !host.InSharedWritableHeap()) { RememberedSet<OLD_TO_SHARED>::Insert<AccessMode::NON_ATOMIC>( MemoryChunk::FromHeapObject(host), slot); } } } - MarkCompactCollector* collector_; + Heap* const heap_; EphemeronRememberedSet* ephemeron_remembered_set_; }; @@ -1778,7 +1649,7 @@ class EvacuateVisitorBase : public HeapObjectVisitor { if (V8_UNLIKELY(v8_flags.minor_mc)) { base->record_visitor_->MarkArrayBufferExtensionPromoted(dst); } - } else if (dest == MAP_SPACE) { + } else if (dest == MAP_SPACE || dest == SHARED_SPACE) { DCHECK_OBJECT_SIZE(size); DCHECK(IsAligned(size, kTaggedSize)); base->heap_->CopyBlock(dst_addr, src_addr, size); @@ -1832,8 +1703,7 @@ class EvacuateVisitorBase : public HeapObjectVisitor { Map map = object.map(cage_base()); AllocationAlignment alignment = HeapObject::RequiredAlignment(map); AllocationResult allocation; - if (ShouldPromoteIntoSharedHeap(map)) { - DCHECK_EQ(target_space, OLD_SPACE); + if (target_space == OLD_SPACE && ShouldPromoteIntoSharedHeap(map)) { DCHECK_NOT_NULL(shared_old_allocator_); allocation = shared_old_allocator_->AllocateRaw(size, alignment, AllocationOrigin::kGC); @@ -1890,13 +1760,14 @@ class EvacuateNewSpaceVisitor final : public 
EvacuateVisitorBase { Heap* heap, EvacuationAllocator* local_allocator, ConcurrentAllocator* shared_old_allocator, RecordMigratedSlotVisitor* record_visitor, - Heap::PretenuringFeedbackMap* local_pretenuring_feedback, + PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback, AlwaysPromoteYoung always_promote_young) : EvacuateVisitorBase(heap, local_allocator, shared_old_allocator, record_visitor), buffer_(LocalAllocationBuffer::InvalidBuffer()), promoted_size_(0), semispace_copied_size_(0), + pretenuring_handler_(heap_->pretenuring_handler()), local_pretenuring_feedback_(local_pretenuring_feedback), is_incremental_marking_(heap->incremental_marking()->IsMarking()), always_promote_young_(always_promote_young) {} @@ -1906,8 +1777,8 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase { HeapObject target_object; if (always_promote_young_ == AlwaysPromoteYoung::kYes) { - heap_->UpdateAllocationSite(object.map(), object, - local_pretenuring_feedback_); + pretenuring_handler_->UpdateAllocationSite(object.map(), object, + local_pretenuring_feedback_); if (!TryEvacuateObject(OLD_SPACE, object, size, &target_object)) { heap_->FatalProcessOutOfMemory( @@ -1918,17 +1789,18 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase { return true; } + DCHECK(!v8_flags.minor_mc); + if (heap_->new_space()->ShouldBePromoted(object.address()) && TryEvacuateObject(OLD_SPACE, object, size, &target_object)) { // Full GCs use AlwaysPromoteYoung::kYes above and MinorMC should never // move objects. - DCHECK(!v8_flags.minor_mc); promoted_size_ += size; return true; } - heap_->UpdateAllocationSite(object.map(), object, - local_pretenuring_feedback_); + pretenuring_handler_->UpdateAllocationSite(object.map(), object, + local_pretenuring_feedback_); HeapObject target; AllocationSpace space = AllocateTargetObject(object, size, &target); @@ -1990,7 +1862,8 @@ class EvacuateNewSpaceVisitor final : public EvacuateVisitorBase { LocalAllocationBuffer buffer_; intptr_t promoted_size_; intptr_t semispace_copied_size_; - Heap::PretenuringFeedbackMap* local_pretenuring_feedback_; + PretenturingHandler* const pretenuring_handler_; + PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback_; bool is_incremental_marking_; AlwaysPromoteYoung always_promote_young_; }; @@ -2000,10 +1873,11 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor { public: explicit EvacuateNewSpacePageVisitor( Heap* heap, RecordMigratedSlotVisitor* record_visitor, - Heap::PretenuringFeedbackMap* local_pretenuring_feedback) + PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback) : heap_(heap), record_visitor_(record_visitor), moved_bytes_(0), + pretenuring_handler_(heap_->pretenuring_handler()), local_pretenuring_feedback_(local_pretenuring_feedback) {} static void Move(Page* page) { @@ -2022,12 +1896,12 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor { inline bool Visit(HeapObject object, int size) override { if (mode == NEW_TO_NEW) { DCHECK(!v8_flags.minor_mc); - heap_->UpdateAllocationSite(object.map(), object, - local_pretenuring_feedback_); + pretenuring_handler_->UpdateAllocationSite(object.map(), object, + local_pretenuring_feedback_); } else if (mode == NEW_TO_OLD) { if (v8_flags.minor_mc) { - heap_->UpdateAllocationSite(object.map(), object, - local_pretenuring_feedback_); + pretenuring_handler_->UpdateAllocationSite(object.map(), object, + local_pretenuring_feedback_); } DCHECK_IMPLIES(V8_EXTERNAL_CODE_SPACE_BOOL, 
!IsCodeSpaceObject(object)); PtrComprCageBase cage_base = GetPtrComprCageBase(object); @@ -2046,7 +1920,8 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor { Heap* heap_; RecordMigratedSlotVisitor* record_visitor_; intptr_t moved_bytes_; - Heap::PretenuringFeedbackMap* local_pretenuring_feedback_; + PretenturingHandler* const pretenuring_handler_; + PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback_; }; class EvacuateOldSpaceVisitor final : public EvacuateVisitorBase { @@ -2091,8 +1966,7 @@ class EvacuateRecordOnlyVisitor final : public HeapObjectVisitor { } inline bool Visit(HeapObject object, int size) override { - RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector(), - &heap_->ephemeron_remembered_set_); + RecordMigratedSlotVisitor visitor(heap_, &heap_->ephemeron_remembered_set_); Map map = object.map(cage_base()); // Instead of calling object.IterateFast(cage_base(), &visitor) here // we can shortcut and use the precomputed size value passed to the visitor. @@ -2126,7 +2000,7 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor, // Custom marking for top optimized frame. ProcessTopOptimizedFrame(custom_root_body_visitor, isolate()); - if (isolate()->is_shared()) { + if (isolate()->is_shared_heap_isolate()) { isolate()->global_safepoint()->IterateClientIsolates( [this, custom_root_body_visitor](Isolate* client) { ProcessTopOptimizedFrame(custom_root_body_visitor, client); @@ -2142,7 +2016,7 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor, heap_->local_embedder_heap_tracer()->embedder_stack_state() == cppgc::EmbedderStackState::kMayContainHeapPointers) { GlobalHandleMarkingVisitor global_handles_marker( - *heap_, marking_state_, *local_marking_worklists_); + *heap_, *local_marking_worklists_); stack.IteratePointers(&global_handles_marker); } } @@ -2246,9 +2120,14 @@ Address MarkCompactCollector::FindBasePtrForMarking(Address maybe_inner_ptr) { if (chunk->IsLargePage()) return chunk->area_start(); // Otherwise, we have a pointer inside a normal page. const Page* page = static_cast<const Page*>(chunk); + // If it is in the young generation "from" semispace, it is not used and we + // must ignore it, as its markbits may not be clean. + if (page->IsFromPage()) return kNullAddress; + // Try to find the address of a previous valid object on this page. Address base_ptr = FindPreviousObjectForConservativeMarking(page, maybe_inner_ptr); - // If the markbit is set, then we have an object that does not need be marked. + // If the markbit is set, then we have an object that does not need to be + // marked. if (base_ptr == kNullAddress) return kNullAddress; // Iterate through the objects in the page forwards, until we find the object // containing maybe_inner_ptr. @@ -2271,10 +2150,11 @@ void MarkCompactCollector::MarkRootsFromStack(RootVisitor* root_visitor) { } void MarkCompactCollector::MarkObjectsFromClientHeaps() { - if (!isolate()->is_shared()) return; + if (!isolate()->is_shared_heap_isolate()) return; isolate()->global_safepoint()->IterateClientIsolates( [collector = this](Isolate* client) { + if (client->is_shared_heap_isolate()) return; collector->MarkObjectsFromClientHeap(client); }); } @@ -2290,6 +2170,9 @@ void MarkCompactCollector::MarkObjectsFromClientHeap(Isolate* client) { PtrComprCageBase cage_base(client); Heap* heap = client->heap(); + // Ensure new space is iterable. 
+ heap->MakeHeapIterable(); + if (heap->new_space()) { std::unique_ptr<ObjectIterator> iterator = heap->new_space()->GetObjectIterator(heap); @@ -2312,6 +2195,9 @@ void MarkCompactCollector::MarkObjectsFromClientHeap(Isolate* client) { // find all incoming pointers into the shared heap. OldGenerationMemoryChunkIterator chunk_iterator(heap); + // Tracking OLD_TO_SHARED requires the write barrier. + DCHECK(!v8_flags.disable_write_barriers); + for (MemoryChunk* chunk = chunk_iterator.next(); chunk; chunk = chunk_iterator.next()) { InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToShared( @@ -2333,6 +2219,18 @@ void MarkCompactCollector::MarkObjectsFromClientHeap(Isolate* client) { }, SlotSet::FREE_EMPTY_BUCKETS); chunk->ReleaseInvalidatedSlots<OLD_TO_SHARED>(); + + RememberedSet<OLD_TO_SHARED>::IterateTyped( + chunk, [collector = this, heap](SlotType slot_type, Address slot) { + HeapObject heap_object = + UpdateTypedSlotHelper::GetTargetObject(heap, slot_type, slot); + if (heap_object.InSharedWritableHeap()) { + collector->MarkRootObject(Root::kClientHeap, heap_object); + return KEEP_SLOT; + } else { + return REMOVE_SLOT; + } + }); } #ifdef V8_COMPRESS_POINTERS @@ -2934,8 +2832,7 @@ class StringForwardingTableCleaner final { explicit StringForwardingTableCleaner(Heap* heap) : heap_(heap), isolate_(heap_->isolate()), - marking_state_( - heap_->mark_compact_collector()->non_atomic_marking_state()) {} + marking_state_(heap_->non_atomic_marking_state()) {} void Run() { StringForwardingTable* forwarding_table = isolate_->string_forwarding_table(); @@ -3013,9 +2910,9 @@ class StringForwardingTableCleaner final { ThinString::cast(original_string).RawField(ThinString::kActualOffset); MarkCompactCollector::RecordSlot(original_string, slot, forward_string); } - Heap* heap_; - Isolate* isolate_; - NonAtomicMarkingState* marking_state_; + Heap* const heap_; + Isolate* const isolate_; + NonAtomicMarkingState* const marking_state_; }; } // namespace @@ -3192,13 +3089,16 @@ void MarkCompactCollector::FlushBytecodeFromSFI( // Replace bytecode array with an uncompiled data array. HeapObject compiled_data = shared_info.GetBytecodeArray(isolate()); Address compiled_data_start = compiled_data.address(); - int compiled_data_size = compiled_data.Size(); + int compiled_data_size = ALIGN_TO_ALLOCATION_ALIGNMENT(compiled_data.Size()); MemoryChunk* chunk = MemoryChunk::FromAddress(compiled_data_start); // Clear any recorded slots for the compiled data as being invalid. RememberedSet<OLD_TO_NEW>::RemoveRange( chunk, compiled_data_start, compiled_data_start + compiled_data_size, SlotSet::FREE_EMPTY_BUCKETS); + RememberedSet<OLD_TO_SHARED>::RemoveRange( + chunk, compiled_data_start, compiled_data_start + compiled_data_size, + SlotSet::FREE_EMPTY_BUCKETS); RememberedSet<OLD_TO_OLD>::RemoveRange( chunk, compiled_data_start, compiled_data_start + compiled_data_size, SlotSet::FREE_EMPTY_BUCKETS); @@ -3211,9 +3111,11 @@ void MarkCompactCollector::FlushBytecodeFromSFI( // Create a filler object for any left over space in the bytecode array. if (!heap()->IsLargeObject(compiled_data)) { + const int aligned_filler_offset = + ALIGN_TO_ALLOCATION_ALIGNMENT(UncompiledDataWithoutPreparseData::kSize); heap()->CreateFillerObjectAt( - compiled_data.address() + UncompiledDataWithoutPreparseData::kSize, - compiled_data_size - UncompiledDataWithoutPreparseData::kSize); + compiled_data.address() + aligned_filler_offset, + compiled_data_size - aligned_filler_offset); } // Initialize the uncompiled data. 
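In MarkObjectsFromClientHeap above, each recorded OLD_TO_SHARED slot is kept only while its target still lives in the shared writable heap; any other slot is pruned, and surviving targets are marked as client-heap roots. A minimal sketch of that keep-or-remove filtering, with Slot as a hypothetical stand-in for a remembered-set entry:

#include <vector>

struct Slot {
  bool target_in_shared_heap;  // Models heap_object.InSharedWritableHeap().
};

// Walk the recorded slots: mark targets still in the shared heap (KEEP_SLOT)
// and erase everything else (REMOVE_SLOT), like the IterateTyped lambda above.
void FilterOldToShared(std::vector<Slot>& slots,
                       void (*mark_client_root)(const Slot&)) {
  auto it = slots.begin();
  while (it != slots.end()) {
    if (it->target_in_shared_heap) {
      mark_client_root(*it);  // Models MarkRootObject(Root::kClientHeap, ...).
      ++it;
    } else {
      it = slots.erase(it);
    }
  }
}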
@@ -3226,8 +3128,8 @@ void MarkCompactCollector::FlushBytecodeFromSFI( // Mark the uncompiled data as black, and ensure all fields have already been // marked. - DCHECK(marking_state()->IsBlackOrGrey(inferred_name) || - (!is_shared_heap() && inferred_name.InSharedWritableHeap())); + DCHECK(!ShouldMarkObject(inferred_name) || + marking_state()->IsBlackOrGrey(inferred_name)); marking_state()->WhiteToBlack(uncompiled_data); // Use the raw function data setter to avoid validity checks, since we're @@ -3452,9 +3354,25 @@ void MarkCompactCollector::RightTrimDescriptorArray(DescriptorArray array, MemoryChunk* chunk = MemoryChunk::FromHeapObject(array); RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, end, SlotSet::FREE_EMPTY_BUCKETS); + RememberedSet<OLD_TO_SHARED>::RemoveRange(chunk, start, end, + SlotSet::FREE_EMPTY_BUCKETS); RememberedSet<OLD_TO_OLD>::RemoveRange(chunk, start, end, SlotSet::FREE_EMPTY_BUCKETS); - heap()->CreateFillerObjectAt(start, static_cast<int>(end - start)); + if (V8_COMPRESS_POINTERS_8GB_BOOL) { + Address aligned_start = ALIGN_TO_ALLOCATION_ALIGNMENT(start); + Address aligned_end = ALIGN_TO_ALLOCATION_ALIGNMENT(end); + if (aligned_start < aligned_end) { + heap()->CreateFillerObjectAt( + aligned_start, static_cast<int>(aligned_end - aligned_start)); + } + if (Heap::ShouldZapGarbage()) { + Address zap_end = std::min(aligned_start, end); + MemsetTagged(ObjectSlot(start), Object(static_cast<Address>(kZapValue)), + (zap_end - start) >> kTaggedSizeLog2); + } + } else { + heap()->CreateFillerObjectAt(start, static_cast<int>(end - start)); + } array.set_number_of_all_descriptors(new_nof_all_descriptors); } @@ -3509,14 +3427,14 @@ void MarkCompactCollector::ClearWeakCollections() { if (value.IsHeapObject()) { HeapObject heap_object = HeapObject::cast(value); CHECK_IMPLIES( - (!is_shared_heap_ && key.InSharedHeap()) || + !ShouldMarkObject(key) || non_atomic_marking_state()->IsBlackOrGrey(key), - (!is_shared_heap_ && heap_object.InSharedHeap()) || + !ShouldMarkObject(heap_object) || non_atomic_marking_state()->IsBlackOrGrey(heap_object)); } } #endif - if (!is_shared_heap_ && key.InSharedHeap()) continue; + if (!ShouldMarkObject(key)) continue; if (!non_atomic_marking_state()->IsBlackOrGrey(key)) { table.RemoveEntry(i); } @@ -3580,7 +3498,7 @@ void MarkCompactCollector::ClearJSWeakRefs() { }; HeapObject target = HeapObject::cast(weak_cell.target()); if (!non_atomic_marking_state()->IsBlackOrGrey(target)) { - DCHECK(!target.IsUndefined()); + DCHECK(target.CanBeHeldWeakly()); // The value of the WeakCell is dead. JSFinalizationRegistry finalization_registry = JSFinalizationRegistry::cast(weak_cell.finalization_registry()); @@ -3602,6 +3520,7 @@ void MarkCompactCollector::ClearJSWeakRefs() { HeapObject unregister_token = weak_cell.unregister_token(); if (!non_atomic_marking_state()->IsBlackOrGrey(unregister_token)) { + DCHECK(unregister_token.CanBeHeldWeakly()); // The unregister token is dead. Remove any corresponding entries in the // key map. 
Multiple WeakCell with the same token will have all their // unregister_token field set to undefined when processing the first @@ -3610,7 +3529,7 @@ void MarkCompactCollector::ClearJSWeakRefs() { JSFinalizationRegistry finalization_registry = JSFinalizationRegistry::cast(weak_cell.finalization_registry()); finalization_registry.RemoveUnregisterToken( - JSReceiver::cast(unregister_token), isolate(), + unregister_token, isolate(), JSFinalizationRegistry::kKeepMatchedCellsInRegistry, gc_notify_updated_slot); } else { @@ -3664,11 +3583,9 @@ MarkCompactCollector::ProcessRelocInfo(Code host, RelocInfo* rinfo, slot_type = SlotType::kCodeEntry; } else if (RelocInfo::IsFullEmbeddedObject(rmode)) { slot_type = SlotType::kEmbeddedObjectFull; - } else if (RelocInfo::IsCompressedEmbeddedObject(rmode)) { - slot_type = SlotType::kEmbeddedObjectCompressed; } else { - DCHECK(RelocInfo::IsDataEmbeddedObject(rmode)); - slot_type = SlotType::kEmbeddedObjectData; + DCHECK(RelocInfo::IsCompressedEmbeddedObject(rmode)); + slot_type = SlotType::kEmbeddedObjectCompressed; } } @@ -3959,71 +3876,6 @@ class PointersUpdatingVisitor final : public ObjectVisitorWithCageBases, } }; -#ifdef VERIFY_HEAP -// Visitor for updating root pointers and to-space pointers. -// It does not expect to encounter pointers to dead objects. -class ClientHeapVerifier final : public ObjectVisitorWithCageBases { - public: - explicit ClientHeapVerifier(Heap* heap) : ObjectVisitorWithCageBases(heap) {} - - void VisitPointer(HeapObject host, ObjectSlot p) override { - VerifySlot(cage_base(), p); - } - - void VisitPointer(HeapObject host, MaybeObjectSlot p) override { - VerifySlot(cage_base(), p); - } - - void VisitPointers(HeapObject host, ObjectSlot start, - ObjectSlot end) override { - for (ObjectSlot p = start; p < end; ++p) { - VerifySlot(cage_base(), p); - } - } - - void VisitPointers(HeapObject host, MaybeObjectSlot start, - MaybeObjectSlot end) final { - for (MaybeObjectSlot p = start; p < end; ++p) { - VerifySlot(cage_base(), p); - } - } - - void VisitMapPointer(HeapObject host) override { - VerifySlot(cage_base(), host.map_slot()); - } - - void VisitCodePointer(HeapObject host, CodeObjectSlot slot) override { - VerifySlot(code_cage_base(), ObjectSlot(slot.address())); - } - - void VisitCodeTarget(Code host, RelocInfo* rinfo) override {} - - void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override {} - - private: - void VerifySlot(PtrComprCageBase cage_base, ObjectSlot slot) { - HeapObject heap_object; - if (slot.load(cage_base).GetHeapObject(&heap_object)) { - VerifyHeapObject(heap_object); - } - } - - void VerifySlot(PtrComprCageBase cage_base, MaybeObjectSlot slot) { - HeapObject heap_object; - if (slot.load(cage_base).GetHeapObject(&heap_object)) { - VerifyHeapObject(heap_object); - } - } - - void VerifyHeapObject(HeapObject heap_object) { - if (BasicMemoryChunk::FromHeapObject(heap_object)->InReadOnlySpace()) - return; - if (!heap_object.InSharedHeap()) return; - CHECK(!heap_object.map_word(kRelaxedLoad).IsForwardingAddress()); - } -}; -#endif // VERIFY_HEAP - static String UpdateReferenceInExternalStringTableEntry(Heap* heap, FullObjectSlot p) { HeapObject old_string = HeapObject::cast(*p); @@ -4097,6 +3949,15 @@ void MarkCompactCollector::EvacuateEpilogue() { DCHECK_NULL((chunk->slot_set<OLD_TO_NEW, AccessMode::ATOMIC>())); DCHECK_NULL((chunk->typed_slot_set<OLD_TO_NEW, AccessMode::ATOMIC>())); + // Old-to-shared slots may survive GC but there should never be any slots in + // new or shared spaces. 
+ AllocationSpace id = chunk->owner_identity(); + if (id == SHARED_SPACE || id == SHARED_LO_SPACE || id == NEW_SPACE || + id == NEW_LO_SPACE || isolate()->is_shared()) { + DCHECK_NULL((chunk->slot_set<OLD_TO_SHARED, AccessMode::ATOMIC>())); + DCHECK_NULL((chunk->typed_slot_set<OLD_TO_SHARED, AccessMode::ATOMIC>())); + } + // GCs need to filter invalidated slots. DCHECK_NULL(chunk->invalidated_slots<OLD_TO_OLD>()); DCHECK_NULL(chunk->invalidated_slots<OLD_TO_NEW>()); @@ -4107,8 +3968,8 @@ void MarkCompactCollector::EvacuateEpilogue() { namespace { ConcurrentAllocator* CreateSharedOldAllocator(Heap* heap) { - if (v8_flags.shared_string_table && heap->isolate()->shared_isolate()) { - return new ConcurrentAllocator(nullptr, heap->shared_old_space()); + if (v8_flags.shared_string_table && heap->isolate()->has_shared_heap()) { + return new ConcurrentAllocator(nullptr, heap->shared_allocation_space()); } return nullptr; @@ -4158,7 +4019,8 @@ class Evacuator : public Malloced { EvacuationAllocator* local_allocator, AlwaysPromoteYoung always_promote_young) : heap_(heap), - local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity), + local_pretenuring_feedback_( + PretenturingHandler::kInitialFeedbackCapacity), shared_old_allocator_(CreateSharedOldAllocator(heap_)), new_space_visitor_(heap_, local_allocator, shared_old_allocator_.get(), record_visitor, &local_pretenuring_feedback_, @@ -4191,10 +4053,8 @@ class Evacuator : public Malloced { virtual GCTracer::Scope::ScopeId GetTracingScope() = 0; protected: - static const int kInitialLocalPretenuringFeedbackCapacity = 256; - // |saved_live_bytes| returns the live bytes of the page that was processed. - virtual void RawEvacuatePage(MemoryChunk* chunk, + virtual bool RawEvacuatePage(MemoryChunk* chunk, intptr_t* saved_live_bytes) = 0; inline Heap* heap() { return heap_; } @@ -4206,7 +4066,7 @@ class Evacuator : public Malloced { Heap* heap_; - Heap::PretenuringFeedbackMap local_pretenuring_feedback_; + PretenturingHandler::PretenuringFeedbackMap local_pretenuring_feedback_; // Allocator for the shared heap. 
std::unique_ptr<ConcurrentAllocator> shared_old_allocator_; @@ -4232,10 +4092,11 @@ void Evacuator::EvacuatePage(MemoryChunk* chunk) { DCHECK(chunk->SweepingDone()); intptr_t saved_live_bytes = 0; double evacuation_time = 0.0; + bool success = false; { AlwaysAllocateScope always_allocate(heap()); TimedScope timed_scope(&evacuation_time); - RawEvacuatePage(chunk, &saved_live_bytes); + success = RawEvacuatePage(chunk, &saved_live_bytes); } ReportCompactionProgress(evacuation_time, saved_live_bytes); if (v8_flags.trace_evacuation) { @@ -4249,8 +4110,7 @@ void Evacuator::EvacuatePage(MemoryChunk* chunk) { chunk->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION), chunk->IsFlagSet(MemoryChunk::IS_EXECUTABLE), heap()->new_space()->IsPromotionCandidate(chunk), - saved_live_bytes, evacuation_time, - chunk->IsFlagSet(Page::COMPACTION_WAS_ABORTED)); + saved_live_bytes, evacuation_time, success); } } @@ -4260,7 +4120,7 @@ void Evacuator::Finalize() { heap()->tracer()->AddCompactionEvent(duration_, bytes_compacted_); heap()->IncrementPromotedObjectsSize(new_space_visitor_.promoted_size() + new_to_old_page_visitor_.moved_bytes()); - heap()->IncrementSemiSpaceCopiedObjectSize( + heap()->IncrementNewSpaceSurvivingObjectSize( new_space_visitor_.semispace_copied_size() + new_to_new_page_visitor_.moved_bytes()); heap()->IncrementYoungSurvivorsCounter( @@ -4268,18 +4128,18 @@ void Evacuator::Finalize() { new_space_visitor_.semispace_copied_size() + new_to_old_page_visitor_.moved_bytes() + new_to_new_page_visitor_.moved_bytes()); - heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_); + heap()->pretenuring_handler()->MergeAllocationSitePretenuringFeedback( + local_pretenuring_feedback_); } class FullEvacuator : public Evacuator { public: - explicit FullEvacuator(MarkCompactCollector* collector) - : Evacuator(collector->heap(), &record_visitor_, &local_allocator_, + explicit FullEvacuator(Heap* heap) + : Evacuator(heap, &record_visitor_, &local_allocator_, AlwaysPromoteYoung::kYes), - record_visitor_(collector, &ephemeron_remembered_set_), + record_visitor_(heap_, &ephemeron_remembered_set_), local_allocator_(heap_, - CompactionSpaceKind::kCompactionSpaceForMarkCompact), - collector_(collector) {} + CompactionSpaceKind::kCompactionSpaceForMarkCompact) {} GCTracer::Scope::ScopeId GetBackgroundTracingScope() override { return GCTracer::Scope::MC_BACKGROUND_EVACUATE_COPY; @@ -4307,18 +4167,16 @@ class FullEvacuator : public Evacuator { } protected: - void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override; + bool RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override; EphemeronRememberedSet ephemeron_remembered_set_; RecordMigratedSlotVisitor record_visitor_; EvacuationAllocator local_allocator_; - - MarkCompactCollector* collector_; }; -void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) { +bool FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) { const EvacuationMode evacuation_mode = ComputeEvacuationMode(chunk); - NonAtomicMarkingState* marking_state = collector_->non_atomic_marking_state(); + NonAtomicMarkingState* marking_state = heap_->non_atomic_marking_state(); *live_bytes = marking_state->live_bytes(chunk); TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "FullEvacuator::RawEvacuatePage", "evacuation_mode", @@ -4363,13 +4221,17 @@ void FullEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) { } else { // Aborted compaction page. Actual processing happens on the main // thread for simplicity reasons. 
- collector_->ReportAbortedEvacuationCandidateDueToOOM( - failed_object.address(), static_cast<Page*>(chunk)); + heap_->mark_compact_collector() + ->ReportAbortedEvacuationCandidateDueToOOM( + failed_object.address(), static_cast<Page*>(chunk)); + return false; } } break; } } + + return true; } class PageEvacuationJob : public v8::JobTask { @@ -4433,20 +4295,19 @@ class PageEvacuationJob : public v8::JobTask { }; namespace { -template <class Evacuator, class Collector> +template <class Evacuator> size_t CreateAndExecuteEvacuationTasks( - Collector* collector, + Heap* heap, std::vector<std::pair<ParallelWorkItem, MemoryChunk*>> evacuation_items, MigrationObserver* migration_observer) { base::Optional<ProfilingMigrationObserver> profiling_observer; - if (collector->isolate()->log_object_relocation()) { - profiling_observer.emplace(collector->heap()); + if (heap->isolate()->log_object_relocation()) { + profiling_observer.emplace(heap); } std::vector<std::unique_ptr<v8::internal::Evacuator>> evacuators; - const int wanted_num_tasks = - NumberOfParallelCompactionTasks(collector->heap()); + const int wanted_num_tasks = NumberOfParallelCompactionTasks(heap); for (int i = 0; i < wanted_num_tasks; i++) { - auto evacuator = std::make_unique<Evacuator>(collector); + auto evacuator = std::make_unique<Evacuator>(heap); if (profiling_observer) { evacuator->AddObserver(&profiling_observer.value()); } @@ -4458,7 +4319,7 @@ size_t CreateAndExecuteEvacuationTasks( V8::GetCurrentPlatform() ->CreateJob( v8::TaskPriority::kUserBlocking, - std::make_unique<PageEvacuationJob>(collector->isolate(), &evacuators, + std::make_unique<PageEvacuationJob>(heap->isolate(), &evacuators, std::move(evacuation_items))) ->Join(); for (auto& evacuator : evacuators) { @@ -4469,13 +4330,16 @@ size_t CreateAndExecuteEvacuationTasks( bool ShouldMovePage(Page* p, intptr_t live_bytes, intptr_t wasted_bytes, MemoryReductionMode memory_reduction_mode, - AlwaysPromoteYoung always_promote_young) { + AlwaysPromoteYoung always_promote_young, + PromoteUnusablePages promote_unusable_pages) { Heap* heap = p->heap(); return v8_flags.page_promotion && (memory_reduction_mode == MemoryReductionMode::kNone) && !p->NeverEvacuate() && - (live_bytes + wasted_bytes > - Evacuator::NewSpacePageEvacuationThreshold()) && + ((live_bytes + wasted_bytes > + Evacuator::NewSpacePageEvacuationThreshold()) || + (promote_unusable_pages == PromoteUnusablePages::kYes && + !p->WasUsedForAllocation())) && (always_promote_young == AlwaysPromoteYoung::kYes || heap->new_space()->IsPromotionCandidate(p)) && heap->CanExpandOldGeneration(live_bytes); @@ -4516,7 +4380,7 @@ void MarkCompactCollector::EvacuatePagesInParallel() { heap()->ShouldReduceMemory() ? MemoryReductionMode::kShouldReduceMemory : MemoryReductionMode::kNone; if (ShouldMovePage(page, live_bytes_on_page, 0, memory_reduction_mode, - AlwaysPromoteYoung::kYes) || + AlwaysPromoteYoung::kYes, PromoteUnusablePages::kNo) || force_page_promotion) { EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page); DCHECK_EQ(heap()->old_space(), page->owner()); @@ -4563,8 +4427,7 @@ void MarkCompactCollector::EvacuatePagesInParallel() { // Promote young generation large objects. 
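// Sketch: the new PromoteUnusablePages parameter to ShouldMovePage (hunk
// above) adds a second way for a young-generation page to be promoted
// wholesale: it was never used for allocation and the caller opted in.
// Stand-in restatement with the page-flag checks reduced to bools; the
// threshold stands for Evacuator::NewSpacePageEvacuationThreshold().
#include <cstdint>

enum class PromoteUnusablePages { kYes, kNo };

bool ShouldMovePage(intptr_t live_bytes, intptr_t wasted_bytes,
                    bool was_used_for_allocation,
                    PromoteUnusablePages promote_unusable_pages,
                    intptr_t threshold) {
  return (live_bytes + wasted_bytes > threshold) ||
         (promote_unusable_pages == PromoteUnusablePages::kYes &&
          !was_used_for_allocation);
}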
if (auto* new_lo_space = heap()->new_lo_space()) { - auto* marking_state = - heap()->incremental_marking()->non_atomic_marking_state(); + auto* marking_state = heap()->non_atomic_marking_state(); for (auto it = new_lo_space->begin(); it != new_lo_space->end();) { LargePage* current = *(it++); HeapObject object = current->GetObject(); @@ -4587,7 +4450,7 @@ void MarkCompactCollector::EvacuatePagesInParallel() { evacuation_items.size()); wanted_num_tasks = CreateAndExecuteEvacuationTasks<FullEvacuator>( - this, std::move(evacuation_items), nullptr); + heap(), std::move(evacuation_items), nullptr); } const size_t aborted_pages = PostProcessAbortedEvacuationCandidates(); @@ -4667,7 +4530,7 @@ void LiveObjectVisitor::RecomputeLiveBytes(MemoryChunk* chunk, int new_live_size = 0; for (auto object_and_size : LiveObjectRange<kAllLiveObjects>(chunk, marking_state->bitmap(chunk))) { - new_live_size += object_and_size.second; + new_live_size += ALIGN_TO_ALLOCATION_ALIGNMENT(object_and_size.second); } marking_state->SetLiveBytes(chunk, new_live_size); } @@ -4683,19 +4546,12 @@ void MarkCompactCollector::Evacuate() { { TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_COPY); - EvacuationScope evacuation_scope(this); + EvacuationScope evacuation_scope(heap()); EvacuatePagesInParallel(); } UpdatePointersAfterEvacuation(); - if (heap()->new_space()) { - TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_REBALANCE); - if (!heap()->new_space()->EnsureCurrentCapacity()) { - heap()->FatalProcessOutOfMemory("NewSpace::Rebalance"); - } - } - { TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_CLEAN_UP); @@ -4709,11 +4565,31 @@ void MarkCompactCollector::Evacuate() { } else if (v8_flags.minor_mc) { // Sweep non-promoted pages to add them back to the free list. DCHECK_EQ(NEW_SPACE, p->owner_identity()); - sweeper()->AddPage(NEW_SPACE, p, Sweeper::REGULAR); + DCHECK_EQ(0, non_atomic_marking_state()->live_bytes(p)); + DCHECK(p->SweepingDone()); + PagedNewSpace* space = heap()->paged_new_space(); + if (is_new_space_shrinking_ && space->ShouldReleasePage()) { + space->ReleasePage(p); + } else { + sweeper()->AddNewSpacePage(p); + } } } new_space_evacuation_pages_.clear(); + if (is_new_space_shrinking_) { + DCHECK(v8_flags.minor_mc); + heap()->paged_new_space()->FinishShrinking(); + is_new_space_shrinking_ = false; + } + + if (heap()->new_space()) { + TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_REBALANCE); + if (!heap()->new_space()->EnsureCurrentCapacity()) { + heap()->FatalProcessOutOfMemory("NewSpace::Rebalance"); + } + } + for (LargePage* p : promoted_large_pages_) { DCHECK(p->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)); p->ClearFlag(Page::PAGE_NEW_OLD_PROMOTION); @@ -4874,7 +4750,9 @@ class RememberedSetUpdatingItem : public UpdatingItem { : heap_(heap), marking_state_(marking_state), chunk_(chunk), - updating_mode_(updating_mode) {} + updating_mode_(updating_mode), + record_old_to_shared_slots_(heap->isolate()->has_shared_heap() && + !chunk->InSharedHeap()) {} ~RememberedSetUpdatingItem() override = default; void Process() override { @@ -4988,7 +4866,6 @@ class RememberedSetUpdatingItem : public UpdatingItem { } void UpdateUntypedPointers() { - const bool has_shared_isolate = this->heap_->isolate()->shared_isolate(); const PtrComprCageBase cage_base = heap_->isolate(); if (chunk_->slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) { // Marking bits are cleared already when the page is already swept. 
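// Sketch: RememberedSetUpdatingItem (constructor hunk above) now decides
// once, at construction, whether old-to-shared slots must be recorded —
// the isolate has a shared heap and the chunk itself is not in it —
// instead of querying shared_isolate() inside every slot callback.
// Stand-in types:
struct Chunk { bool in_shared_heap; };
struct Isolate { bool has_shared_heap; };

class UpdatingItem {
 public:
  UpdatingItem(const Isolate* isolate, Chunk* chunk)
      : chunk_(chunk),
        record_old_to_shared_slots_(isolate->has_shared_heap &&
                                    !chunk->in_shared_heap) {}

  void ProcessSlot(void* slot, void (*check_old_to_shared)(Chunk*, void*)) {
    // ... update the slot itself ...
    if (record_old_to_shared_slots_) check_old_to_shared(chunk_, slot);
  }

 private:
  Chunk* const chunk_;
  const bool record_old_to_shared_slots_;  // computed once, read per slot
};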
This @@ -5003,12 +4880,12 @@ class RememberedSetUpdatingItem : public UpdatingItem { InvalidatedSlotsFilter::OldToNew(chunk_, liveness_check); int slots = RememberedSet<OLD_TO_NEW>::Iterate( chunk_, - [this, &filter, has_shared_isolate, cage_base](MaybeObjectSlot slot) { + [this, &filter, cage_base](MaybeObjectSlot slot) { if (!filter.IsValid(slot.address())) return REMOVE_SLOT; SlotCallbackResult result = CheckAndUpdateOldToNewSlot(slot); // A new space string might have been promoted into the shared heap // during GC. - if (has_shared_isolate) { + if (record_old_to_shared_slots_) { CheckSlotForOldToSharedUntyped(cage_base, chunk_, slot); } return result; @@ -5034,12 +4911,12 @@ class RememberedSetUpdatingItem : public UpdatingItem { chunk_, InvalidatedSlotsFilter::LivenessCheck::kNo); RememberedSet<OLD_TO_OLD>::Iterate( chunk_, - [this, has_shared_isolate, &filter, cage_base](MaybeObjectSlot slot) { + [this, &filter, cage_base](MaybeObjectSlot slot) { if (filter.IsValid(slot.address())) { UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot); // A string might have been promoted into the shared heap during // GC. - if (has_shared_isolate) { + if (record_old_to_shared_slots_) { CheckSlotForOldToSharedUntyped(cage_base, chunk_, slot); } } @@ -5104,7 +4981,6 @@ class RememberedSetUpdatingItem : public UpdatingItem { } void UpdateTypedPointers() { - const bool has_shared_isolate = heap_->isolate()->shared_isolate(); if (chunk_->typed_slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() != nullptr) { CHECK_NE(chunk_->owner(), heap_->map_space()); @@ -5113,14 +4989,13 @@ class RememberedSetUpdatingItem : public UpdatingItem { return CheckAndUpdateOldToNewSlot(slot); }; RememberedSet<OLD_TO_NEW>::IterateTyped( - chunk_, - [this, has_shared_isolate, &check_and_update_old_to_new_slot_fn]( - SlotType slot_type, Address slot) { + chunk_, [this, &check_and_update_old_to_new_slot_fn]( + SlotType slot_type, Address slot) { SlotCallbackResult result = UpdateTypedSlotHelper::UpdateTypedSlot( heap_, slot_type, slot, check_and_update_old_to_new_slot_fn); // A new space string might have been promoted into the shared heap // during GC. - if (has_shared_isolate) { + if (record_old_to_shared_slots_) { CheckSlotForOldToSharedTyped(chunk_, slot_type, slot); } return result; @@ -5131,7 +5006,7 @@ class RememberedSetUpdatingItem : public UpdatingItem { nullptr)) { CHECK_NE(chunk_->owner(), heap_->map_space()); RememberedSet<OLD_TO_OLD>::IterateTyped( - chunk_, [this, has_shared_isolate](SlotType slot_type, Address slot) { + chunk_, [this](SlotType slot_type, Address slot) { // Using UpdateStrongSlot is OK here, because there are no weak // typed slots. PtrComprCageBase cage_base = heap_->isolate(); @@ -5143,7 +5018,7 @@ class RememberedSetUpdatingItem : public UpdatingItem { return KEEP_SLOT; }); // A string might have been promoted into the shared heap during GC. 
- if (has_shared_isolate) { + if (record_old_to_shared_slots_) { CheckSlotForOldToSharedTyped(chunk_, slot_type, slot); } return result; @@ -5156,6 +5031,7 @@ class RememberedSetUpdatingItem : public UpdatingItem { MarkingState* marking_state_; MemoryChunk* chunk_; RememberedSetUpdatingMode updating_mode_; + const bool record_old_to_shared_slots_; }; std::unique_ptr<UpdatingItem> @@ -5294,11 +5170,21 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() { CollectRememberedSetUpdatingItems(this, &updating_items, heap()->code_space(), RememberedSetUpdatingMode::ALL); + if (heap()->shared_space()) { + CollectRememberedSetUpdatingItems(this, &updating_items, + heap()->shared_space(), + RememberedSetUpdatingMode::ALL); + } CollectRememberedSetUpdatingItems(this, &updating_items, heap()->lo_space(), RememberedSetUpdatingMode::ALL); CollectRememberedSetUpdatingItems(this, &updating_items, heap()->code_lo_space(), RememberedSetUpdatingMode::ALL); + if (heap()->shared_lo_space()) { + CollectRememberedSetUpdatingItems(this, &updating_items, + heap()->shared_lo_space(), + RememberedSetUpdatingMode::ALL); + } if (heap()->map_space()) { CollectRememberedSetUpdatingItems(this, &updating_items, heap()->map_space(), @@ -5336,10 +5222,12 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() { } void MarkCompactCollector::UpdatePointersInClientHeaps() { - if (!isolate()->is_shared()) return; + if (!isolate()->is_shared_heap_isolate()) return; - isolate()->global_safepoint()->IterateClientIsolates( - [this](Isolate* client) { UpdatePointersInClientHeap(client); }); + isolate()->global_safepoint()->IterateClientIsolates([this](Isolate* client) { + if (client->is_shared_heap_isolate()) return; + UpdatePointersInClientHeap(client); + }); } void MarkCompactCollector::UpdatePointersInClientHeap(Isolate* client) { @@ -5372,25 +5260,10 @@ void MarkCompactCollector::UpdatePointersInClientHeap(Isolate* client) { }); if (chunk->InYoungGeneration()) chunk->ReleaseTypedSlotSet<OLD_TO_SHARED>(); } - -#ifdef VERIFY_HEAP - if (v8_flags.verify_heap) { - ClientHeapVerifier verifier_visitor(client->heap()); - - HeapObjectIterator iterator(client->heap(), - HeapObjectIterator::kNoFiltering); - for (HeapObject obj = iterator.Next(); !obj.is_null(); - obj = iterator.Next()) { - obj.IterateFast(cage_base, &verifier_visitor); - } - } -#endif // VERIFY_HEAP } void MarkCompactCollector::ReportAbortedEvacuationCandidateDueToOOM( Address failed_start, Page* page) { - DCHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED)); - page->SetFlag(Page::COMPACTION_WAS_ABORTED); base::MutexGuard guard(&mutex_); aborted_evacuation_candidates_due_to_oom_.push_back( std::make_pair(failed_start, page)); @@ -5407,11 +5280,10 @@ void MarkCompactCollector::ReportAbortedEvacuationCandidateDueToFlags( namespace { -void ReRecordPage(Heap* heap, - v8::internal::NonAtomicMarkingState* marking_state, - Address failed_start, Page* page) { +void ReRecordPage(Heap* heap, Address failed_start, Page* page) { DCHECK(page->IsFlagSet(Page::COMPACTION_WAS_ABORTED)); + NonAtomicMarkingState* marking_state = heap->non_atomic_marking_state(); // Aborted compaction page. We have to record slots here, since we // might not have recorded them in first place. 
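// Sketch: with a shared *space* (rather than a separate shared isolate),
// the shared-heap isolate can show up in its own client list, so the
// pointer update above now skips it explicitly. Stand-in model:
#include <functional>
#include <vector>

struct Isolate {
  bool is_shared_heap_isolate = false;
  std::vector<Isolate*> clients;
  void IterateClientIsolates(const std::function<void(Isolate*)>& f) {
    for (Isolate* c : clients) f(c);
  }
};

void UpdatePointersInClientHeap(Isolate*) { /* per-client slot walk */ }

void UpdatePointersInClientHeaps(Isolate* self) {
  if (!self->is_shared_heap_isolate) return;
  self->IterateClientIsolates([](Isolate* client) {
    if (client->is_shared_heap_isolate) return;  // owner is not a client
    UpdatePointersInClientHeap(client);
  });
}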
@@ -5457,12 +5329,15 @@ size_t MarkCompactCollector::PostProcessAbortedEvacuationCandidates() { CHECK_IMPLIES(v8_flags.crash_on_aborted_evacuation, aborted_evacuation_candidates_due_to_oom_.empty()); for (auto start_and_page : aborted_evacuation_candidates_due_to_oom_) { - ReRecordPage(heap(), non_atomic_marking_state(), start_and_page.first, - start_and_page.second); + Page* page = start_and_page.second; + DCHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED)); + page->SetFlag(Page::COMPACTION_WAS_ABORTED); + } + for (auto start_and_page : aborted_evacuation_candidates_due_to_oom_) { + ReRecordPage(heap(), start_and_page.first, start_and_page.second); } for (auto start_and_page : aborted_evacuation_candidates_due_to_flags_) { - ReRecordPage(heap(), non_atomic_marking_state(), start_and_page.first, - start_and_page.second); + ReRecordPage(heap(), start_and_page.first, start_and_page.second); } const size_t aborted_pages = aborted_evacuation_candidates_due_to_oom_.size() + @@ -5476,163 +5351,72 @@ size_t MarkCompactCollector::PostProcessAbortedEvacuationCandidates() { // the evacuation candidate flag the page is again in a regular state. p->ClearEvacuationCandidate(); aborted_pages_verified++; - } else { - DCHECK(p->IsEvacuationCandidate()); - DCHECK(p->SweepingDone()); - } - } - DCHECK_EQ(aborted_pages_verified, aborted_pages); - USE(aborted_pages_verified); - return aborted_pages; -} - -void MarkCompactCollector::ReleaseEvacuationCandidates() { - for (Page* p : old_space_evacuation_pages_) { - if (!p->IsEvacuationCandidate()) continue; - PagedSpace* space = static_cast<PagedSpace*>(p->owner()); - non_atomic_marking_state()->SetLiveBytes(p, 0); - CHECK(p->SweepingDone()); - space->memory_chunk_list().Remove(p); - space->ReleasePage(p); - } - old_space_evacuation_pages_.clear(); - compacting_ = false; -} - -void MarkCompactCollector::SweepLargeSpace(LargeObjectSpace* space) { - auto* marking_state = - heap()->incremental_marking()->non_atomic_marking_state(); - PtrComprCageBase cage_base(heap()->isolate()); - size_t surviving_object_size = 0; - for (auto it = space->begin(); it != space->end();) { - LargePage* current = *(it++); - HeapObject object = current->GetObject(); - DCHECK(!marking_state->IsGrey(object)); - if (!marking_state->IsBlack(object)) { - // Object is dead and page can be released. - space->RemovePage(current); - heap()->memory_allocator()->Free(MemoryAllocator::FreeMode::kConcurrently, - current); - - continue; - } - Marking::MarkWhite(non_atomic_marking_state()->MarkBitFrom(object)); - current->ProgressBar().ResetIfEnabled(); - non_atomic_marking_state()->SetLiveBytes(current, 0); - surviving_object_size += static_cast<size_t>(object.Size(cage_base)); - } - space->set_objects_size(surviving_object_size); -} - -void MarkCompactCollector::StartSweepSpace(PagedSpace* space) { - space->ClearAllocatorState(); - - int will_be_swept = 0; - bool unused_page_present = false; - - // Loop needs to support deletion if live bytes == 0 for a page. - for (auto it = space->begin(); it != space->end();) { - Page* p = *(it++); - DCHECK(p->SweepingDone()); - - if (p->IsEvacuationCandidate()) { - // Will be processed in Evacuate. - DCHECK(!evacuation_candidates_.empty()); - continue; - } - - // One unused page is kept, all further are released before sweeping them. 
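// Sketch: PostProcessAbortedEvacuationCandidates (hunk above) now raises
// COMPACTION_WAS_ABORTED for all OOM-aborted pages in a first pass before
// any slot re-recording, since ReRecordPage DCHECKs that the flag is
// already set; previously the reporting path set the flag itself.
// Condensed stand-in:
#include <cstdint>
#include <utility>
#include <vector>

using Address = uintptr_t;
struct Page { bool compaction_was_aborted = false; };

void ReRecordPage(Address /*failed_start*/, Page* page) {
  // DCHECK(page->compaction_was_aborted);
  // ... re-record slots of the aborted page ...
}

void PostProcessAborted(std::vector<std::pair<Address, Page*>>& due_to_oom) {
  for (auto& entry : due_to_oom)
    entry.second->compaction_was_aborted = true;  // pass 1: flags only
  for (auto& entry : due_to_oom)
    ReRecordPage(entry.first, entry.second);      // pass 2: re-recording
}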
- if (non_atomic_marking_state()->live_bytes(p) == 0) { - if (unused_page_present) { - if (v8_flags.gc_verbose) { - PrintIsolate(isolate(), "sweeping: released page: %p", - static_cast<void*>(p)); - } - space->memory_chunk_list().Remove(p); - space->ReleasePage(p); - continue; - } - unused_page_present = true; - } - - sweeper()->AddPage(space->identity(), p, Sweeper::REGULAR); - will_be_swept++; - } - - if (v8_flags.gc_verbose) { - PrintIsolate(isolate(), "sweeping: space=%s initialized_for_sweeping=%d", - space->name(), will_be_swept); - } -} - -void MarkCompactCollector::StartSweepNewSpace() { - PagedSpaceBase* paged_space = heap()->paged_new_space()->paged_space(); - paged_space->ClearAllocatorState(); - - int will_be_swept = 0; - - for (auto it = paged_space->begin(); it != paged_space->end();) { - Page* p = *(it++); - DCHECK(p->SweepingDone()); - - if (non_atomic_marking_state()->live_bytes(p) > 0) { - // Non-empty pages will be evacuated/promoted. - continue; + } else { + DCHECK(p->IsEvacuationCandidate()); + DCHECK(p->SweepingDone()); } - - // New space preallocates all its pages. Don't free empty pages since they - // will just be reallocated. - DCHECK_EQ(NEW_SPACE, paged_space->identity()); - sweeper_->AddPage(NEW_SPACE, p, Sweeper::REGULAR); - will_be_swept++; } + DCHECK_EQ(aborted_pages_verified, aborted_pages); + USE(aborted_pages_verified); + return aborted_pages; +} - if (v8_flags.gc_verbose) { - PrintIsolate(isolate(), "sweeping: space=%s initialized_for_sweeping=%d", - paged_space->name(), will_be_swept); +void MarkCompactCollector::ReleaseEvacuationCandidates() { + for (Page* p : old_space_evacuation_pages_) { + if (!p->IsEvacuationCandidate()) continue; + PagedSpace* space = static_cast<PagedSpace*>(p->owner()); + non_atomic_marking_state()->SetLiveBytes(p, 0); + CHECK(p->SweepingDone()); + space->ReleasePage(p); } + old_space_evacuation_pages_.clear(); + compacting_ = false; } void MarkCompactCollector::Sweep() { + DCHECK(!sweeper()->sweeping_in_progress()); TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_SWEEP); #ifdef DEBUG state_ = SWEEP_SPACES; #endif { - { - GCTracer::Scope sweep_scope( - heap()->tracer(), GCTracer::Scope::MC_SWEEP_LO, ThreadKind::kMain); - SweepLargeSpace(heap()->lo_space()); - } - { - GCTracer::Scope sweep_scope(heap()->tracer(), - GCTracer::Scope::MC_SWEEP_CODE_LO, - ThreadKind::kMain); - SweepLargeSpace(heap()->code_lo_space()); - } - { - GCTracer::Scope sweep_scope( - heap()->tracer(), GCTracer::Scope::MC_SWEEP_OLD, ThreadKind::kMain); - StartSweepSpace(heap()->old_space()); - } - { - GCTracer::Scope sweep_scope( - heap()->tracer(), GCTracer::Scope::MC_SWEEP_CODE, ThreadKind::kMain); - StartSweepSpace(heap()->code_space()); - } - if (heap()->map_space()) { - GCTracer::Scope sweep_scope( - heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP, ThreadKind::kMain); - StartSweepSpace(heap()->map_space()); - } - if (v8_flags.minor_mc && heap()->new_space()) { - GCTracer::Scope sweep_scope( - heap()->tracer(), GCTracer::Scope::MC_SWEEP_NEW, ThreadKind::kMain); - StartSweepNewSpace(); - } - sweeper()->StartSweeping(); + GCTracer::Scope sweep_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP_LO, + ThreadKind::kMain); + SweepLargeSpace(heap()->lo_space()); + } + { + GCTracer::Scope sweep_scope( + heap()->tracer(), GCTracer::Scope::MC_SWEEP_CODE_LO, ThreadKind::kMain); + SweepLargeSpace(heap()->code_lo_space()); + } + { + GCTracer::Scope sweep_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP_OLD, + ThreadKind::kMain); + StartSweepSpace(heap()->old_space()); + 
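// Sketch: the StartSweepSpace body above keeps exactly one fully-empty
// page per space for reuse and releases every further empty page instead
// of sweeping it. Standalone restatement:
#include <vector>

struct Page { long live_bytes = 0; };

int StartSweepSpace(std::vector<Page*>& pages) {
  int will_be_swept = 0;
  bool unused_page_present = false;
  for (auto it = pages.begin(); it != pages.end();) {
    Page* p = *it;
    if (p->live_bytes == 0) {
      if (unused_page_present) {
        it = pages.erase(it);  // release all but the first empty page
        continue;
      }
      unused_page_present = true;
    }
    ++will_be_swept;  // remaining pages are handed to the sweeper
    ++it;
  }
  return will_be_swept;
}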
} + { + GCTracer::Scope sweep_scope( + heap()->tracer(), GCTracer::Scope::MC_SWEEP_CODE, ThreadKind::kMain); + StartSweepSpace(heap()->code_space()); + } + if (heap()->map_space()) { + GCTracer::Scope sweep_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP, + ThreadKind::kMain); + StartSweepSpace(heap()->map_space()); + } + if (heap()->shared_space()) { + GCTracer::Scope sweep_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP, + ThreadKind::kMain); + StartSweepSpace(heap()->shared_space()); } + if (v8_flags.minor_mc && heap()->new_space()) { + GCTracer::Scope sweep_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP_NEW, + ThreadKind::kMain); + StartSweepNewSpace(); + } + + sweeper()->StartSweeping(garbage_collector_); } namespace { @@ -5643,8 +5427,7 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier { public: explicit YoungGenerationMarkingVerifier(Heap* heap) : MarkingVerifier(heap), - marking_state_( - heap->minor_mark_compact_collector()->non_atomic_marking_state()) {} + marking_state_(heap->non_atomic_marking_state()) {} ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap( const MemoryChunk* chunk) override { @@ -5708,75 +5491,15 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier { } } - NonAtomicMarkingState* marking_state_; -}; - -class YoungGenerationEvacuationVerifier : public EvacuationVerifier { - public: - explicit YoungGenerationEvacuationVerifier(Heap* heap) - : EvacuationVerifier(heap) {} - - void Run() override { - DCHECK(!heap_->mark_compact_collector()->sweeping_in_progress()); - DCHECK(!heap_->minor_mark_compact_collector()->sweeping_in_progress()); - VerifyRoots(); - VerifyEvacuation(heap_->new_space()); - VerifyEvacuation(heap_->old_space()); - VerifyEvacuation(heap_->code_space()); - if (heap_->map_space()) VerifyEvacuation(heap_->map_space()); - } - - protected: - V8_INLINE void VerifyHeapObjectImpl(HeapObject heap_object) { - CHECK_IMPLIES(Heap::InYoungGeneration(heap_object), - Heap::InToPage(heap_object)); - } - - template <typename TSlot> - void VerifyPointersImpl(TSlot start, TSlot end) { - for (TSlot current = start; current < end; ++current) { - typename TSlot::TObject object = current.load(cage_base()); - HeapObject heap_object; - if (object.GetHeapObject(&heap_object)) { - VerifyHeapObjectImpl(heap_object); - } - } - } - void VerifyMap(Map map) override { VerifyHeapObjectImpl(map); } - void VerifyPointers(ObjectSlot start, ObjectSlot end) override { - VerifyPointersImpl(start, end); - } - void VerifyPointers(MaybeObjectSlot start, MaybeObjectSlot end) override { - VerifyPointersImpl(start, end); - } - void VerifyCodePointer(CodeObjectSlot slot) override { - CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); - Object maybe_code = slot.load(code_cage_base()); - HeapObject code; - // The slot might contain smi during CodeDataContainer creation, so skip it. 
- if (maybe_code.GetHeapObject(&code)) { - VerifyHeapObjectImpl(code); - } - } - void VisitCodeTarget(Code host, RelocInfo* rinfo) override { - Code target = Code::GetCodeFromTargetAddress(rinfo->target_address()); - VerifyHeapObjectImpl(target); - } - void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) override { - VerifyHeapObjectImpl(rinfo->target_object(cage_base())); - } - void VerifyRootPointers(FullObjectSlot start, FullObjectSlot end) override { - VerifyPointersImpl(start, end); - } + NonAtomicMarkingState* const marking_state_; }; #endif // VERIFY_HEAP bool IsUnmarkedObjectForYoungGeneration(Heap* heap, FullObjectSlot p) { DCHECK_IMPLIES(Heap::InYoungGeneration(*p), Heap::InToPage(*p)); - return Heap::InYoungGeneration(*p) && !heap->minor_mark_compact_collector() - ->non_atomic_marking_state() - ->IsBlack(HeapObject::cast(*p)); + return Heap::InYoungGeneration(*p) && + !heap->non_atomic_marking_state()->IsBlack(HeapObject::cast(*p)); } } // namespace @@ -5821,7 +5544,7 @@ constexpr size_t MinorMarkCompactCollector::kMaxParallelTasks; MinorMarkCompactCollector::MinorMarkCompactCollector(Heap* heap) : CollectorBase(heap, GarbageCollector::MINOR_MARK_COMPACTOR), page_parallel_job_semaphore_(0), - sweeper_(std::make_unique<Sweeper>(heap_, non_atomic_marking_state())) {} + sweeper_(heap_->sweeper()) {} std::pair<size_t, size_t> MinorMarkCompactCollector::ProcessMarkingWorklist( size_t bytes_to_process) { @@ -5872,8 +5595,7 @@ class YoungGenerationMigrationObserver final : public MigrationObserver { // Migrate color to old generation marking in case the object survived // young generation garbage collection. if (heap_->incremental_marking()->IsMarking()) { - DCHECK( - heap_->incremental_marking()->atomic_marking_state()->IsWhite(dst)); + DCHECK(heap_->atomic_marking_state()->IsWhite(dst)); heap_->incremental_marking()->TransferColor(src, dst); } } @@ -5886,9 +5608,8 @@ class YoungGenerationMigrationObserver final : public MigrationObserver { class YoungGenerationRecordMigratedSlotVisitor final : public RecordMigratedSlotVisitor { public: - explicit YoungGenerationRecordMigratedSlotVisitor( - MarkCompactCollector* collector) - : RecordMigratedSlotVisitor(collector, nullptr) {} + explicit YoungGenerationRecordMigratedSlotVisitor(Heap* heap) + : RecordMigratedSlotVisitor(heap, nullptr) {} void VisitCodeTarget(Code host, RelocInfo* rinfo) final { UNREACHABLE(); } void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final { @@ -5904,7 +5625,7 @@ class YoungGenerationRecordMigratedSlotVisitor final // Only record slots for host objects that are considered as live by the // full collector. inline bool IsLive(HeapObject object) { - return collector_->non_atomic_marking_state()->IsBlack(object); + return heap_->non_atomic_marking_state()->IsBlack(object); } inline void RecordMigratedSlot(HeapObject host, MaybeObject value, @@ -5928,6 +5649,10 @@ class YoungGenerationRecordMigratedSlotVisitor final RememberedSet<OLD_TO_OLD>::Insert<AccessMode::NON_ATOMIC>( MemoryChunk::FromHeapObject(host), slot); } + } else if (p->InSharedHeap()) { + DCHECK(!host.InSharedWritableHeap()); + RememberedSet<OLD_TO_SHARED>::Insert<AccessMode::NON_ATOMIC>( + MemoryChunk::FromHeapObject(host), slot); } } } @@ -5939,25 +5664,28 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() { std::vector<std::unique_ptr<UpdatingItem>> updating_items; - // Create batches of global handles. 
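// Sketch: the RecordMigratedSlot hunk above adds a third destination for
// migrated slots — OLD_TO_SHARED, when the value now lives in the shared
// heap. Simplified dispatch (enums are stand-ins; the real code also
// checks host/value page flags):
enum class Target { kYoung, kEvacuationCandidate, kShared, kOther };
enum class SetKind { kOldToNew, kOldToOld, kOldToShared, kNone };

SetKind RememberedSetFor(Target value_location) {
  switch (value_location) {
    case Target::kYoung:               return SetKind::kOldToNew;
    case Target::kEvacuationCandidate: return SetKind::kOldToOld;
    case Target::kShared:              return SetKind::kOldToShared;  // new
    case Target::kOther:               return SetKind::kNone;
  }
  return SetKind::kNone;
}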
- CollectRememberedSetUpdatingItems(this, &updating_items, heap()->old_space(), - RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); - CollectRememberedSetUpdatingItems(this, &updating_items, heap()->code_space(), - RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); - if (heap()->map_space()) { - CollectRememberedSetUpdatingItems( - this, &updating_items, heap()->map_space(), - RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); - } - CollectRememberedSetUpdatingItems(this, &updating_items, heap()->lo_space(), - RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); - CollectRememberedSetUpdatingItems(this, &updating_items, - heap()->code_lo_space(), - RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); - { TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS); + // Create batches of global handles. + CollectRememberedSetUpdatingItems( + this, &updating_items, heap()->old_space(), + RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); + CollectRememberedSetUpdatingItems( + this, &updating_items, heap()->code_space(), + RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); + if (heap()->map_space()) { + CollectRememberedSetUpdatingItems( + this, &updating_items, heap()->map_space(), + RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); + } + CollectRememberedSetUpdatingItems( + this, &updating_items, heap()->lo_space(), + RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); + CollectRememberedSetUpdatingItems( + this, &updating_items, heap()->code_lo_space(), + RememberedSetUpdatingMode::OLD_TO_NEW_ONLY); + V8::GetCurrentPlatform() ->CreateJob( v8::TaskPriority::kUserBlocking, @@ -6027,17 +5755,26 @@ void MinorMarkCompactCollector::StartMarking() { } void MinorMarkCompactCollector::Finish() { - TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_FINISH); - { - // Keep new space sweeping atomic. - GCTracer::Scope sweep_scope(heap()->tracer(), - GCTracer::Scope::MC_FINISH_SWEEP_NEW, - ThreadKind::kMain); - sweeper_->EnsureCompleted(Sweeper::SweepingMode::kEagerDuringGC); - heap()->paged_new_space()->paged_space()->RefillFreeList(sweeper()); + TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_SWEEP); + { + DCHECK_NOT_NULL(heap()->new_lo_space()); + TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_SWEEP_NEW_LO); + SweepLargeSpace(heap()->new_lo_space()); + } + + { + // Keep new space sweeping atomic. + GCTracer::Scope sweep_scope(heap()->tracer(), + GCTracer::Scope::MINOR_MC_SWEEP_FINISH_NEW, + ThreadKind::kMain); + sweeper_->EnsureCompleted(Sweeper::SweepingMode::kEagerDuringGC); + heap()->paged_new_space()->paged_space()->RefillFreeList(); + } } + TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_FINISH); + local_marking_worklists_.reset(); main_marking_visitor_.reset(); } @@ -6070,25 +5807,6 @@ void MinorMarkCompactCollector::CollectGarbage() { } #endif // VERIFY_HEAP - { - TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARKING_DEQUE); - heap()->incremental_marking()->UpdateMarkingWorklistAfterYoungGenGC(); - } - - { - TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_RESET_LIVENESS); - // Since we promote all surviving large objects immediately, all remaining - // large objects must be dead. - NonAtomicMarkingState* marking_state = non_atomic_marking_state(); - heap()->new_lo_space()->FreeDeadObjects([marking_state](HeapObject obj) { - // New large object space is not swept and markbits for non-promoted - // objects are still in tact. 
- USE(marking_state); - DCHECK(marking_state->IsWhite(obj)); - return true; - }); - } - CleanupPromotedPages(); SweepArrayBufferExtensions(); @@ -6101,7 +5819,6 @@ void MinorMarkCompactCollector::MakeIterable( CHECK(!p->IsLargePage()); // We have to clear the full collectors markbits for the areas that we // remove here. - MarkCompactCollector* full_collector = heap()->mark_compact_collector(); Address free_start = p->area_start(); for (auto object_and_size : @@ -6112,7 +5829,7 @@ void MinorMarkCompactCollector::MakeIterable( if (free_end != free_start) { CHECK_GT(free_end, free_start); size_t size = static_cast<size_t>(free_end - free_start); - full_collector->non_atomic_marking_state()->bitmap(p)->ClearRange( + heap()->non_atomic_marking_state()->bitmap(p)->ClearRange( p->AddressToMarkbitIndex(free_start), p->AddressToMarkbitIndex(free_end)); if (free_space_mode == FreeSpaceTreatmentMode::kZapFreeSpace) { @@ -6129,7 +5846,7 @@ void MinorMarkCompactCollector::MakeIterable( if (free_start != p->area_end()) { CHECK_GT(p->area_end(), free_start); size_t size = static_cast<size_t>(p->area_end() - free_start); - full_collector->non_atomic_marking_state()->bitmap(p)->ClearRange( + heap()->non_atomic_marking_state()->bitmap(p)->ClearRange( p->AddressToMarkbitIndex(free_start), p->AddressToMarkbitIndex(p->area_end())); if (free_space_mode == FreeSpaceTreatmentMode::kZapFreeSpace) { @@ -6144,10 +5861,8 @@ namespace { // Helper class for pruning the string table. class YoungGenerationExternalStringTableCleaner : public RootVisitor { public: - explicit YoungGenerationExternalStringTableCleaner( - MinorMarkCompactCollector* collector) - : heap_(collector->heap()), - marking_state_(collector->non_atomic_marking_state()) {} + explicit YoungGenerationExternalStringTableCleaner(Heap* heap) + : heap_(heap), marking_state_(heap_->non_atomic_marking_state()) {} void VisitRootPointers(Root root, const char* description, FullObjectSlot start, FullObjectSlot end) override { @@ -6173,8 +5888,8 @@ class YoungGenerationExternalStringTableCleaner : public RootVisitor { } private: - Heap* heap_; - NonAtomicMarkingState* marking_state_; + Heap* const heap_; + NonAtomicMarkingState* const marking_state_; }; } // namespace @@ -6186,7 +5901,7 @@ void MinorMarkCompactCollector::ClearNonLiveReferences() { TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_CLEAR_STRING_TABLE); // Internalized strings are always stored in old space, so there is no // need to clean them here. 
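// Sketch: MakeIterable (hunk above) walks live objects in address order;
// every gap between the running free_start and the next live object gets
// its mark-bitmap range cleared (and optionally zapped), now via the
// heap-owned marking state rather than the full collector's. Stand-in:
#include <cstddef>
#include <cstdint>
#include <utility>
#include <vector>

using Address = uintptr_t;

void MakeIterable(Address area_start, Address area_end,
                  const std::vector<std::pair<Address, size_t>>& live_objects,
                  void (*clear_marks)(Address, Address)) {
  Address free_start = area_start;
  for (auto [addr, size] : live_objects) {  // sorted by address
    if (addr != free_start) clear_marks(free_start, addr);
    free_start = addr + size;
  }
  if (free_start != area_end) clear_marks(free_start, area_end);
}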
- YoungGenerationExternalStringTableCleaner external_visitor(this); + YoungGenerationExternalStringTableCleaner external_visitor(heap()); heap()->external_string_table_.IterateYoung(&external_visitor); heap()->external_string_table_.CleanUpYoung(); } @@ -6220,16 +5935,14 @@ MinorMarkCompactCollector::CreateRememberedSetUpdatingItem( class PageMarkingItem; class RootMarkingItem; -class YoungGenerationMarkingTask; class YoungGenerationMarkingTask { public: - YoungGenerationMarkingTask(Isolate* isolate, - MinorMarkCompactCollector* collector, + YoungGenerationMarkingTask(Isolate* isolate, Heap* heap, MarkingWorklists* global_worklists) : marking_worklists_local_( std::make_unique<MarkingWorklists::Local>(global_worklists)), - marking_state_(collector->marking_state()), + marking_state_(heap->marking_state()), visitor_(isolate, marking_state_, marking_worklists_local()) {} void MarkObject(Object object) { @@ -6250,6 +5963,8 @@ class YoungGenerationMarkingTask { } } + void PublishMarkingWorklist() { marking_worklists_local_->Publish(); } + MarkingWorklists::Local* marking_worklists_local() { return marking_worklists_local_.get(); } @@ -6260,155 +5975,138 @@ class YoungGenerationMarkingTask { YoungGenerationMainMarkingVisitor visitor_; }; -class PageMarkingItem : public ParallelWorkItem { - public: - explicit PageMarkingItem(MemoryChunk* chunk) : chunk_(chunk) {} - ~PageMarkingItem() = default; - - void Process(YoungGenerationMarkingTask* task) { - TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), - "PageMarkingItem::Process"); - base::MutexGuard guard(chunk_->mutex()); - MarkUntypedPointers(task); - MarkTypedPointers(task); - } - - private: - inline Heap* heap() { return chunk_->heap(); } - - void MarkUntypedPointers(YoungGenerationMarkingTask* task) { - InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew( - chunk_, InvalidatedSlotsFilter::LivenessCheck::kNo); - RememberedSet<OLD_TO_NEW>::Iterate( - chunk_, - [this, task, &filter](MaybeObjectSlot slot) { - if (!filter.IsValid(slot.address())) return REMOVE_SLOT; - return CheckAndMarkObject(task, slot); - }, - SlotSet::FREE_EMPTY_BUCKETS); - } +void PageMarkingItem::Process(YoungGenerationMarkingTask* task) { + TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "PageMarkingItem::Process"); + base::MutexGuard guard(chunk_->mutex()); + MarkUntypedPointers(task); + MarkTypedPointers(task); +} + +void PageMarkingItem::MarkUntypedPointers(YoungGenerationMarkingTask* task) { + InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew( + chunk_, InvalidatedSlotsFilter::LivenessCheck::kNo); + RememberedSet<OLD_TO_NEW>::Iterate( + chunk_, + [this, task, &filter](MaybeObjectSlot slot) { + if (!filter.IsValid(slot.address())) return REMOVE_SLOT; + return CheckAndMarkObject(task, slot); + }, + SlotSet::FREE_EMPTY_BUCKETS); +} - void MarkTypedPointers(YoungGenerationMarkingTask* task) { - RememberedSet<OLD_TO_NEW>::IterateTyped( - chunk_, [=](SlotType slot_type, Address slot) { - return UpdateTypedSlotHelper::UpdateTypedSlot( - heap(), slot_type, slot, [this, task](FullMaybeObjectSlot slot) { - return CheckAndMarkObject(task, slot); - }); - }); - } +void PageMarkingItem::MarkTypedPointers(YoungGenerationMarkingTask* task) { + RememberedSet<OLD_TO_NEW>::IterateTyped( + chunk_, [=](SlotType slot_type, Address slot) { + return UpdateTypedSlotHelper::UpdateTypedSlot( + heap(), slot_type, slot, [this, task](FullMaybeObjectSlot slot) { + return CheckAndMarkObject(task, slot); + }); + }); +} - template <typename TSlot> - V8_INLINE 
SlotCallbackResult - CheckAndMarkObject(YoungGenerationMarkingTask* task, TSlot slot) { - static_assert( - std::is_same<TSlot, FullMaybeObjectSlot>::value || - std::is_same<TSlot, MaybeObjectSlot>::value, - "Only FullMaybeObjectSlot and MaybeObjectSlot are expected here"); - MaybeObject object = *slot; - if (Heap::InYoungGeneration(object)) { - // Marking happens before flipping the young generation, so the object - // has to be in a to page. - DCHECK(Heap::InToPage(object)); - HeapObject heap_object; - bool success = object.GetHeapObject(&heap_object); - USE(success); - DCHECK(success); - task->MarkObject(heap_object); - return KEEP_SLOT; - } - return REMOVE_SLOT; +template <typename TSlot> +V8_INLINE SlotCallbackResult PageMarkingItem::CheckAndMarkObject( + YoungGenerationMarkingTask* task, TSlot slot) { + static_assert( + std::is_same<TSlot, FullMaybeObjectSlot>::value || + std::is_same<TSlot, MaybeObjectSlot>::value, + "Only FullMaybeObjectSlot and MaybeObjectSlot are expected here"); + MaybeObject object = *slot; + if (Heap::InYoungGeneration(object)) { + // Marking happens before flipping the young generation, so the object + // has to be in a to page. + DCHECK(Heap::InToPage(object)); + HeapObject heap_object; + bool success = object.GetHeapObject(&heap_object); + USE(success); + DCHECK(success); + task->MarkObject(heap_object); + return KEEP_SLOT; } + return REMOVE_SLOT; +} - MemoryChunk* chunk_; -}; - -class YoungGenerationMarkingJob : public v8::JobTask { - public: - YoungGenerationMarkingJob(Isolate* isolate, - MinorMarkCompactCollector* collector, - MarkingWorklists* global_worklists, - std::vector<PageMarkingItem> marking_items) - : isolate_(isolate), - collector_(collector), - global_worklists_(global_worklists), - marking_items_(std::move(marking_items)), - remaining_marking_items_(marking_items_.size()), - generator_(marking_items_.size()) {} - - void Run(JobDelegate* delegate) override { - if (delegate->IsJoiningThread()) { - TRACE_GC(collector_->heap()->tracer(), - GCTracer::Scope::MINOR_MC_MARK_PARALLEL); - ProcessItems(delegate); - } else { - TRACE_GC_EPOCH(collector_->heap()->tracer(), - GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING, - ThreadKind::kBackground); - ProcessItems(delegate); - } +void YoungGenerationMarkingJob::Run(JobDelegate* delegate) { + if (delegate->IsJoiningThread()) { + TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MC_MARK_PARALLEL); + ProcessItems(delegate); + } else { + TRACE_GC_EPOCH(heap_->tracer(), + GCTracer::Scope::MINOR_MC_BACKGROUND_MARKING, + ThreadKind::kBackground); + ProcessItems(delegate); } +} - size_t GetMaxConcurrency(size_t worker_count) const override { - // Pages are not private to markers but we can still use them to estimate - // the amount of marking that is required. - const int kPagesPerTask = 2; - size_t items = remaining_marking_items_.load(std::memory_order_relaxed); - size_t num_tasks = std::max( +size_t YoungGenerationMarkingJob::GetMaxConcurrency(size_t worker_count) const { + // Pages are not private to markers but we can still use them to estimate + // the amount of marking that is required. + const int kPagesPerTask = 2; + size_t items = remaining_marking_items_.load(std::memory_order_relaxed); + size_t num_tasks; + if (!incremental()) { + num_tasks = std::max( (items + 1) / kPagesPerTask, global_worklists_->shared()->Size() + global_worklists_->on_hold() ->Size()); // TODO(v8:13012): If this is used with concurrent // marking, we need to remove on_hold() here. 
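// Sketch: CheckAndMarkObject above keeps an old-to-new slot only while its
// referent is still in the young generation; otherwise the slot is dropped
// from the remembered set. Stand-in model:
enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

struct Object { bool in_young_generation = false; };

SlotCallbackResult CheckAndMarkObject(Object* referent,
                                      void (*mark)(Object*)) {
  if (referent->in_young_generation) {
    mark(referent);    // continue marking transitively from here
    return KEEP_SLOT;  // slot may be needed by the next young GC
  }
  return REMOVE_SLOT;  // referent is old: the slot is stale
}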
- if (!v8_flags.parallel_marking) { - num_tasks = std::min<size_t>(1, num_tasks); - } - return std::min<size_t>(num_tasks, - MinorMarkCompactCollector::kMaxParallelTasks); + } else { + num_tasks = (items + 1) / kPagesPerTask; } - private: - void ProcessItems(JobDelegate* delegate) { - double marking_time = 0.0; - { - TimedScope scope(&marking_time); - YoungGenerationMarkingTask task(isolate_, collector_, global_worklists_); - ProcessMarkingItems(&task); - task.EmptyMarkingWorklist(); - } - if (v8_flags.trace_minor_mc_parallel_marking) { - PrintIsolate(collector_->isolate(), "marking[%p]: time=%f\n", - static_cast<void*>(this), marking_time); - } + if (!v8_flags.parallel_marking) { + num_tasks = std::min<size_t>(1, num_tasks); } + return std::min<size_t>(num_tasks, + MinorMarkCompactCollector::kMaxParallelTasks); +} - void ProcessMarkingItems(YoungGenerationMarkingTask* task) { - while (remaining_marking_items_.load(std::memory_order_relaxed) > 0) { - base::Optional<size_t> index = generator_.GetNext(); - if (!index) return; - for (size_t i = *index; i < marking_items_.size(); ++i) { - auto& work_item = marking_items_[i]; - if (!work_item.TryAcquire()) break; - work_item.Process(task); +void YoungGenerationMarkingJob::ProcessItems(JobDelegate* delegate) { + double marking_time = 0.0; + { + TimedScope scope(&marking_time); + YoungGenerationMarkingTask task(isolate_, heap_, global_worklists_); + ProcessMarkingItems(&task); + if (!incremental()) { + task.EmptyMarkingWorklist(); + } else { + task.PublishMarkingWorklist(); + } + } + if (v8_flags.trace_minor_mc_parallel_marking) { + PrintIsolate(isolate_, "marking[%p]: time=%f\n", static_cast<void*>(this), + marking_time); + } +} + +void YoungGenerationMarkingJob::ProcessMarkingItems( + YoungGenerationMarkingTask* task) { + // TODO(v8:13012): YoungGenerationMarkingJob is generally used to compute the + // transitive closure. In the context of concurrent MinorMC, it currently only + // seeds the worklists from the old-to-new remembered set, but does not empty + // them (this is done concurrently). The class should be refactored to make + // this clearer. + while (remaining_marking_items_.load(std::memory_order_relaxed) > 0) { + base::Optional<size_t> index = generator_.GetNext(); + if (!index) return; + for (size_t i = *index; i < marking_items_.size(); ++i) { + auto& work_item = marking_items_[i]; + if (!work_item.TryAcquire()) break; + work_item.Process(task); + if (!incremental()) { task->EmptyMarkingWorklist(); - if (remaining_marking_items_.fetch_sub(1, std::memory_order_relaxed) <= - 1) { - return; - } + } + if (remaining_marking_items_.fetch_sub(1, std::memory_order_relaxed) <= + 1) { + return; } } } - - Isolate* isolate_; - MinorMarkCompactCollector* collector_; - MarkingWorklists* global_worklists_; - std::vector<PageMarkingItem> marking_items_; - std::atomic_size_t remaining_marking_items_{0}; - IndexGenerator generator_; -}; +} void MinorMarkCompactCollector::MarkRootSetInParallel( - RootMarkingVisitor* root_visitor) { + RootMarkingVisitor* root_visitor, bool was_marked_incrementally) { { std::vector<PageMarkingItem> marking_items; @@ -6426,11 +6124,14 @@ void MinorMarkCompactCollector::MarkRootSetInParallel( SkipRoot::kOldGeneration}); isolate()->global_handles()->IterateYoungStrongAndDependentRoots( root_visitor); - // Create items for each page. 
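// Sketch: GetMaxConcurrency above now depends on the job mode — atomic
// jobs also count the current worklist size, incremental jobs only count
// remembered-set items. Stand-in restatement (the task cap corresponds to
// MinorMarkCompactCollector::kMaxParallelTasks):
#include <algorithm>
#include <cstddef>

size_t GetMaxConcurrency(bool incremental, size_t remaining_items,
                         size_t worklist_size, bool parallel_marking,
                         size_t max_parallel_tasks) {
  const size_t kPagesPerTask = 2;
  size_t num_tasks = (remaining_items + 1) / kPagesPerTask;
  if (!incremental) num_tasks = std::max(num_tasks, worklist_size);
  if (!parallel_marking) num_tasks = std::min<size_t>(1, num_tasks);
  return std::min(num_tasks, max_parallel_tasks);
}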
- RememberedSet<OLD_TO_NEW>::IterateMemoryChunks( - heap(), [&marking_items](MemoryChunk* chunk) { - marking_items.emplace_back(chunk); - }); + + if (!was_marked_incrementally) { + // Create items for each page. + RememberedSet<OLD_TO_NEW>::IterateMemoryChunks( + heap(), [&marking_items](MemoryChunk* chunk) { + marking_items.emplace_back(chunk); + }); + } } // Add tasks and run in parallel. @@ -6439,12 +6140,14 @@ void MinorMarkCompactCollector::MarkRootSetInParallel( // 0. Flush to ensure these items are visible globally and picked up // by the job. local_marking_worklists_->Publish(); - TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_ROOTS); + TRACE_GC(heap()->tracer(), + GCTracer::Scope::MINOR_MC_MARK_CLOSURE_PARALLEL); V8::GetCurrentPlatform() - ->CreateJob(v8::TaskPriority::kUserBlocking, - std::make_unique<YoungGenerationMarkingJob>( - isolate(), this, marking_worklists(), - std::move(marking_items))) + ->CreateJob( + v8::TaskPriority::kUserBlocking, + std::make_unique<YoungGenerationMarkingJob>( + isolate(), heap(), marking_worklists(), + std::move(marking_items), YoungMarkingJobType::kAtomic)) ->Join(); DCHECK(local_marking_worklists_->IsEmpty()); @@ -6462,7 +6165,8 @@ void MinorMarkCompactCollector::MarkLiveObjects() { bool was_marked_incrementally = false; { - // TODO(v8:13012): TRACE_GC with MINOR_MC_MARK_FINISH_INCREMENTAL. + TRACE_GC(heap()->tracer(), + GCTracer::Scope::MINOR_MC_MARK_FINISH_INCREMENTAL); if (heap_->incremental_marking()->Stop()) { MarkingBarrier::PublishAll(heap()); // TODO(v8:13012): TRACE_GC with MINOR_MC_MARK_FULL_CLOSURE_PARALLEL_JOIN. @@ -6475,11 +6179,11 @@ void MinorMarkCompactCollector::MarkLiveObjects() { RootMarkingVisitor root_visitor(this); - MarkRootSetInParallel(&root_visitor); + MarkRootSetInParallel(&root_visitor, was_marked_incrementally); // Mark rest on the main thread. { - TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_WEAK); + TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_MARK_CLOSURE); DrainMarkingWorklist(); } @@ -6577,18 +6281,12 @@ void MinorMarkCompactCollector::Evacuate() { { TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_COPY); + EvacuationScope evacuation_scope(heap()); EvacuatePagesInParallel(); } UpdatePointersAfterEvacuation(); - { - TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_REBALANCE); - if (!heap()->new_space()->EnsureCurrentCapacity()) { - heap()->FatalProcessOutOfMemory("NewSpace::Rebalance"); - } - } - { TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_CLEAN_UP); for (Page* p : new_space_evacuation_pages_) { @@ -6598,12 +6296,24 @@ void MinorMarkCompactCollector::Evacuate() { } else { // Page was not promoted. Sweep it instead. 
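// Sketch: MarkRootSetInParallel now seeds page marking items from the
// old-to-new remembered set only when minor marking was *not* already
// running incrementally — presumably because in that case those slots have
// already been fed into the marking worklists. Stand-in:
#include <vector>

struct Chunk {};

std::vector<Chunk*> CollectPageMarkingItems(
    bool was_marked_incrementally,
    const std::vector<Chunk*>& chunks_with_old_to_new) {
  std::vector<Chunk*> items;
  if (!was_marked_incrementally) {
    items = chunks_with_old_to_new;  // one PageMarkingItem per chunk
  }
  return items;
}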
DCHECK_EQ(NEW_SPACE, p->owner_identity()); - sweeper()->AddPage(NEW_SPACE, p, Sweeper::REGULAR); + sweeper()->AddNewSpacePage(p); } } new_space_evacuation_pages_.clear(); } + if (is_new_space_shrinking_) { + heap()->paged_new_space()->FinishShrinking(); + is_new_space_shrinking_ = false; + } + + { + TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_REBALANCE); + if (!heap()->new_space()->EnsureCurrentCapacity()) { + heap()->FatalProcessOutOfMemory("NewSpace::Rebalance"); + } + } + { TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_EVACUATE_EPILOGUE); EvacuateEpilogue(); @@ -6614,13 +6324,13 @@ namespace { class YoungGenerationEvacuator : public Evacuator { public: - explicit YoungGenerationEvacuator(MinorMarkCompactCollector* collector) - : Evacuator(collector->heap(), &record_visitor_, &local_allocator_, + explicit YoungGenerationEvacuator(Heap* heap) + : Evacuator(heap, &record_visitor_, &local_allocator_, AlwaysPromoteYoung::kNo), - record_visitor_(collector->heap()->mark_compact_collector()), + record_visitor_(heap_), local_allocator_( heap_, CompactionSpaceKind::kCompactionSpaceForMinorMarkCompact), - collector_(collector) {} + collector_(heap_->minor_mark_compact_collector()) {} GCTracer::Scope::ScopeId GetBackgroundTracingScope() override { return GCTracer::Scope::MINOR_MC_BACKGROUND_EVACUATE_COPY; @@ -6631,18 +6341,18 @@ class YoungGenerationEvacuator : public Evacuator { } protected: - void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override; + bool RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override; YoungGenerationRecordMigratedSlotVisitor record_visitor_; EvacuationAllocator local_allocator_; MinorMarkCompactCollector* collector_; }; -void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk, +bool YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) { TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "YoungGenerationEvacuator::RawEvacuatePage"); - NonAtomicMarkingState* marking_state = collector_->non_atomic_marking_state(); + NonAtomicMarkingState* marking_state = heap_->non_atomic_marking_state(); *live_bytes = marking_state->live_bytes(chunk); DCHECK_EQ(kPageNewToOld, ComputeEvacuationMode(chunk)); LiveObjectVisitor::VisitBlackObjectsNoFail(chunk, marking_state, @@ -6662,6 +6372,8 @@ void YoungGenerationEvacuator::RawEvacuatePage(MemoryChunk* chunk, FreeSpaceTreatmentMode::kIgnoreFreeSpace); } } + + return true; } } // namespace @@ -6675,7 +6387,10 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() { DCHECK_LT(0, live_bytes_on_page); live_bytes += live_bytes_on_page; if (ShouldMovePage(page, live_bytes_on_page, page->wasted_memory(), - MemoryReductionMode::kNone, AlwaysPromoteYoung::kNo)) { + MemoryReductionMode::kNone, AlwaysPromoteYoung::kNo, + heap()->tracer()->IsCurrentGCDueToAllocationFailure() + ? 
PromoteUnusablePages::kYes + : PromoteUnusablePages::kNo)) { EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page); evacuation_items.emplace_back(ParallelWorkItem{}, page); } @@ -6687,7 +6402,7 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() { LargePage* current = *it; it++; HeapObject object = current->GetObject(); - if (non_atomic_marking_state_.IsBlack(object)) { + if (non_atomic_marking_state()->IsBlack(object)) { heap_->lo_space()->PromoteNewLargeObject(current); current->SetFlag(Page::PAGE_NEW_OLD_PROMOTION); promoted_large_pages_.push_back(current); @@ -6702,43 +6417,15 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() { const auto pages_count = evacuation_items.size(); const auto wanted_num_tasks = CreateAndExecuteEvacuationTasks<YoungGenerationEvacuator>( - this, std::move(evacuation_items), &observer); + heap(), std::move(evacuation_items), &observer); if (v8_flags.trace_evacuation) { TraceEvacuation(isolate(), pages_count, wanted_num_tasks, live_bytes, 0); } } -void MinorMarkCompactCollector::StartSweepNewSpace() { - PagedSpaceBase* paged_space = heap()->paged_new_space()->paged_space(); - paged_space->ClearAllocatorState(); - - int will_be_swept = 0; - - // Loop needs to support deletion if live bytes == 0 for a page. - for (auto it = paged_space->begin(); it != paged_space->end();) { - Page* p = *(it++); - DCHECK(p->SweepingDone()); - - if (non_atomic_marking_state()->live_bytes(p) > 0) { - // Non-empty pages will be evacuated/promoted. - continue; - } - - // New space preallocates all its pages. Don't free empty pages since they - // will just be reallocated. - DCHECK_EQ(NEW_SPACE, paged_space->identity()); - sweeper_->AddPage(NEW_SPACE, p, Sweeper::REGULAR); - will_be_swept++; - } - - if (v8_flags.gc_verbose) { - PrintIsolate(isolate(), "sweeping: space=%s initialized_for_sweeping=%d", - paged_space->name(), will_be_swept); - } -} - void MinorMarkCompactCollector::Sweep() { + DCHECK(!sweeper()->sweeping_in_progress()); TRACE_GC(heap()->tracer(), GCTracer::Scope::MINOR_MC_SWEEP); { GCTracer::Scope sweep_scope(heap()->tracer(), @@ -6746,7 +6433,7 @@ void MinorMarkCompactCollector::Sweep() { ThreadKind::kMain); StartSweepNewSpace(); } - sweeper_->StartSweeping(); + sweeper_->StartSweeping(garbage_collector_); } } // namespace internal diff --git a/deps/v8/src/heap/mark-compact.h b/deps/v8/src/heap/mark-compact.h index 9d960cd3602adb..40b6019de5ca18 100644 --- a/deps/v8/src/heap/mark-compact.h +++ b/deps/v8/src/heap/mark-compact.h @@ -11,6 +11,7 @@ #include "include/v8-internal.h" #include "src/heap/base/worklist.h" #include "src/heap/concurrent-marking.h" +#include "src/heap/marking-state.h" #include "src/heap/marking-visitor.h" #include "src/heap/marking-worklist.h" #include "src/heap/marking.h" @@ -173,79 +174,11 @@ class LiveObjectVisitor : AllStatic { }; enum class AlwaysPromoteYoung { kYes, kNo }; +enum class PromoteUnusablePages { kYes, kNo }; enum class MemoryReductionMode { kNone, kShouldReduceMemory }; enum PageEvacuationMode { NEW_TO_NEW, NEW_TO_OLD }; enum class RememberedSetUpdatingMode { ALL, OLD_TO_NEW_ONLY }; -// This is used by marking visitors. 
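// Sketch: both collectors now share one heap-owned sweeper, so
// StartSweeping is parameterized by which collector is running (the
// internals shown here are an assumption, not the real Sweeper):
enum class GarbageCollector { MARK_COMPACTOR, MINOR_MARK_COMPACTOR };

class Sweeper {
 public:
  void StartSweeping(GarbageCollector collector) {
    current_collector_ = collector;  // sweeping behavior depends on GC kind
    // ... post sweeper tasks for the pages added via AddPage et al. ...
  }

 private:
  GarbageCollector current_collector_ = GarbageCollector::MARK_COMPACTOR;
};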
-class MarkingState final - : public MarkingStateBase<MarkingState, AccessMode::ATOMIC> { - public: - explicit MarkingState(PtrComprCageBase cage_base) - : MarkingStateBase(cage_base) {} - - ConcurrentBitmap<AccessMode::ATOMIC>* bitmap( - const BasicMemoryChunk* chunk) const { - return chunk->marking_bitmap<AccessMode::ATOMIC>(); - } - - // Concurrent marking uses local live bytes so we may do these accesses - // non-atomically. - void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) { - chunk->live_byte_count_.fetch_add(by, std::memory_order_relaxed); - } - - intptr_t live_bytes(const MemoryChunk* chunk) const { - return chunk->live_byte_count_.load(std::memory_order_relaxed); - } - - void SetLiveBytes(MemoryChunk* chunk, intptr_t value) { - chunk->live_byte_count_.store(value, std::memory_order_relaxed); - } -}; - -// This is used by Scavenger and Evacuator in TransferColor. -// Live byte increments have to be atomic. -class AtomicMarkingState final - : public MarkingStateBase<AtomicMarkingState, AccessMode::ATOMIC> { - public: - explicit AtomicMarkingState(PtrComprCageBase cage_base) - : MarkingStateBase(cage_base) {} - - ConcurrentBitmap<AccessMode::ATOMIC>* bitmap( - const BasicMemoryChunk* chunk) const { - return chunk->marking_bitmap<AccessMode::ATOMIC>(); - } - - void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) { - chunk->live_byte_count_.fetch_add(by); - } -}; - -class NonAtomicMarkingState final - : public MarkingStateBase<NonAtomicMarkingState, AccessMode::NON_ATOMIC> { - public: - explicit NonAtomicMarkingState(PtrComprCageBase cage_base) - : MarkingStateBase(cage_base) {} - - ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap( - const BasicMemoryChunk* chunk) const { - return chunk->marking_bitmap<AccessMode::NON_ATOMIC>(); - } - - void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) { - chunk->live_byte_count_.fetch_add(by, std::memory_order_relaxed); - } - - intptr_t live_bytes(const MemoryChunk* chunk) const { - return chunk->live_byte_count_.load(std::memory_order_relaxed); - } - - void SetLiveBytes(MemoryChunk* chunk, intptr_t value) { - chunk->live_byte_count_.store(value, std::memory_order_relaxed); - } -}; - // This visitor is used for marking on the main thread. It is cheaper than // the concurrent marking visitor because it does not snapshot JSObjects. template <typename MarkingState> @@ -328,15 +261,6 @@ class CollectorBase { virtual void Prepare() = 0; virtual void StartMarking() = 0; - MarkingState* marking_state() { return &marking_state_; } - - NonAtomicMarkingState* non_atomic_marking_state() { - return &non_atomic_marking_state_; - } - - inline Heap* heap() const { return heap_; } - inline Isolate* isolate(); - MarkingWorklists* marking_worklists() { return &marking_worklists_; } MarkingWorklists::Local* local_marking_worklists() { @@ -352,8 +276,6 @@ class CollectorBase { // Used by incremental marking for object that change their layout. 
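// Sketch: the three marking-state flavors deleted here move to the new
// src/heap/marking-state.h (included at the top of this header's hunk).
// Their main difference is the memory ordering of live-byte accounting;
// condensed model:
#include <atomic>
#include <cstdint>

struct Chunk { std::atomic<intptr_t> live_byte_count{0}; };

// MarkingState / NonAtomicMarkingState: concurrent marking keeps local
// counts, so relaxed ordering suffices.
void IncrementLiveBytesRelaxed(Chunk* c, intptr_t by) {
  c->live_byte_count.fetch_add(by, std::memory_order_relaxed);
}

// AtomicMarkingState (Scavenger/Evacuator TransferColor): increments use
// fetch_add's default, sequentially consistent ordering.
void IncrementLiveBytesAtomic(Chunk* c, intptr_t by) {
  c->live_byte_count.fetch_add(by);
}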
virtual void VisitObject(HeapObject obj) = 0; - virtual bool sweeping_in_progress() const = 0; - virtual void Finish() = 0; bool IsMajorMC(); @@ -363,14 +285,29 @@ class CollectorBase { std::vector<LargePage*> promoted_large_pages_; protected: + inline Heap* heap() const { return heap_; } + inline Isolate* isolate(); + + MarkingState* marking_state() { return marking_state_; } + + NonAtomicMarkingState* non_atomic_marking_state() { + return non_atomic_marking_state_; + } + + void StartSweepSpace(PagedSpace* space); + void StartSweepNewSpace(); + void SweepLargeSpace(LargeObjectSpace* space); + Heap* heap_; GarbageCollector garbage_collector_; MarkingWorklists marking_worklists_; std::unique_ptr<MarkingWorklists::Local> local_marking_worklists_; - MarkingState marking_state_; - NonAtomicMarkingState non_atomic_marking_state_; + MarkingState* const marking_state_; + NonAtomicMarkingState* const non_atomic_marking_state_; + + bool is_new_space_shrinking_ = false; explicit CollectorBase(Heap* heap, GarbageCollector collector); virtual ~CollectorBase() = default; @@ -459,35 +396,9 @@ class MarkCompactCollector final : public CollectorBase { void RecordLiveSlotsOnPage(Page* page); bool is_compacting() const { return compacting_; } - bool is_shared_heap() const { return is_shared_heap_; } - - void FinishSweepingIfOutOfWork(); - - enum class SweepingForcedFinalizationMode { kUnifiedHeap, kV8Only }; - - // Ensures that sweeping is finished. - // - // Note: Can only be called safely from main thread. - V8_EXPORT_PRIVATE void EnsureSweepingCompleted( - SweepingForcedFinalizationMode mode); - - void EnsurePageIsSwept(Page* page); - - void DrainSweepingWorklistForSpace(AllocationSpace space); - - // Checks if sweeping is in progress right now on any space. - bool sweeping_in_progress() const final { - return sweeper_->sweeping_in_progress(); - } - - void set_evacuation(bool evacuation) { evacuation_ = evacuation; } - - bool evacuation() const { return evacuation_; } inline void AddTransitionArray(TransitionArray array); - Sweeper* sweeper() { return sweeper_; } - #ifdef DEBUG // Checks whether performing mark-compact collection. bool in_use() { return state_ > PREPARE_GC; } @@ -544,10 +455,12 @@ class MarkCompactCollector final : public CollectorBase { // `kNullAddress` if the parameter does not point to (the interior of) a valid // heap object, or if it points to (the interior of) some object that is // already marked as live (black or grey). - Address FindBasePtrForMarking(Address maybe_inner_ptr); + V8_EXPORT_PRIVATE Address FindBasePtrForMarking(Address maybe_inner_ptr); #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_MB private: + Sweeper* sweeper() { return sweeper_; } + void ComputeEvacuationHeuristics(size_t area_size, int* target_fragmentation_percent, size_t* max_evacuated_bytes); @@ -560,9 +473,6 @@ class MarkCompactCollector final : public CollectorBase { // Free unmarked ArrayBufferExtensions. void SweepArrayBufferExtensions(); - // Free unmarked entries in the ExternalPointerTable. - void SweepExternalPointerTable(); - void MarkLiveObjects(); // Marks the object grey and adds it to the marking work list. @@ -677,9 +587,6 @@ class MarkCompactCollector final : public CollectorBase { // Starts sweeping of spaces by contributing on the main thread and setting // up other pages for sweeping. Does not start sweeper tasks. 
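// Sketch: CollectorBase now stores pointers to marking state instead of
// value members, so major and minor collectors observe the single state
// owned by the Heap (cf. the heap()->marking_state() call sites above).
// Stand-in:
struct MarkingState {};
struct NonAtomicMarkingState {};

struct Heap {
  MarkingState marking_state;
  NonAtomicMarkingState non_atomic_marking_state;
};

class CollectorBase {
 protected:
  explicit CollectorBase(Heap* heap)
      : heap_(heap),
        marking_state_(&heap->marking_state),
        non_atomic_marking_state_(&heap->non_atomic_marking_state) {}

  Heap* const heap_;
  MarkingState* const marking_state_;                      // owned by Heap
  NonAtomicMarkingState* const non_atomic_marking_state_;  // owned by Heap
};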
void Sweep(); - void StartSweepSpace(PagedSpace* space); - void StartSweepNewSpace(); - void SweepLargeSpace(LargeObjectSpace* space); void EvacuatePrologue(); void EvacuateEpilogue(); @@ -701,6 +608,8 @@ class MarkCompactCollector final : public CollectorBase { void RightTrimDescriptorArray(DescriptorArray array, int descriptors_to_trim); + V8_INLINE bool ShouldMarkObject(HeapObject) const; + base::Mutex mutex_; base::Semaphore page_parallel_job_semaphore_{0}; @@ -717,9 +626,9 @@ class MarkCompactCollector final : public CollectorBase { CollectorState state_; #endif - const bool is_shared_heap_; + const bool uses_shared_heap_; + const bool is_shared_heap_isolate_; - bool evacuation_ = false; // True if we are collecting slots to perform evacuation from evacuation // candidates. bool compacting_ = false; @@ -746,7 +655,7 @@ class MarkCompactCollector final : public CollectorBase { aborted_evacuation_candidates_due_to_flags_; std::vector<LargePage*> promoted_large_pages_; - Sweeper* sweeper_; + Sweeper* const sweeper_; // Counts the number of major mark-compact collections. The counter is // incremented right after marking. This is used for: @@ -765,19 +674,6 @@ class MarkCompactCollector final : public CollectorBase { friend class RecordMigratedSlotVisitor; }; -class V8_NODISCARD EvacuationScope { - public: - explicit EvacuationScope(MarkCompactCollector* collector) - : collector_(collector) { - collector_->set_evacuation(true); - } - - ~EvacuationScope() { collector_->set_evacuation(false); } - - private: - MarkCompactCollector* collector_; -}; - // Collector for young-generation only. class MinorMarkCompactCollector final : public CollectorBase { public: @@ -807,9 +703,6 @@ class MinorMarkCompactCollector final : public CollectorBase { void Finish() final; - Sweeper* sweeper() { return sweeper_.get(); } - bool sweeping_in_progress() const { return sweeper_->sweeping_in_progress(); } - void VisitObject(HeapObject obj) final; private: @@ -818,15 +711,17 @@ class MinorMarkCompactCollector final : public CollectorBase { static const int kNumMarkers = 8; static const int kMainMarker = 0; + Sweeper* sweeper() { return sweeper_; } + void MarkLiveObjects(); - void MarkRootSetInParallel(RootMarkingVisitor* root_visitor); + void MarkRootSetInParallel(RootMarkingVisitor* root_visitor, + bool was_marked_incrementally); V8_INLINE void MarkRootObject(HeapObject obj); void DrainMarkingWorklist(); void TraceFragmentation(); void ClearNonLiveReferences(); void Sweep(); - void StartSweepNewSpace(); void EvacuatePrologue(); void EvacuateEpilogue(); @@ -844,7 +739,7 @@ class MinorMarkCompactCollector final : public CollectorBase { std::vector<Page*> promoted_pages_; std::vector<LargePage*> promoted_large_pages_; - std::unique_ptr<Sweeper> sweeper_; + Sweeper* const sweeper_; friend class YoungGenerationMarkingTask; friend class YoungGenerationMarkingJob; diff --git a/deps/v8/src/heap/marking-barrier-inl.h b/deps/v8/src/heap/marking-barrier-inl.h index 5f50081c4e1ec7..4d83a533e02b9c 100644 --- a/deps/v8/src/heap/marking-barrier-inl.h +++ b/deps/v8/src/heap/marking-barrier-inl.h @@ -28,8 +28,7 @@ bool MarkingBarrier::MarkValue(HeapObject host, HeapObject value) { // visits the host object. 
return false; } - BasicMemoryChunk* target_page = BasicMemoryChunk::FromHeapObject(value); - if (is_shared_heap_ != target_page->InSharedHeap()) return false; + if (!ShouldMarkObject(value)) return false; if (is_minor()) { // We do not need to insert into RememberedSet<OLD_TO_NEW> here because the @@ -48,6 +47,16 @@ bool MarkingBarrier::MarkValue(HeapObject host, HeapObject value) { } } +bool MarkingBarrier::ShouldMarkObject(HeapObject object) const { + if (V8_LIKELY(!uses_shared_heap_)) return true; + if (v8_flags.shared_space) { + if (is_shared_heap_isolate_) return true; + return !object.InSharedHeap(); + } else { + return is_shared_heap_isolate_ == object.InSharedHeap(); + } +} + template <typename TSlot> inline void MarkingBarrier::MarkRange(HeapObject host, TSlot start, TSlot end) { auto* isolate = heap_->isolate(); diff --git a/deps/v8/src/heap/marking-barrier.cc b/deps/v8/src/heap/marking-barrier.cc index e028a67295fae5..c66bf9d4b2019c 100644 --- a/deps/v8/src/heap/marking-barrier.cc +++ b/deps/v8/src/heap/marking-barrier.cc @@ -31,7 +31,8 @@ MarkingBarrier::MarkingBarrier(LocalHeap* local_heap) minor_worklist_(*minor_collector_->marking_worklists()->shared()), marking_state_(heap_->isolate()), is_main_thread_barrier_(local_heap->is_main_thread()), - is_shared_heap_(heap_->IsShared()) {} + uses_shared_heap_(heap_->isolate()->has_shared_heap()), + is_shared_heap_isolate_(heap_->isolate()->is_shared_heap_isolate()) {} MarkingBarrier::~MarkingBarrier() { DCHECK(typed_slots_map_.empty()); } @@ -212,6 +213,9 @@ void MarkingBarrier::Deactivate() { if (heap_->map_space()) DeactivateSpace(heap_->map_space()); DeactivateSpace(heap_->code_space()); DeactivateSpace(heap_->new_space()); + if (heap_->shared_space()) { + DeactivateSpace(heap_->shared_space()); + } for (LargePage* p : *heap_->new_lo_space()) { p->SetYoungGenerationPageFlags(false); DCHECK(p->IsLargePage()); @@ -222,6 +226,11 @@ void MarkingBarrier::Deactivate() { for (LargePage* p : *heap_->code_lo_space()) { p->SetOldGenerationPageFlags(false); } + if (heap_->shared_lo_space()) { + for (LargePage* p : *heap_->shared_lo_space()) { + p->SetOldGenerationPageFlags(false); + } + } } DCHECK(typed_slots_map_.empty()); DCHECK(current_worklist_->IsLocalEmpty()); @@ -259,6 +268,9 @@ void MarkingBarrier::Activate(bool is_compacting, ActivateSpace(heap_->code_space()); } ActivateSpace(heap_->new_space()); + if (heap_->shared_space()) { + ActivateSpace(heap_->shared_space()); + } for (LargePage* p : *heap_->new_lo_space()) { p->SetYoungGenerationPageFlags(true); @@ -276,6 +288,12 @@ void MarkingBarrier::Activate(bool is_compacting, p->SetOldGenerationPageFlags(true); } } + + if (heap_->shared_lo_space()) { + for (LargePage* p : *heap_->shared_lo_space()) { + p->SetOldGenerationPageFlags(true); + } + } } } diff --git a/deps/v8/src/heap/marking-barrier.h b/deps/v8/src/heap/marking-barrier.h index 8d04a25d105c42..96d0b329266998 100644 --- a/deps/v8/src/heap/marking-barrier.h +++ b/deps/v8/src/heap/marking-barrier.h @@ -50,6 +50,7 @@ class MarkingBarrier { } private: + inline bool ShouldMarkObject(HeapObject value) const; inline bool WhiteToGreyAndPush(HeapObject value); void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target); @@ -83,7 +84,8 @@ class MarkingBarrier { bool is_compacting_ = false; bool is_activated_ = false; bool is_main_thread_barrier_; - bool is_shared_heap_; + const bool uses_shared_heap_; + const bool is_shared_heap_isolate_; MarkingBarrierType marking_barrier_type_; }; diff --git 
a/deps/v8/src/heap/marking-state-inl.h b/deps/v8/src/heap/marking-state-inl.h new file mode 100644 index 00000000000000..0ab19a91a1bb9e --- /dev/null +++ b/deps/v8/src/heap/marking-state-inl.h @@ -0,0 +1,155 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_HEAP_MARKING_STATE_INL_H_ +#define V8_HEAP_MARKING_STATE_INL_H_ + +#include "src/heap/marking-state.h" +#include "src/heap/memory-chunk.h" + +namespace v8 { +namespace internal { + +template <typename ConcreteState, AccessMode access_mode> +MarkBit MarkingStateBase<ConcreteState, access_mode>::MarkBitFrom( + const HeapObject obj) const { + return MarkBitFrom(BasicMemoryChunk::FromHeapObject(obj), obj.ptr()); +} + +template <typename ConcreteState, AccessMode access_mode> +MarkBit MarkingStateBase<ConcreteState, access_mode>::MarkBitFrom( + const BasicMemoryChunk* p, Address addr) const { + return static_cast<const ConcreteState*>(this)->bitmap(p)->MarkBitFromIndex( + p->AddressToMarkbitIndex(addr)); +} + +template <typename ConcreteState, AccessMode access_mode> +Marking::ObjectColor MarkingStateBase<ConcreteState, access_mode>::Color( + const HeapObject obj) const { + return Marking::Color(MarkBitFrom(obj)); +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::IsImpossible( + const HeapObject obj) const { + return Marking::IsImpossible<access_mode>(MarkBitFrom(obj)); +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::IsBlack( + const HeapObject obj) const { + return Marking::IsBlack<access_mode>(MarkBitFrom(obj)); +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::IsWhite( + const HeapObject obj) const { + return Marking::IsWhite<access_mode>(MarkBitFrom(obj)); +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::IsGrey( + const HeapObject obj) const { + return Marking::IsGrey<access_mode>(MarkBitFrom(obj)); +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::IsBlackOrGrey( + const HeapObject obj) const { + return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj)); +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::WhiteToGrey(HeapObject obj) { + return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj)); +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::WhiteToBlack( + HeapObject obj) { + return WhiteToGrey(obj) && GreyToBlack(obj); +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlack(HeapObject obj) { + BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(obj); + MarkBit markbit = MarkBitFrom(chunk, obj.address()); + if (!Marking::GreyToBlack<access_mode>(markbit)) return false; + static_cast<ConcreteState*>(this)->IncrementLiveBytes( + MemoryChunk::cast(chunk), + ALIGN_TO_ALLOCATION_ALIGNMENT(obj.Size(cage_base()))); + return true; +} + +template <typename ConcreteState, AccessMode access_mode> +bool MarkingStateBase<ConcreteState, access_mode>::GreyToBlackUnaccounted( + HeapObject obj) { + return Marking::GreyToBlack<access_mode>(MarkBitFrom(obj)); +} + +template <typename 
ConcreteState, AccessMode access_mode> +void MarkingStateBase<ConcreteState, access_mode>::ClearLiveness( + MemoryChunk* chunk) { + static_cast<ConcreteState*>(this)->bitmap(chunk)->Clear(); + static_cast<ConcreteState*>(this)->SetLiveBytes(chunk, 0); +} + +ConcurrentBitmap<AccessMode::ATOMIC>* MarkingState::bitmap( + const BasicMemoryChunk* chunk) const { + return chunk->marking_bitmap<AccessMode::ATOMIC>(); +} + +void MarkingState::IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) { + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(by, kObjectAlignment8GbHeap)); + chunk->live_byte_count_.fetch_add(by, std::memory_order_relaxed); +} + +intptr_t MarkingState::live_bytes(const MemoryChunk* chunk) const { + return chunk->live_byte_count_.load(std::memory_order_relaxed); +} + +void MarkingState::SetLiveBytes(MemoryChunk* chunk, intptr_t value) { + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(value, kObjectAlignment8GbHeap)); + chunk->live_byte_count_.store(value, std::memory_order_relaxed); +} + +ConcurrentBitmap<AccessMode::NON_ATOMIC>* NonAtomicMarkingState::bitmap( + const BasicMemoryChunk* chunk) const { + return chunk->marking_bitmap<AccessMode::NON_ATOMIC>(); +} + +void NonAtomicMarkingState::IncrementLiveBytes(MemoryChunk* chunk, + intptr_t by) { + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(by, kObjectAlignment8GbHeap)); + chunk->live_byte_count_.fetch_add(by, std::memory_order_relaxed); +} + +intptr_t NonAtomicMarkingState::live_bytes(const MemoryChunk* chunk) const { + return chunk->live_byte_count_.load(std::memory_order_relaxed); +} + +void NonAtomicMarkingState::SetLiveBytes(MemoryChunk* chunk, intptr_t value) { + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(value, kObjectAlignment8GbHeap)); + chunk->live_byte_count_.store(value, std::memory_order_relaxed); +} + +ConcurrentBitmap<AccessMode::ATOMIC>* AtomicMarkingState::bitmap( + const BasicMemoryChunk* chunk) const { + return chunk->marking_bitmap<AccessMode::ATOMIC>(); +} + +void AtomicMarkingState::IncrementLiveBytes(MemoryChunk* chunk, intptr_t by) { + DCHECK_IMPLIES(V8_COMPRESS_POINTERS_8GB_BOOL, + IsAligned(by, kObjectAlignment8GbHeap)); + chunk->live_byte_count_.fetch_add(by); +} + +} // namespace internal +} // namespace v8 + +#endif // V8_HEAP_MARKING_STATE_INL_H_ diff --git a/deps/v8/src/heap/marking-state.h b/deps/v8/src/heap/marking-state.h new file mode 100644 index 00000000000000..c197c102436e40 --- /dev/null +++ b/deps/v8/src/heap/marking-state.h @@ -0,0 +1,137 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_HEAP_MARKING_STATE_H_ +#define V8_HEAP_MARKING_STATE_H_ + +#include "src/common/globals.h" +#include "src/heap/marking.h" +#include "src/objects/heap-object.h" + +namespace v8 { +namespace internal { + +class BasicMemoryChunk; +class MemoryChunk; + +template <typename ConcreteState, AccessMode access_mode> +class MarkingStateBase { + public: + // Declares that this marking state is not collecting retainers, so the + // marking visitor may update the heap state to store information about + // progress, and may avoid fully visiting an object if it is safe to do so. 
+ static constexpr bool kCollectRetainers = false; + + explicit MarkingStateBase(PtrComprCageBase cage_base) +#if V8_COMPRESS_POINTERS + : cage_base_(cage_base) +#endif + { + } + + // The pointer compression cage base value used for decompression of all + // tagged values except references to Code objects. + V8_INLINE PtrComprCageBase cage_base() const { +#if V8_COMPRESS_POINTERS + return cage_base_; +#else + return PtrComprCageBase{}; +#endif // V8_COMPRESS_POINTERS + } + + V8_INLINE MarkBit MarkBitFrom(const HeapObject obj) const; + + // {addr} may be tagged or aligned. + V8_INLINE MarkBit MarkBitFrom(const BasicMemoryChunk* p, Address addr) const; + + V8_INLINE Marking::ObjectColor Color(const HeapObject obj) const; + + V8_INLINE bool IsImpossible(const HeapObject obj) const; + + V8_INLINE bool IsBlack(const HeapObject obj) const; + + V8_INLINE bool IsWhite(const HeapObject obj) const; + + V8_INLINE bool IsGrey(const HeapObject obj) const; + + V8_INLINE bool IsBlackOrGrey(const HeapObject obj) const; + + V8_INLINE bool WhiteToGrey(HeapObject obj); + + V8_INLINE bool WhiteToBlack(HeapObject obj); + + V8_INLINE bool GreyToBlack(HeapObject obj); + + V8_INLINE bool GreyToBlackUnaccounted(HeapObject obj); + + V8_INLINE void ClearLiveness(MemoryChunk* chunk); + + void AddStrongReferenceForReferenceSummarizer(HeapObject host, + HeapObject obj) { + // This is not a reference summarizer, so there is nothing to do here. + } + + void AddWeakReferenceForReferenceSummarizer(HeapObject host, HeapObject obj) { + // This is not a reference summarizer, so there is nothing to do here. + } + + private: +#if V8_COMPRESS_POINTERS + const PtrComprCageBase cage_base_; +#endif // V8_COMPRESS_POINTERS +}; + +// This is used by marking visitors. +class MarkingState final + : public MarkingStateBase<MarkingState, AccessMode::ATOMIC> { + public: + explicit MarkingState(PtrComprCageBase cage_base) + : MarkingStateBase(cage_base) {} + + V8_INLINE ConcurrentBitmap<AccessMode::ATOMIC>* bitmap( + const BasicMemoryChunk* chunk) const; + + // Concurrent marking uses local live bytes so we may do these accesses + // non-atomically. + V8_INLINE void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by); + + V8_INLINE intptr_t live_bytes(const MemoryChunk* chunk) const; + + V8_INLINE void SetLiveBytes(MemoryChunk* chunk, intptr_t value); +}; + +class NonAtomicMarkingState final + : public MarkingStateBase<NonAtomicMarkingState, AccessMode::NON_ATOMIC> { + public: + explicit NonAtomicMarkingState(PtrComprCageBase cage_base) + : MarkingStateBase(cage_base) {} + + V8_INLINE ConcurrentBitmap<AccessMode::NON_ATOMIC>* bitmap( + const BasicMemoryChunk* chunk) const; + + V8_INLINE void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by); + + V8_INLINE intptr_t live_bytes(const MemoryChunk* chunk) const; + + V8_INLINE void SetLiveBytes(MemoryChunk* chunk, intptr_t value); +}; + +// This is used by Scavenger and Evacuator in TransferColor. +// Live byte increments have to be atomic. 
+class AtomicMarkingState final + : public MarkingStateBase<AtomicMarkingState, AccessMode::ATOMIC> { + public: + explicit AtomicMarkingState(PtrComprCageBase cage_base) + : MarkingStateBase(cage_base) {} + + V8_INLINE ConcurrentBitmap<AccessMode::ATOMIC>* bitmap( + const BasicMemoryChunk* chunk) const; + + V8_INLINE void IncrementLiveBytes(MemoryChunk* chunk, intptr_t by); +}; + +} // namespace internal +} // namespace v8 + +#endif // V8_HEAP_MARKING_STATE_H_ diff --git a/deps/v8/src/heap/marking-visitor-inl.h b/deps/v8/src/heap/marking-visitor-inl.h index dfaa739317dea4..64053c60425f41 100644 --- a/deps/v8/src/heap/marking-visitor-inl.h +++ b/deps/v8/src/heap/marking-visitor-inl.h @@ -5,6 +5,7 @@ #ifndef V8_HEAP_MARKING_VISITOR_INL_H_ #define V8_HEAP_MARKING_VISITOR_INL_H_ +#include "src/heap/marking-state-inl.h" #include "src/heap/marking-visitor.h" #include "src/heap/marking-worklist.h" #include "src/heap/objects-visiting-inl.h" @@ -44,7 +45,7 @@ template <typename THeapObjectSlot> void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessStrongHeapObject( HeapObject host, THeapObjectSlot slot, HeapObject heap_object) { SynchronizePageAccess(heap_object); - if (!is_shared_heap_ && heap_object.InSharedHeap()) return; + if (!ShouldMarkObject(heap_object)) return; MarkObject(host, heap_object); concrete_visitor()->RecordSlot(host, slot, heap_object); } @@ -56,7 +57,7 @@ template <typename THeapObjectSlot> void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject( HeapObject host, THeapObjectSlot slot, HeapObject heap_object) { SynchronizePageAccess(heap_object); - if (!is_shared_heap_ && heap_object.InSharedHeap()) return; + if (!ShouldMarkObject(heap_object)) return; if (concrete_visitor()->marking_state()->IsBlackOrGrey(heap_object)) { // Weak references with live values are directly processed here to // reduce the processing time of weak cells during the main GC @@ -116,7 +117,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer( DCHECK(RelocInfo::IsEmbeddedObjectMode(rinfo->rmode())); HeapObject object = rinfo->target_object(ObjectVisitorWithCageBases::cage_base()); - if (!is_shared_heap_ && object.InSharedHeap()) return; + if (!ShouldMarkObject(object)) return; if (!concrete_visitor()->marking_state()->IsBlackOrGrey(object)) { if (host.IsWeakObject(object)) { @@ -136,7 +137,7 @@ void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitCodeTarget( DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode())); Code target = Code::GetCodeFromTargetAddress(rinfo->target_address()); - if (!is_shared_heap_ && target.InSharedHeap()) return; + if (!ShouldMarkObject(target)) return; MarkObject(host, target); concrete_visitor()->RecordRelocSlot(host, rinfo, target); } @@ -371,7 +372,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable( ObjectSlot value_slot = table.RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i)); - if ((!is_shared_heap_ && key.InSharedHeap()) || + if (!ShouldMarkObject(key) || concrete_visitor()->marking_state()->IsBlackOrGrey(key)) { VisitPointer(table, value_slot); } else { @@ -383,7 +384,7 @@ int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable( concrete_visitor()->RecordSlot(table, value_slot, value); AddWeakReferenceForReferenceSummarizer(table, value); - if (!is_shared_heap_ && value.InSharedHeap()) continue; + if (!ShouldMarkObject(value)) continue; // Revisit ephemerons with both key and value unreachable at end // of concurrent marking cycle. 
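The recurring edit across marking-barrier-inl.h and marking-visitor-inl.h above replaces scattered `!is_shared_heap_ && object.InSharedHeap()` tests with a single ShouldMarkObject predicate. The following self-contained C++ sketch restates that decision table outside of V8; HeapObjectStub and MarkingPolicy are hypothetical stand-ins for the Isolate and HeapObject plumbing, and only the branch structure is taken from MarkingBarrier::ShouldMarkObject in the patch.

    // Sketch of the ShouldMarkObject decision table; the stub types are
    // hypothetical, the branch logic mirrors marking-barrier-inl.h above.
    #include <cstdio>

    struct HeapObjectStub {
      bool in_shared_heap;  // stands in for HeapObject::InSharedHeap()
    };

    struct MarkingPolicy {
      bool uses_shared_heap;        // isolate()->has_shared_heap()
      bool is_shared_heap_isolate;  // isolate()->is_shared_heap_isolate()
      bool shared_space_enabled;    // v8_flags.shared_space

      bool ShouldMarkObject(HeapObjectStub object) const {
        // Isolates without any shared heap mark every object they see.
        if (!uses_shared_heap) return true;
        if (shared_space_enabled) {
          // With --shared-space the shared-heap isolate marks everything,
          // while client isolates skip objects living in the shared heap.
          if (is_shared_heap_isolate) return true;
          return !object.in_shared_heap;
        }
        // Legacy shared isolate: each side marks only its own objects.
        return is_shared_heap_isolate == object.in_shared_heap;
      }
    };

    int main() {
      MarkingPolicy client{/*uses_shared_heap=*/true,
                           /*is_shared_heap_isolate=*/false,
                           /*shared_space_enabled=*/true};
      std::printf("%d\n", client.ShouldMarkObject({false}));  // 1: local object
      std::printf("%d\n", client.ShouldMarkObject({true}));   // 0: shared object
      return 0;
    }

MarkingVisitorBase::ShouldMarkObject in marking-visitor.h collapses the same table into `should_mark_shared_heap_ || !object.InSharedHeap()`, since that visitor computes its flag once at construction.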
diff --git a/deps/v8/src/heap/marking-visitor.h b/deps/v8/src/heap/marking-visitor.h index 81c81d24b8cd67..3e6937bb41c552 100644 --- a/deps/v8/src/heap/marking-visitor.h +++ b/deps/v8/src/heap/marking-visitor.h @@ -6,9 +6,8 @@ #define V8_HEAP_MARKING_VISITOR_H_ #include "src/common/globals.h" +#include "src/heap/marking-state.h" #include "src/heap/marking-worklist.h" -#include "src/heap/marking.h" -#include "src/heap/memory-chunk.h" #include "src/heap/objects-visiting.h" #include "src/heap/spaces.h" #include "src/heap/weak-object-worklists.h" @@ -22,106 +21,6 @@ struct EphemeronMarking { size_t newly_discovered_limit; }; -template <typename ConcreteState, AccessMode access_mode> -class MarkingStateBase { - public: - // Declares that this marking state is not collecting retainers, so the - // marking visitor may update the heap state to store information about - // progress, and may avoid fully visiting an object if it is safe to do so. - static constexpr bool kCollectRetainers = false; - - explicit MarkingStateBase(PtrComprCageBase cage_base) -#if V8_COMPRESS_POINTERS - : cage_base_(cage_base) -#endif - { - } - - // The pointer compression cage base value used for decompression of all - // tagged values except references to Code objects. - V8_INLINE PtrComprCageBase cage_base() const { -#if V8_COMPRESS_POINTERS - return cage_base_; -#else - return PtrComprCageBase{}; -#endif // V8_COMPRESS_POINTERS - } - - V8_INLINE MarkBit MarkBitFrom(const HeapObject obj) const { - return MarkBitFrom(BasicMemoryChunk::FromHeapObject(obj), obj.ptr()); - } - - // {addr} may be tagged or aligned. - V8_INLINE MarkBit MarkBitFrom(const BasicMemoryChunk* p, Address addr) const { - return static_cast<const ConcreteState*>(this)->bitmap(p)->MarkBitFromIndex( - p->AddressToMarkbitIndex(addr)); - } - - Marking::ObjectColor Color(const HeapObject obj) const { - return Marking::Color(MarkBitFrom(obj)); - } - - V8_INLINE bool IsImpossible(const HeapObject obj) const { - return Marking::IsImpossible<access_mode>(MarkBitFrom(obj)); - } - - V8_INLINE bool IsBlack(const HeapObject obj) const { - return Marking::IsBlack<access_mode>(MarkBitFrom(obj)); - } - - V8_INLINE bool IsWhite(const HeapObject obj) const { - return Marking::IsWhite<access_mode>(MarkBitFrom(obj)); - } - - V8_INLINE bool IsGrey(const HeapObject obj) const { - return Marking::IsGrey<access_mode>(MarkBitFrom(obj)); - } - - V8_INLINE bool IsBlackOrGrey(const HeapObject obj) const { - return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj)); - } - - V8_INLINE bool WhiteToGrey(HeapObject obj) { - return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj)); - } - - V8_INLINE bool WhiteToBlack(HeapObject obj) { - return WhiteToGrey(obj) && GreyToBlack(obj); - } - - V8_INLINE bool GreyToBlack(HeapObject obj) { - BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(obj); - MarkBit markbit = MarkBitFrom(chunk, obj.address()); - if (!Marking::GreyToBlack<access_mode>(markbit)) return false; - static_cast<ConcreteState*>(this)->IncrementLiveBytes( - MemoryChunk::cast(chunk), obj.Size(cage_base())); - return true; - } - - V8_INLINE bool GreyToBlackUnaccounted(HeapObject obj) { - return Marking::GreyToBlack<access_mode>(MarkBitFrom(obj)); - } - - void ClearLiveness(MemoryChunk* chunk) { - static_cast<ConcreteState*>(this)->bitmap(chunk)->Clear(); - static_cast<ConcreteState*>(this)->SetLiveBytes(chunk, 0); - } - - void AddStrongReferenceForReferenceSummarizer(HeapObject host, - HeapObject obj) { - // This is not a reference summarizer, so there is nothing 
to do here. - } - - void AddWeakReferenceForReferenceSummarizer(HeapObject host, HeapObject obj) { - // This is not a reference summarizer, so there is nothing to do here. - } - - private: -#if V8_COMPRESS_POINTERS - const PtrComprCageBase cage_base_; -#endif // V8_COMPRESS_POINTERS -}; - // The base class for all marking visitors. It implements marking logic with // support of bytecode flushing, embedder tracing, weak and references. // @@ -153,7 +52,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> { code_flush_mode_(code_flush_mode), is_embedder_tracing_enabled_(is_embedder_tracing_enabled), should_keep_ages_unchanged_(should_keep_ages_unchanged), - is_shared_heap_(heap->IsShared()) + should_mark_shared_heap_(heap->ShouldMarkSharedHeap()) #ifdef V8_ENABLE_SANDBOX , external_pointer_table_(&heap->isolate()->external_pointer_table()), @@ -220,7 +119,10 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> { #endif } - bool is_shared_heap() { return is_shared_heap_; } + bool ShouldMarkObject(HeapObject object) const { + if (should_mark_shared_heap_) return true; + return !object.InSharedHeap(); + } // Marks the object grey and pushes it on the marking work list. V8_INLINE void MarkObject(HeapObject host, HeapObject obj); @@ -289,7 +191,7 @@ class MarkingVisitorBase : public HeapVisitor<int, ConcreteVisitor> { const base::EnumSet<CodeFlushMode> code_flush_mode_; const bool is_embedder_tracing_enabled_; const bool should_keep_ages_unchanged_; - const bool is_shared_heap_; + const bool should_mark_shared_heap_; #ifdef V8_ENABLE_SANDBOX ExternalPointerTable* const external_pointer_table_; ExternalPointerTable* const shared_external_pointer_table_; diff --git a/deps/v8/src/heap/memory-allocator.cc b/deps/v8/src/heap/memory-allocator.cc index 381eba7bf64987..dd9afbdfa6a5a2 100644 --- a/deps/v8/src/heap/memory-allocator.cc +++ b/deps/v8/src/heap/memory-allocator.cc @@ -777,7 +777,6 @@ const MemoryChunk* MemoryAllocator::LookupChunkContainingAddress( it != normal_pages_.end()) { // The chunk is a normal page. DCHECK_LE(chunk->address(), addr); - DCHECK_GT(chunk->area_end(), addr); if (chunk->Contains(addr)) return *it; } else if (auto it = large_pages_.upper_bound(static_cast<LargePage*>(chunk)); it != large_pages_.begin()) { diff --git a/deps/v8/src/heap/memory-allocator.h b/deps/v8/src/heap/memory-allocator.h index 0485a7fe8c7972..ed6e4c82fabad1 100644 --- a/deps/v8/src/heap/memory-allocator.h +++ b/deps/v8/src/heap/memory-allocator.h @@ -265,7 +265,8 @@ class MemoryAllocator { // Return the normal or large page that contains this address, if it is owned // by this heap, otherwise a nullptr. - const MemoryChunk* LookupChunkContainingAddress(Address addr) const; + V8_EXPORT_PRIVATE const MemoryChunk* LookupChunkContainingAddress( + Address addr) const; // Insert and remove normal and large pages that are owned by this heap. 
void RecordNormalPageCreated(const Page& page); diff --git a/deps/v8/src/heap/memory-chunk-layout.cc b/deps/v8/src/heap/memory-chunk-layout.cc index ff2dbd915f7b4e..e81aaec8f3a165 100644 --- a/deps/v8/src/heap/memory-chunk-layout.cc +++ b/deps/v8/src/heap/memory-chunk-layout.cc @@ -42,7 +42,8 @@ size_t MemoryChunkLayout::AllocatableMemoryInCodePage() { } intptr_t MemoryChunkLayout::ObjectStartOffsetInDataPage() { - return RoundUp(MemoryChunk::kHeaderSize + Bitmap::kSize, kDoubleSize); + return RoundUp(MemoryChunk::kHeaderSize + Bitmap::kSize, + ALIGN_TO_ALLOCATION_ALIGNMENT(kDoubleSize)); } size_t MemoryChunkLayout::ObjectStartOffsetInMemoryChunk( diff --git a/deps/v8/src/heap/memory-chunk-layout.h b/deps/v8/src/heap/memory-chunk-layout.h index 053f35f3091328..8c771f8b2b411d 100644 --- a/deps/v8/src/heap/memory-chunk-layout.h +++ b/deps/v8/src/heap/memory-chunk-layout.h @@ -37,8 +37,13 @@ using ActiveSystemPages = ::heap::base::ActiveSystemPages; class V8_EXPORT_PRIVATE MemoryChunkLayout { public: - static const int kNumSets = NUMBER_OF_REMEMBERED_SET_TYPES; - static const int kNumTypes = ExternalBackingStoreType::kNumTypes; + static constexpr int kNumSets = NUMBER_OF_REMEMBERED_SET_TYPES; + static constexpr int kNumTypes = ExternalBackingStoreType::kNumTypes; +#if V8_CC_MSVC && V8_TARGET_ARCH_IA32 + static constexpr int kMemoryChunkAlignment = 8; +#else + static constexpr int kMemoryChunkAlignment = sizeof(size_t); +#endif // V8_CC_MSVC && V8_TARGET_ARCH_IA32 #define FIELD(Type, Name) \ k##Name##Offset, k##Name##End = k##Name##Offset + sizeof(Type) - 1 enum Header { @@ -72,12 +77,19 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout { #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB FIELD(ObjectStartBitmap, ObjectStartBitmap), #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB + FIELD(size_t, WasUsedForAllocation), kMarkingBitmapOffset, - kMemoryChunkHeaderSize = kMarkingBitmapOffset, + kMemoryChunkHeaderSize = + kMarkingBitmapOffset + + ((kMarkingBitmapOffset % kMemoryChunkAlignment) == 0 + ? 
0 + : kMemoryChunkAlignment - + (kMarkingBitmapOffset % kMemoryChunkAlignment)), kMemoryChunkHeaderStart = kSlotSetOffset, kBasicMemoryChunkHeaderSize = kMemoryChunkHeaderStart, kBasicMemoryChunkHeaderStart = 0, }; +#undef FIELD static size_t CodePageGuardStartOffset(); static size_t CodePageGuardSize(); static intptr_t ObjectStartOffsetInCodePage(); @@ -89,6 +101,8 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout { static size_t AllocatableMemoryInMemoryChunk(AllocationSpace space); static int MaxRegularCodeObjectSize(); + + static_assert(kMemoryChunkHeaderSize % alignof(size_t) == 0); }; } // namespace internal diff --git a/deps/v8/src/heap/memory-chunk.cc b/deps/v8/src/heap/memory-chunk.cc index 0a5ffa17b94917..fd26d5d73b95a1 100644 --- a/deps/v8/src/heap/memory-chunk.cc +++ b/deps/v8/src/heap/memory-chunk.cc @@ -9,6 +9,7 @@ #include "src/common/globals.h" #include "src/heap/basic-memory-chunk.h" #include "src/heap/code-object-registry.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/memory-allocator.h" #include "src/heap/memory-chunk-inl.h" #include "src/heap/memory-chunk-layout.h" @@ -162,8 +163,7 @@ MemoryChunk::MemoryChunk(Heap* heap, BaseSpace* space, size_t chunk_size, categories_ = nullptr; - heap->incremental_marking()->non_atomic_marking_state()->SetLiveBytes(this, - 0); + heap->non_atomic_marking_state()->SetLiveBytes(this, 0); if (executable == EXECUTABLE) { SetFlag(IS_EXECUTABLE); if (heap->write_protect_code_memory()) { @@ -195,7 +195,10 @@ MemoryChunk::MemoryChunk(Heap* heap, BaseSpace* space, size_t chunk_size, } // All pages of a shared heap need to be marked with this flag. - if (heap->IsShared()) SetFlag(MemoryChunk::IN_SHARED_HEAP); + if (heap->IsShared() || owner()->identity() == SHARED_SPACE || + owner()->identity() == SHARED_LO_SPACE) { + SetFlag(MemoryChunk::IN_SHARED_HEAP); + } #ifdef DEBUG ValidateOffsets(this); @@ -233,6 +236,7 @@ void MemoryChunk::SetYoungGenerationPageFlags(bool is_marking) { // MemoryChunk implementation void MemoryChunk::ReleaseAllocatedMemoryNeededForWritableChunk() { + DCHECK(SweepingDone()); if (mutex_ != nullptr) { delete mutex_; mutex_ = nullptr; @@ -492,6 +496,17 @@ void MemoryChunk::ValidateOffsets(MemoryChunk* chunk) { DCHECK_EQ(reinterpret_cast<Address>(&chunk->possibly_empty_buckets_) - chunk->address(), MemoryChunkLayout::kPossiblyEmptyBucketsOffset); + DCHECK_EQ(reinterpret_cast<Address>(&chunk->active_system_pages_) - + chunk->address(), + MemoryChunkLayout::kActiveSystemPagesOffset); +#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB + DCHECK_EQ(reinterpret_cast<Address>(&chunk->object_start_bitmap_) - + chunk->address(), + MemoryChunkLayout::kObjectStartBitmapOffset); +#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB + DCHECK_EQ(reinterpret_cast<Address>(&chunk->was_used_for_allocation_) - + chunk->address(), + MemoryChunkLayout::kWasUsedForAllocationOffset); } #endif diff --git a/deps/v8/src/heap/memory-chunk.h b/deps/v8/src/heap/memory-chunk.h index 042072450d5bc8..906ff6a23e9061 100644 --- a/deps/v8/src/heap/memory-chunk.h +++ b/deps/v8/src/heap/memory-chunk.h @@ -220,6 +220,10 @@ class MemoryChunk : public BasicMemoryChunk { } #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB + void MarkWasUsedForAllocation() { was_used_for_allocation_ = true; } + void ClearWasUsedForAllocation() { was_used_for_allocation_ = false; } + bool WasUsedForAllocation() const { return was_used_for_allocation_; } + protected: // Release all memory allocated by the chunk. Should be called when memory // chunk is about to be freed. 
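The kMemoryChunkHeaderSize arithmetic added to memory-chunk-layout.h above pads kMarkingBitmapOffset up to kMemoryChunkAlignment so that the header size satisfies the new static_assert. Below is a free-standing restatement of that round-up, assuming a 64-bit target where the alignment is 8 (the patch derives it from sizeof(size_t), forcing 8 only on MSVC/ia32):

    // Header-size padding from MemoryChunkLayout, isolated so the
    // arithmetic can be checked at compile time. The alignment value of 8
    // is an assumption for 64-bit builds; see the #if in the patch.
    constexpr int kMemoryChunkAlignment = 8;

    constexpr int PadToChunkAlignment(int offset) {
      return offset + ((offset % kMemoryChunkAlignment) == 0
                           ? 0
                           : kMemoryChunkAlignment -
                                 (offset % kMemoryChunkAlignment));
    }

    // Already-aligned offsets pass through; everything else is rounded up
    // to the next boundary, which is what keeps the added static_assert
    // (kMemoryChunkHeaderSize % alignof(size_t) == 0) satisfied.
    static_assert(PadToChunkAlignment(24) == 24, "aligned offset unchanged");
    static_assert(PadToChunkAlignment(25) == 32, "rounded up to boundary");

The same rounding concern shows up in memory-chunk-layout.cc, where ObjectStartOffsetInDataPage now routes kDoubleSize through ALIGN_TO_ALLOCATION_ALIGNMENT.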
@@ -287,6 +291,10 @@ class MemoryChunk : public BasicMemoryChunk { ObjectStartBitmap object_start_bitmap_; #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB + // Marks a chunk that was used for allocation since it was last swept. Used + // only for new space pages. + size_t was_used_for_allocation_ = false; + private: friend class ConcurrentMarkingState; friend class MarkingState; diff --git a/deps/v8/src/heap/new-spaces-inl.h b/deps/v8/src/heap/new-spaces-inl.h index e4648502d6ddab..14c675380457f0 100644 --- a/deps/v8/src/heap/new-spaces-inl.h +++ b/deps/v8/src/heap/new-spaces-inl.h @@ -61,6 +61,7 @@ V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized( V8_INLINE bool SemiSpaceNewSpace::EnsureAllocation( int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin, int* out_max_aligned_size) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); #if DEBUG VerifyTop(); @@ -116,22 +117,21 @@ V8_INLINE bool PagedSpaceForNewSpace::EnsureAllocation( // ----------------------------------------------------------------------------- // SemiSpaceObjectIterator +SemiSpaceObjectIterator::SemiSpaceObjectIterator(const SemiSpaceNewSpace* space) + : current_(space->first_allocatable_address()) {} + HeapObject SemiSpaceObjectIterator::Next() { - while (current_ != limit_) { + while (true) { if (Page::IsAlignedToPageSize(current_)) { Page* page = Page::FromAllocationAreaAddress(current_); page = page->next_page(); - DCHECK(page); + if (page == nullptr) return HeapObject(); current_ = page->area_start(); - if (current_ == limit_) return HeapObject(); } HeapObject object = HeapObject::FromAddress(current_); - current_ += object.Size(); - if (!object.IsFreeSpaceOrFiller()) { - return object; - } + current_ += ALIGN_TO_ALLOCATION_ALIGNMENT(object.Size()); + if (!object.IsFreeSpaceOrFiller()) return object; } - return HeapObject(); } } // namespace internal diff --git a/deps/v8/src/heap/new-spaces.cc b/deps/v8/src/heap/new-spaces.cc index 4ba1f7889122d0..ad69308c730ef4 100644 --- a/deps/v8/src/heap/new-spaces.cc +++ b/deps/v8/src/heap/new-spaces.cc @@ -7,9 +7,12 @@ #include "src/common/globals.h" #include "src/heap/allocation-observer.h" #include "src/heap/array-buffer-sweeper.h" +#include "src/heap/gc-tracer-inl.h" #include "src/heap/heap-inl.h" #include "src/heap/incremental-marking.h" #include "src/heap/mark-compact.h" +#include "src/heap/marking-state-inl.h" +#include "src/heap/marking-state.h" #include "src/heap/memory-allocator.h" #include "src/heap/paged-spaces.h" #include "src/heap/safepoint.h" @@ -26,10 +29,7 @@ Page* SemiSpace::InitializePage(MemoryChunk* chunk) { page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking()); page->list_node().Initialize(); if (v8_flags.minor_mc) { - heap() - ->minor_mark_compact_collector() - ->non_atomic_marking_state() - ->ClearLiveness(page); + heap()->non_atomic_marking_state()->ClearLiveness(page); } page->InitializationMemoryFence(); return page; @@ -76,8 +76,7 @@ bool SemiSpace::EnsureCurrentCapacity() { } // Add more pages if we have less than expected_pages. 
- NonAtomicMarkingState* marking_state = - heap()->incremental_marking()->non_atomic_marking_state(); + NonAtomicMarkingState* marking_state = heap()->non_atomic_marking_state(); while (actual_pages < expected_pages) { actual_pages++; current_page = heap()->memory_allocator()->AllocatePage( @@ -133,6 +132,8 @@ bool SemiSpace::Commit() { } memory_chunk_list_.PushBack(new_page); IncrementCommittedPhysicalMemory(new_page->CommittedPhysicalMemory()); + heap()->CreateFillerObjectAt(new_page->area_start(), + static_cast<int>(new_page->area_size())); } Reset(); AccountCommitted(target_capacity_); @@ -181,8 +182,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) { DCHECK(IsAligned(delta, AllocatePageSize())); const int delta_pages = static_cast<int>(delta / Page::kPageSize); DCHECK(last_page()); - NonAtomicMarkingState* marking_state = - heap()->incremental_marking()->non_atomic_marking_state(); + NonAtomicMarkingState* marking_state = heap()->non_atomic_marking_state(); for (int pages_added = 0; pages_added < delta_pages; pages_added++) { Page* new_page = heap()->memory_allocator()->AllocatePage( MemoryAllocator::AllocationMode::kUsePool, this, NOT_EXECUTABLE); @@ -195,6 +195,8 @@ bool SemiSpace::GrowTo(size_t new_capacity) { IncrementCommittedPhysicalMemory(new_page->CommittedPhysicalMemory()); // Duplicate the flags that was set on the old page. new_page->SetFlags(last_page()->GetFlags(), Page::kCopyOnFlipFlagsMask); + heap()->CreateFillerObjectAt(new_page->area_start(), + static_cast<int>(new_page->area_size())); } AccountCommitted(delta); target_capacity_ = new_capacity; @@ -426,20 +428,6 @@ void SemiSpace::AssertValidRange(Address start, Address end) { } #endif -// ----------------------------------------------------------------------------- -// SemiSpaceObjectIterator implementation. - -SemiSpaceObjectIterator::SemiSpaceObjectIterator( - const SemiSpaceNewSpace* space) { - Initialize(space->first_allocatable_address(), space->top()); -} - -void SemiSpaceObjectIterator::Initialize(Address start, Address end) { - SemiSpace::AssertValidRange(start, end); - current_ = start; - limit_ = end; -} - // ----------------------------------------------------------------------------- // NewSpace implementation @@ -481,8 +469,7 @@ void NewSpace::VerifyTop() const { // We do not use the SemiSpaceObjectIterator because verification doesn't assume // that it works (it depends on the invariants we are checking). void NewSpace::VerifyImpl(Isolate* isolate, const Page* current_page, - Address current_address, - Address stop_iteration_at_address) const { + Address current_address) const { DCHECK(current_page->ContainsLimit(current_address)); size_t external_space_bytes[kNumTypes]; @@ -496,13 +483,8 @@ void NewSpace::VerifyImpl(Isolate* isolate, const Page* current_page, PtrComprCageBase cage_base(isolate); VerifyPointersVisitor visitor(heap()); const Page* page = current_page; - while (current_address != stop_iteration_at_address) { + while (true) { if (!Page::IsAlignedToPageSize(current_address)) { - // The allocation pointer should not be in the middle of an object. 
- CHECK_IMPLIES(!v8_flags.minor_mc, - !Page::FromAddress(current_address)->ContainsLimit(top()) || - current_address < top()); - HeapObject object = HeapObject::FromAddress(current_address); // The first word should be a map, and we expect all map pointers to @@ -530,7 +512,7 @@ void NewSpace::VerifyImpl(Isolate* isolate, const Page* current_page, string_size; } - current_address += size; + current_address += ALIGN_TO_ALLOCATION_ALIGNMENT(size); } else { // At end of page, switch to next page. page = page->next_page(); @@ -554,7 +536,7 @@ void NewSpace::VerifyImpl(Isolate* isolate, const Page* current_page, } #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB - page->object_start_bitmap()->Verify(); + current_page->object_start_bitmap()->Verify(); #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB } #endif // VERIFY_HEAP @@ -562,6 +544,7 @@ void NewSpace::VerifyImpl(Isolate* isolate, const Page* current_page, void NewSpace::PromotePageToOldSpace(Page* page) { DCHECK(!page->IsFlagSet(Page::PAGE_NEW_OLD_PROMOTION)); DCHECK(page->InYoungGeneration()); + page->ClearWasUsedForAllocation(); RemovePage(page); Page* new_page = Page::ConvertNewToOld(page); DCHECK(!new_page->InYoungGeneration()); @@ -660,6 +643,10 @@ void SemiSpaceNewSpace::UpdateLinearAllocationArea(Address known_top) { linear_area_original_data_.set_original_top_release(top()); } + // The linear allocation area should reach the end of the page, so no filler + // object is needed there to make the page iterable. + DCHECK_EQ(limit(), to_space_.page_high()); + to_space_.AddRangeToActiveSystemPages(top(), limit()); DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); @@ -670,8 +657,7 @@ void SemiSpaceNewSpace::ResetLinearAllocationArea() { to_space_.Reset(); UpdateLinearAllocationArea(); // Clear all mark-bits in the to-space. - NonAtomicMarkingState* marking_state = - heap()->incremental_marking()->non_atomic_marking_state(); + NonAtomicMarkingState* marking_state = heap()->non_atomic_marking_state(); for (Page* p : to_space_) { marking_state->ClearLiveness(p); // Concurrent marking may have local live bytes for this page. @@ -680,12 +666,18 @@ void SemiSpaceNewSpace::ResetLinearAllocationArea() { } void SemiSpaceNewSpace::UpdateInlineAllocationLimit(size_t min_size) { - Address new_limit = ComputeLimit(top(), to_space_.page_high(), min_size); + Address new_limit = ComputeLimit(top(), to_space_.page_high(), + ALIGN_TO_ALLOCATION_ALIGNMENT(min_size)); DCHECK_LE(top(), new_limit); DCHECK_LE(new_limit, to_space_.page_high()); allocation_info_.SetLimit(new_limit); DCHECK_SEMISPACE_ALLOCATION_INFO(allocation_info_, to_space_); + // Add a filler object after the linear allocation area (if there is space + // left), to ensure that the page will be iterable. + heap()->CreateFillerObjectAt( + limit(), static_cast<int>(to_space_.page_high() - limit())); + #if DEBUG VerifyTop(); #endif @@ -770,7 +762,7 @@ void SemiSpaceNewSpace::Verify(Isolate* isolate) const { Address current = to_space_.first_page()->area_start(); CHECK_EQ(current, to_space_.space_start()); - VerifyImpl(isolate, Page::FromAllocationAreaAddress(current), current, top()); + VerifyImpl(isolate, Page::FromAllocationAreaAddress(current), current); // Check semi-spaces. 
CHECK_EQ(from_space_.id(), kFromSpace); @@ -780,6 +772,37 @@ void SemiSpaceNewSpace::Verify(Isolate* isolate) const { } #endif // VERIFY_HEAP +void SemiSpaceNewSpace::MakeIterable() { + MakeAllPagesInFromSpaceIterable(); + MakeUnusedPagesInToSpaceIterable(); +} + +void SemiSpaceNewSpace::MakeAllPagesInFromSpaceIterable() { + if (!IsFromSpaceCommitted()) return; + + // Fix all pages in the "from" semispace. + for (Page* page : from_space()) { + heap()->CreateFillerObjectAt(page->area_start(), + static_cast<int>(page->area_size())); + } +} + +void SemiSpaceNewSpace::MakeUnusedPagesInToSpaceIterable() { + PageIterator it(to_space().current_page()); + + // Fix the current page, above the LAB. + DCHECK_NOT_NULL(*it); + DCHECK((*it)->Contains(limit())); + heap()->CreateFillerObjectAt(limit(), + static_cast<int>((*it)->area_end() - limit())); + + // Fix the remaining unused pages in the "to" semispace. + for (Page* page = *(++it); page != nullptr; page = *(++it)) { + heap()->CreateFillerObjectAt(page->area_start(), + static_cast<int>(page->area_size())); + } +} + #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB void SemiSpaceNewSpace::ClearUnusedObjectStartBitmaps() { if (!IsFromSpaceCommitted()) return; @@ -918,10 +941,7 @@ Page* PagedSpaceForNewSpace::InitializePage(MemoryChunk* chunk) { page->ResetAllocationStatistics(); page->SetFlags(Page::TO_PAGE); page->SetYoungGenerationPageFlags(heap()->incremental_marking()->IsMarking()); - heap() - ->minor_mark_compact_collector() - ->non_atomic_marking_state() - ->ClearLiveness(page); + heap()->non_atomic_marking_state()->ClearLiveness(page); page->AllocateFreeListCategories(); page->InitializeFreeListCategories(); page->list_node().Initialize(); @@ -941,23 +961,29 @@ void PagedSpaceForNewSpace::Grow() { CHECK(EnsureCurrentCapacity()); } -void PagedSpaceForNewSpace::Shrink() { - target_capacity_ = +bool PagedSpaceForNewSpace::StartShrinking() { + DCHECK_EQ(current_capacity_, target_capacity_); + DCHECK(heap()->tracer()->IsInAtomicPause()); + size_t new_target_capacity = RoundUp(std::max(initial_capacity_, 2 * Size()), Page::kPageSize); - if (target_capacity_ < current_capacity_) { - // Try to shrink by freeing empty pages. - for (Page* page = first_page(); - page != last_page() && (current_capacity_ > target_capacity_);) { - Page* current_page = page; - page = page->next_page(); - if (current_page->allocated_bytes() == 0) { - memory_chunk_list().Remove(current_page); - ReleasePage(current_page); - } + if (new_target_capacity > target_capacity_) return false; + target_capacity_ = new_target_capacity; + return true; +} + +void PagedSpaceForNewSpace::FinishShrinking() { + DCHECK(heap()->tracer()->IsInAtomicPause()); + if (current_capacity_ > target_capacity_) { +#if DEBUG + // If `current_capacity_` is higher than `target_capacity_`, i.e. the + // space could not be shrunk all the way down to `target_capacity_`, it + // must mean that all pages contain live objects. + for (Page* page : *this) { + DCHECK_NE(0, heap()->non_atomic_marking_state()->live_bytes(page)); } +#endif // DEBUG + target_capacity_ = current_capacity_; } - // Shrinking to target capacity may not have been possible. 
- target_capacity_ = current_capacity_; } void PagedSpaceForNewSpace::UpdateInlineAllocationLimit(size_t size_in_bytes) { @@ -982,15 +1008,7 @@ void PagedSpaceForNewSpace::ReleasePage(Page* page) { PagedSpaceBase::ReleasePage(page); } -bool PagedSpaceForNewSpace::AddFreshPage() { - DCHECK_LE(TotalCapacity(), MaximumCapacity()); - if (current_capacity_ >= target_capacity_) return false; - return EnsureCurrentCapacity(); -} - bool PagedSpaceForNewSpace::PreallocatePages() { - // Verify that the free space map is already initialized. Otherwise, new free - // list entries will be invalid. while (current_capacity_ < target_capacity_) { if (!TryExpandImpl()) return false; } @@ -1001,7 +1019,8 @@ bool PagedSpaceForNewSpace::PreallocatePages() { bool PagedSpaceForNewSpace::EnsureCurrentCapacity() { // Verify that the free space map is already initialized. Otherwise, new free // list entries will be invalid. - DCHECK_NE(0, heap()->isolate()->root(RootIndex::kFreeSpaceMap).ptr()); + DCHECK_NE(kNullAddress, + heap()->isolate()->root(RootIndex::kFreeSpaceMap).ptr()); return PreallocatePages(); } @@ -1022,6 +1041,10 @@ void PagedSpaceForNewSpace::Verify(Isolate* isolate, } #endif // VERIFY_HEAP +bool PagedSpaceForNewSpace::ShouldReleasePage() const { + return current_capacity_ > target_capacity_; +} + // ----------------------------------------------------------------------------- // PagedNewSpace implementation @@ -1044,10 +1067,7 @@ PagedNewSpace::~PagedNewSpace() { void PagedNewSpace::Verify(Isolate* isolate) const { const Page* first_page = paged_space_.first_page(); - if (first_page) { - // No bailout needed since all pages are iterable. - VerifyImpl(isolate, first_page, first_page->area_start(), kNullAddress); - } + if (first_page) VerifyImpl(isolate, first_page, first_page->area_start()); // Check paged-spaces. VerifyPointersVisitor visitor(heap()); diff --git a/deps/v8/src/heap/new-spaces.h b/deps/v8/src/heap/new-spaces.h index ebfca7edc0c209..b0e61af300e882 100644 --- a/deps/v8/src/heap/new-spaces.h +++ b/deps/v8/src/heap/new-spaces.h @@ -8,6 +8,7 @@ #include <atomic> #include <memory> +#include "src/base/logging.h" #include "src/base/macros.h" #include "src/base/platform/mutex.h" #include "src/common/globals.h" @@ -217,24 +218,17 @@ class SemiSpace final : public Space { }; // A SemiSpaceObjectIterator is an ObjectIterator that iterates over the active -// semispace of the heap's new space. It iterates over the objects in the -// semispace from a given start address (defaulting to the bottom of the -// semispace) to the top of the semispace. New objects allocated after the -// iterator is created are not iterated. +// semispace of the heap's new space. class SemiSpaceObjectIterator : public ObjectIterator { public: - // Create an iterator over the allocated objects in the given to-space. - explicit SemiSpaceObjectIterator(const SemiSpaceNewSpace* space); + // Create an iterator over the objects in the given to-space. + inline explicit SemiSpaceObjectIterator(const SemiSpaceNewSpace* space); inline HeapObject Next() final; private: - void Initialize(Address start, Address end); - // The current iteration point. Address current_; - // The end of iteration. - Address limit_; }; class NewSpace : NON_EXPORTED_BASE(public SpaceWithLinearArea) { @@ -294,14 +288,15 @@ class NewSpace : NON_EXPORTED_BASE(public SpaceWithLinearArea) { #ifdef VERIFY_HEAP virtual void Verify(Isolate* isolate) const = 0; - // VerifyImpl verifies objects on the space starting from |page| and - // |address|. 
|address| should be a valid limit on |page| (see - // BasicMemoryChunk::ContainsLimit). + // VerifyImpl verifies objects on the space starting from |current_page| and + // |current_address|. |current_address| should be a valid limit on + // |current_page| (see BasicMemoryChunk::ContainsLimit). void VerifyImpl(Isolate* isolate, const Page* current_page, - Address current_address, - Address stop_iteration_at_address) const; + Address current_address) const; #endif + virtual void MakeIterable() = 0; + #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB virtual void ClearUnusedObjectStartBitmaps() = 0; #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB @@ -483,6 +478,11 @@ class V8_EXPORT_PRIVATE SemiSpaceNewSpace final : public NewSpace { void Print() override { to_space_.Print(); } #endif + void MakeIterable() override; + + void MakeAllPagesInFromSpaceIterable(); + void MakeUnusedPagesInToSpaceIterable(); + #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB void ClearUnusedObjectStartBitmaps() override; #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB @@ -565,7 +565,9 @@ class V8_EXPORT_PRIVATE PagedSpaceForNewSpace final : public PagedSpaceBase { void Grow(); // Shrink the capacity of the space. - void Shrink(); + void Shrink() { UNREACHABLE(); } + bool StartShrinking(); + void FinishShrinking(); size_t AllocatedSinceLastGC() const { // allocated since last gc is compiuted as allocated linear areas minus @@ -599,7 +601,7 @@ class V8_EXPORT_PRIVATE PagedSpaceForNewSpace final : public PagedSpaceBase { // Returns false if this isn't possible or reasonable (i.e., there // are no pages, or the current page is already empty), or true // if successful. - bool AddFreshPage(); + bool AddFreshPage() { return false; } bool EnsureAllocation(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin, @@ -625,10 +627,14 @@ class V8_EXPORT_PRIVATE PagedSpaceForNewSpace final : public PagedSpaceBase { void Verify(Isolate* isolate, ObjectVisitor* visitor) const final; #endif + void MakeIterable() { free_list()->RepairLists(heap()); } + #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB void ClearUnusedObjectStartBitmaps() {} #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB + bool ShouldReleasePage() const; + private: bool PreallocatePages(); @@ -664,11 +670,13 @@ class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace { // Shrink the capacity of the space. void Shrink() final { paged_space_.Shrink(); } + bool StartShrinking() { return paged_space_.StartShrinking(); } + void FinishShrinking() { paged_space_.FinishShrinking(); } // Return the allocated bytes in the active space. size_t Size() const final { return paged_space_.Size(); } - size_t SizeOfObjects() const final { return Size(); } + size_t SizeOfObjects() const final { return paged_space_.SizeOfObjects(); } // Return the allocatable capacity of the space. size_t Capacity() const final { return paged_space_.Capacity(); } @@ -781,7 +789,9 @@ class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace { paged_space_.MakeLinearAllocationAreaIterable(); } - PagedSpaceBase* paged_space() { return &paged_space_; } + PagedSpaceForNewSpace* paged_space() { return &paged_space_; } + + void MakeIterable() override { paged_space_.MakeIterable(); } #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB void ClearUnusedObjectStartBitmaps() override { @@ -789,6 +799,12 @@ class V8_EXPORT_PRIVATE PagedNewSpace final : public NewSpace { } #endif // V8_ENABLE_INNER_POINTER_RESOLUTION_OSB + // All operations on `memory_chunk_list_` should go through `paged_space_`. 
+ heap::List<MemoryChunk>& memory_chunk_list() final { UNREACHABLE(); } + + bool ShouldReleasePage() const { return paged_space_.ShouldReleasePage(); } + void ReleasePage(Page* page) { paged_space_.ReleasePage(page); } + private: bool EnsureAllocation(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin, diff --git a/deps/v8/src/heap/object-stats.cc b/deps/v8/src/heap/object-stats.cc index d28d03dc494191..e15b5f332e746b 100644 --- a/deps/v8/src/heap/object-stats.cc +++ b/deps/v8/src/heap/object-stats.cc @@ -15,6 +15,7 @@ #include "src/heap/combined-heap.h" #include "src/heap/heap-inl.h" #include "src/heap/mark-compact.h" +#include "src/heap/marking-state-inl.h" #include "src/logging/counters.h" #include "src/objects/compilation-cache-table-inl.h" #include "src/objects/heap-object.h" @@ -448,9 +449,9 @@ class ObjectStatsCollectorImpl { return field_stats_collector_.cage_base(); } - Heap* heap_; - ObjectStats* stats_; - NonAtomicMarkingState* marking_state_; + Heap* const heap_; + ObjectStats* const stats_; + NonAtomicMarkingState* const marking_state_; std::unordered_set<HeapObject, Object::Hasher, Object::KeyEqualSafe> virtual_objects_; std::unordered_set<Address> external_resources_; @@ -461,8 +462,7 @@ ObjectStatsCollectorImpl::ObjectStatsCollectorImpl(Heap* heap, ObjectStats* stats) : heap_(heap), stats_(stats), - marking_state_( - heap->mark_compact_collector()->non_atomic_marking_state()), + marking_state_(heap->non_atomic_marking_state()), field_stats_collector_( heap_, &stats->tagged_fields_count_, &stats->embedder_fields_count_, &stats->inobject_smi_fields_count_, @@ -1089,8 +1089,7 @@ class ObjectStatsVisitor { ObjectStatsCollectorImpl::Phase phase) : live_collector_(live_collector), dead_collector_(dead_collector), - marking_state_( - heap->mark_compact_collector()->non_atomic_marking_state()), + marking_state_(heap->non_atomic_marking_state()), phase_(phase) {} void Visit(HeapObject obj) { @@ -1105,9 +1104,9 @@ class ObjectStatsVisitor { } private: - ObjectStatsCollectorImpl* live_collector_; - ObjectStatsCollectorImpl* dead_collector_; - NonAtomicMarkingState* marking_state_; + ObjectStatsCollectorImpl* const live_collector_; + ObjectStatsCollectorImpl* const dead_collector_; + NonAtomicMarkingState* const marking_state_; ObjectStatsCollectorImpl::Phase phase_; }; diff --git a/deps/v8/src/heap/paged-spaces-inl.h b/deps/v8/src/heap/paged-spaces-inl.h index 341cc40569e027..6283e00540a33d 100644 --- a/deps/v8/src/heap/paged-spaces-inl.h +++ b/deps/v8/src/heap/paged-spaces-inl.h @@ -29,7 +29,7 @@ HeapObject PagedSpaceObjectIterator::Next() { HeapObject PagedSpaceObjectIterator::FromCurrentPage() { while (cur_addr_ != cur_end_) { HeapObject obj = HeapObject::FromAddress(cur_addr_); - const int obj_size = obj.Size(cage_base()); + const int obj_size = ALIGN_TO_ALLOCATION_ALIGNMENT(obj.Size(cage_base())); cur_addr_ += obj_size; DCHECK_LE(cur_addr_, cur_end_); if (!obj.IsFreeSpaceOrFiller(cage_base())) { diff --git a/deps/v8/src/heap/paged-spaces.cc b/deps/v8/src/heap/paged-spaces.cc index c2c62658244907..7159a1e8779a03 100644 --- a/deps/v8/src/heap/paged-spaces.cc +++ b/deps/v8/src/heap/paged-spaces.cc @@ -14,8 +14,11 @@ #include "src/execution/vm-state-inl.h" #include "src/heap/allocation-observer.h" #include "src/heap/array-buffer-sweeper.h" +#include "src/heap/gc-tracer-inl.h" +#include "src/heap/gc-tracer.h" #include "src/heap/heap.h" #include "src/heap/incremental-marking.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/memory-allocator.h" 
#include "src/heap/memory-chunk-inl.h" #include "src/heap/memory-chunk-layout.h" @@ -139,12 +142,14 @@ void PagedSpaceBase::TearDown() { accounting_stats_.Clear(); } -void PagedSpaceBase::RefillFreeList(Sweeper* sweeper) { +void PagedSpaceBase::RefillFreeList() { // Any PagedSpace might invoke RefillFreeList. We filter all but our old // generation spaces out. DCHECK(identity() == OLD_SPACE || identity() == CODE_SPACE || - identity() == MAP_SPACE || identity() == NEW_SPACE); + identity() == MAP_SPACE || identity() == NEW_SPACE || + identity() == SHARED_SPACE); + Sweeper* sweeper = heap()->sweeper(); size_t added = 0; { @@ -283,8 +288,7 @@ bool PagedSpaceBase::ContainsSlow(Address addr) const { void PagedSpaceBase::RefineAllocatedBytesAfterSweeping(Page* page) { CHECK(page->SweepingDone()); - auto marking_state = - heap()->mark_compact_collector()->non_atomic_marking_state(); + auto marking_state = heap()->non_atomic_marking_state(); // The live_byte on the page was accounted in the space allocated // bytes counter. After sweeping allocated_bytes() contains the // accurate live byte count on the page. @@ -426,9 +430,14 @@ int PagedSpaceBase::CountTotalPages() const { void PagedSpaceBase::SetLinearAllocationArea(Address top, Address limit) { SetTopAndLimit(top, limit); - if (top != kNullAddress && top != limit && identity() != NEW_SPACE && - heap()->incremental_marking()->black_allocation()) { - Page::FromAllocationAreaAddress(top)->CreateBlackArea(top, limit); + if (top != kNullAddress && top != limit) { + Page* page = Page::FromAllocationAreaAddress(top); + if (identity() == NEW_SPACE) { + page->MarkWasUsedForAllocation(); + } else if (heap()->incremental_marking()->black_allocation()) { + DCHECK_NE(NEW_SPACE, identity()); + page->CreateBlackArea(top, limit); + } } } @@ -534,21 +543,22 @@ void PagedSpaceBase::FreeLinearAllocationArea() { GetUnprotectMemoryOrigin(is_compaction_space())); } - DCHECK_IMPLIES(current_limit - current_top >= 2 * kTaggedSize, - heap()->incremental_marking()->marking_state()->IsWhite( - HeapObject::FromAddress(current_top))); + DCHECK_IMPLIES( + current_limit - current_top >= 2 * kTaggedSize, + heap()->marking_state()->IsWhite(HeapObject::FromAddress(current_top))); Free(current_top, current_limit - current_top, SpaceAccountingMode::kSpaceAccounted); } void PagedSpaceBase::ReleasePage(Page* page) { - DCHECK_EQ( - 0, heap()->incremental_marking()->non_atomic_marking_state()->live_bytes( - page)); + DCHECK(page->SweepingDone()); + DCHECK_EQ(0, heap()->non_atomic_marking_state()->live_bytes(page)); DCHECK_EQ(page->owner(), this); DCHECK_IMPLIES(identity() == NEW_SPACE, page->IsFlagSet(Page::TO_PAGE)); + memory_chunk_list().Remove(page); + free_list_->EvictFreeListItems(page); if (Page::FromAllocationAreaAddress(allocation_info_.top()) == page) { @@ -648,76 +658,6 @@ bool PagedSpaceBase::TryAllocationFromFreeListMain(size_t size_in_bytes, return true; } -base::Optional<std::pair<Address, size_t>> -PagedSpaceBase::RawAllocateBackground(LocalHeap* local_heap, - size_t min_size_in_bytes, - size_t max_size_in_bytes, - AllocationOrigin origin) { - DCHECK(!is_compaction_space()); - DCHECK(identity() == OLD_SPACE || identity() == CODE_SPACE || - identity() == MAP_SPACE); - DCHECK(origin == AllocationOrigin::kRuntime || - origin == AllocationOrigin::kGC); - DCHECK_IMPLIES(!local_heap, origin == AllocationOrigin::kGC); - - base::Optional<std::pair<Address, size_t>> result = - TryAllocationFromFreeListBackground(min_size_in_bytes, max_size_in_bytes, - origin); - if (result) 
return result; - - MarkCompactCollector* collector = heap()->mark_compact_collector(); - // Sweeping is still in progress. - if (collector->sweeping_in_progress()) { - // First try to refill the free-list, concurrent sweeper threads - // may have freed some objects in the meantime. - RefillFreeList(collector->sweeper()); - - // Retry the free list allocation. - result = TryAllocationFromFreeListBackground(min_size_in_bytes, - max_size_in_bytes, origin); - if (result) return result; - - if (IsSweepingAllowedOnThread(local_heap)) { - // Now contribute to sweeping from background thread and then try to - // reallocate. - const int kMaxPagesToSweep = 1; - int max_freed = collector->sweeper()->ParallelSweepSpace( - identity(), Sweeper::SweepingMode::kLazyOrConcurrent, - static_cast<int>(min_size_in_bytes), kMaxPagesToSweep); - - // Keep new space sweeping atomic. - RefillFreeList(collector->sweeper()); - - if (static_cast<size_t>(max_freed) >= min_size_in_bytes) { - result = TryAllocationFromFreeListBackground(min_size_in_bytes, - max_size_in_bytes, origin); - if (result) return result; - } - } - } - - if (heap()->ShouldExpandOldGenerationOnSlowAllocation(local_heap) && - heap()->CanExpandOldGenerationBackground(local_heap, AreaSize())) { - result = TryExpandBackground(max_size_in_bytes); - if (result) return result; - } - - if (collector->sweeping_in_progress()) { - // Complete sweeping for this space. - if (IsSweepingAllowedOnThread(local_heap)) { - collector->DrainSweepingWorklistForSpace(identity()); - } - - RefillFreeList(collector->sweeper()); - - // Last try to acquire memory from free list. - return TryAllocationFromFreeListBackground(min_size_in_bytes, - max_size_in_bytes, origin); - } - - return {}; -} - base::Optional<std::pair<Address, size_t>> PagedSpaceBase::TryAllocationFromFreeListBackground(size_t min_size_in_bytes, size_t max_size_in_bytes, @@ -725,7 +665,7 @@ PagedSpaceBase::TryAllocationFromFreeListBackground(size_t min_size_in_bytes, base::MutexGuard lock(&space_mutex_); DCHECK_LE(min_size_in_bytes, max_size_in_bytes); DCHECK(identity() == OLD_SPACE || identity() == CODE_SPACE || - identity() == MAP_SPACE); + identity() == MAP_SPACE || identity() == SHARED_SPACE); size_t new_node_size = 0; FreeSpace new_node = @@ -743,8 +683,6 @@ PagedSpaceBase::TryAllocationFromFreeListBackground(size_t min_size_in_bytes, Page* page = Page::FromHeapObject(new_node); IncreaseAllocatedBytes(new_node_size, page); - heap()->StartIncrementalMarkingIfAllocationLimitIsReachedBackground(); - size_t used_size_in_bytes = std::min(new_node_size, max_size_in_bytes); Address start = new_node.address(); @@ -764,12 +702,6 @@ PagedSpaceBase::TryAllocationFromFreeListBackground(size_t min_size_in_bytes, return std::make_pair(start, used_size_in_bytes); } -bool PagedSpaceBase::IsSweepingAllowedOnThread(LocalHeap* local_heap) const { - // Code space sweeping is only allowed on main thread. 
- return (local_heap && local_heap->is_main_thread()) || - identity() != CODE_SPACE; -} - #ifdef DEBUG void PagedSpaceBase::Print() {} #endif @@ -788,6 +720,7 @@ void PagedSpaceBase::Verify(Isolate* isolate, ObjectVisitor* visitor) const { PtrComprCageBase cage_base(isolate); for (const Page* page : *this) { CHECK_EQ(page->owner(), this); + CHECK_IMPLIES(identity() != NEW_SPACE, !page->WasUsedForAllocation()); for (int i = 0; i < kNumTypes; i++) { external_page_bytes[static_cast<ExternalBackingStoreType>(i)] = 0; @@ -866,7 +799,7 @@ void PagedSpaceBase::Verify(Isolate* isolate, ObjectVisitor* visitor) const { } void PagedSpaceBase::VerifyLiveBytes() const { - MarkingState* marking_state = heap()->incremental_marking()->marking_state(); + MarkingState* marking_state = heap()->marking_state(); PtrComprCageBase cage_base(heap()->isolate()); for (const Page* page : *this) { CHECK(page->SweepingDone()); @@ -895,7 +828,7 @@ void PagedSpaceBase::VerifyCountersAfterSweeping(Heap* heap) const { size_t real_allocated = 0; for (HeapObject object = it.Next(); !object.is_null(); object = it.Next()) { if (!object.IsFreeSpaceOrFiller()) { - real_allocated += object.Size(cage_base); + real_allocated += ALIGN_TO_ALLOCATION_ALIGNMENT(object.Size(cage_base)); } } total_allocated += page->allocated_bytes(); @@ -911,8 +844,7 @@ void PagedSpaceBase::VerifyCountersAfterSweeping(Heap* heap) const { void PagedSpaceBase::VerifyCountersBeforeConcurrentSweeping() const { size_t total_capacity = 0; size_t total_allocated = 0; - auto marking_state = - heap()->incremental_marking()->non_atomic_marking_state(); + auto marking_state = heap()->non_atomic_marking_state(); for (const Page* page : *this) { size_t page_allocated = page->SweepingDone() @@ -940,11 +872,6 @@ void PagedSpaceBase::UpdateInlineAllocationLimit(size_t min_size) { // ----------------------------------------------------------------------------- // OldSpace implementation -void PagedSpaceBase::PrepareForMarkCompact() { - // Clear the free list before a full GC---it will be rebuilt afterward. - free_list_->Reset(); -} - bool PagedSpaceBase::RefillLabMain(int size_in_bytes, AllocationOrigin origin) { VMState<GC> state(heap()->isolate()); RCS_SCOPE(heap()->isolate(), @@ -989,21 +916,33 @@ bool PagedSpaceBase::RawRefillLabMain(int size_in_bytes, return false; } - MarkCompactCollector* collector = heap()->mark_compact_collector(); + const bool is_main_thread = + heap()->IsMainThread() || heap()->IsSharedMainThread(); + const auto sweeping_scope_id = is_main_thread + ? GCTracer::Scope::MC_SWEEP + : GCTracer::Scope::MC_BACKGROUND_SWEEPING; + const auto sweeping_scope_kind = + is_main_thread ? ThreadKind::kMain : ThreadKind::kBackground; // Sweeping is still in progress. - if (collector->sweeping_in_progress()) { + if (heap()->sweeping_in_progress()) { // First try to refill the free-list, concurrent sweeper threads // may have freed some objects in the meantime. - RefillFreeList(collector->sweeper()); + { + TRACE_GC_EPOCH(heap()->tracer(), sweeping_scope_id, sweeping_scope_kind); + RefillFreeList(); + } // Retry the free list allocation. 
if (TryAllocationFromFreeListMain(static_cast<size_t>(size_in_bytes), origin)) return true; - if (ContributeToSweepingMain(size_in_bytes, kMaxPagesToSweep, size_in_bytes, - origin)) - return true; + { + TRACE_GC_EPOCH(heap()->tracer(), sweeping_scope_id, sweeping_scope_kind); + if (ContributeToSweepingMain(size_in_bytes, kMaxPagesToSweep, + size_in_bytes, origin)) + return true; + } } if (is_compaction_space()) { @@ -1029,8 +968,9 @@ bool PagedSpaceBase::RawRefillLabMain(int size_in_bytes, } // Try sweeping all pages. - if (ContributeToSweepingMain(0, 0, size_in_bytes, origin)) { - return true; + { + TRACE_GC_EPOCH(heap()->tracer(), sweeping_scope_id, sweeping_scope_kind); + if (ContributeToSweepingMain(0, 0, size_in_bytes, origin)) return true; } if (heap()->gc_state() != Heap::NOT_IN_GC && !heap()->force_oom()) { @@ -1054,11 +994,10 @@ bool PagedSpaceBase::ContributeToSweepingMain(int required_freed_bytes, is_compaction_space() ? Sweeper::SweepingMode::kEagerDuringGC : Sweeper::SweepingMode::kLazyOrConcurrent; - MarkCompactCollector* collector = heap()->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->sweeper()->ParallelSweepSpace(identity(), sweeping_mode, - required_freed_bytes, max_pages); - RefillFreeList(collector->sweeper()); + if (heap()->sweeping_in_progress()) { + heap()->sweeper()->ParallelSweepSpace(identity(), sweeping_mode, + required_freed_bytes, max_pages); + RefillFreeList(); return TryAllocationFromFreeListMain(size_in_bytes, origin); } return false; diff --git a/deps/v8/src/heap/paged-spaces.h b/deps/v8/src/heap/paged-spaces.h index 7241a29b0e3476..70da63e53d6da6 100644 --- a/deps/v8/src/heap/paged-spaces.h +++ b/deps/v8/src/heap/paged-spaces.h @@ -110,9 +110,6 @@ class V8_EXPORT_PRIVATE PagedSpaceBase // Does the space need executable memory? Executability executable() const { return executable_; } - // Prepares for a mark-compact GC. - void PrepareForMarkCompact(); - // Current capacity without growing (Size() + Available()). size_t Capacity() const { return accounting_stats_.Capacity(); } @@ -280,7 +277,7 @@ class V8_EXPORT_PRIVATE PagedSpaceBase // Refills the free list from the corresponding free list filled by the // sweeper. - void RefillFreeList(Sweeper* sweeper); + void RefillFreeList(); base::Mutex* mutex() { return &space_mutex_; } @@ -321,6 +318,19 @@ class V8_EXPORT_PRIVATE PagedSpaceBase void ReduceActiveSystemPages(Page* page, ActiveSystemPages active_system_pages); + // Allocates memory with the given size constraints from the space's free + // list. + V8_WARN_UNUSED_RESULT base::Optional<std::pair<Address, size_t>> + TryAllocationFromFreeListBackground(size_t min_size_in_bytes, + size_t max_size_in_bytes, + AllocationOrigin origin); + + // Expands the space by a single page from a background thread and allocates + // a memory area of the given size in it. If successful the method returns + // the address and size of the area. + base::Optional<std::pair<Address, size_t>> TryExpandBackground( + size_t size_in_bytes); + private: class ConcurrentAllocationMutex { public: @@ -353,10 +363,6 @@ class V8_EXPORT_PRIVATE PagedSpaceBase bool HasPages() const { return first_page() != nullptr; } - // Returns whether sweeping of this space is safe on this thread. Code space - // sweeping is only allowed on the main thread. - bool IsSweepingAllowedOnThread(LocalHeap* local_heap) const; - // Cleans up the space, frees all pages in this space except those belonging // to the initial chunk, uncommits addresses in the initial chunk. 
void TearDown(); @@ -366,12 +372,6 @@ class V8_EXPORT_PRIVATE PagedSpaceBase // size limit has been hit. virtual Page* TryExpandImpl(); - // Expands the space by a single page from a background thread and allocates - // a memory area of the given size in it. If successful the method returns - // the address and size of the area. - base::Optional<std::pair<Address, size_t>> TryExpandBackground( - size_t size_in_bytes); - bool EnsureAllocation(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin, int* out_max_aligned_size) override; @@ -395,11 +395,6 @@ class V8_EXPORT_PRIVATE PagedSpaceBase V8_WARN_UNUSED_RESULT bool RawRefillLabMain(int size_in_bytes, AllocationOrigin origin); - V8_WARN_UNUSED_RESULT base::Optional<std::pair<Address, size_t>> - TryAllocationFromFreeListBackground(size_t min_size_in_bytes, - size_t max_size_in_bytes, - AllocationOrigin origin); - V8_WARN_UNUSED_RESULT bool TryExpand(int size_in_bytes, AllocationOrigin origin); @@ -483,7 +478,9 @@ class CompactionSpaceCollection : public Malloced { map_space_(heap, MAP_SPACE, Executability::NOT_EXECUTABLE, compaction_space_kind), code_space_(heap, CODE_SPACE, Executability::EXECUTABLE, - compaction_space_kind) {} + compaction_space_kind), + shared_space_(heap, SHARED_SPACE, Executability::NOT_EXECUTABLE, + compaction_space_kind) {} CompactionSpace* Get(AllocationSpace space) { switch (space) { @@ -493,6 +490,8 @@ class CompactionSpaceCollection : public Malloced { return &map_space_; case CODE_SPACE: return &code_space_; + case SHARED_SPACE: + return &shared_space_; default: UNREACHABLE(); } @@ -503,6 +502,7 @@ class CompactionSpaceCollection : public Malloced { CompactionSpace old_space_; CompactionSpace map_space_; CompactionSpace code_space_; + CompactionSpace shared_space_; }; // ----------------------------------------------------------------------------- @@ -554,7 +554,9 @@ class MapSpace final : public PagedSpace { paged_allocation_info_) {} int RoundSizeDownToObjectAlignment(int size) const override { - if (base::bits::IsPowerOfTwo(Map::kSize)) { + if (V8_COMPRESS_POINTERS_8GB_BOOL) { + return RoundDown(size, kObjectAlignment8GbHeap); + } else if (base::bits::IsPowerOfTwo(Map::kSize)) { return RoundDown(size, Map::kSize); } else { return (size / Map::kSize) * Map::kSize; @@ -571,6 +573,32 @@ class MapSpace final : public PagedSpace { LinearAllocationArea paged_allocation_info_; }; +// ----------------------------------------------------------------------------- +// Shared space regular object space. + +class SharedSpace final : public PagedSpace { + public: + // Creates a shared space object. The constructor does not allocate pages + // from the OS. + explicit SharedSpace(Heap* heap) + : PagedSpace(heap, SHARED_SPACE, NOT_EXECUTABLE, + FreeList::CreateFreeList(), allocation_info) {} + + static bool IsAtPageStart(Address addr) { + return static_cast<intptr_t>(addr & kPageAlignmentMask) == + MemoryChunkLayout::ObjectStartOffsetInDataPage(); + } + + size_t ExternalBackingStoreBytes(ExternalBackingStoreType type) const final { + if (type == ExternalBackingStoreType::kArrayBuffer) return 0; + DCHECK_EQ(type, ExternalBackingStoreType::kExternalString); + return external_backing_store_bytes_[type]; + } + + private: + LinearAllocationArea allocation_info; +}; + // Iterates over the chunks (pages and large object pages) that can contain // pointers to new space or to evacuation candidates.
class OldGenerationMemoryChunkIterator { diff --git a/deps/v8/src/heap/pretenuring-handler-inl.h b/deps/v8/src/heap/pretenuring-handler-inl.h new file mode 100644 index 00000000000000..7447b08b8b9995 --- /dev/null +++ b/deps/v8/src/heap/pretenuring-handler-inl.h @@ -0,0 +1,112 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_HEAP_PRETENURING_HANDLER_INL_H_ +#define V8_HEAP_PRETENURING_HANDLER_INL_H_ + +#include "src/base/sanitizer/msan.h" +#include "src/heap/memory-chunk.h" +#include "src/heap/new-spaces.h" +#include "src/heap/pretenuring-handler.h" +#include "src/heap/spaces.h" +#include "src/objects/allocation-site-inl.h" + +namespace v8 { +namespace internal { + +void PretenturingHandler::UpdateAllocationSite( + Map map, HeapObject object, PretenuringFeedbackMap* pretenuring_feedback) { + DCHECK_NE(pretenuring_feedback, &global_pretenuring_feedback_); +#ifdef DEBUG + BasicMemoryChunk* chunk = BasicMemoryChunk::FromHeapObject(object); + DCHECK_IMPLIES(chunk->IsToPage(), + v8_flags.minor_mc || + chunk->IsFlagSet(MemoryChunk::PAGE_NEW_NEW_PROMOTION)); + DCHECK_IMPLIES(!chunk->InYoungGeneration(), + chunk->IsFlagSet(MemoryChunk::PAGE_NEW_OLD_PROMOTION)); +#endif + if (!v8_flags.allocation_site_pretenuring || + !AllocationSite::CanTrack(map.instance_type())) { + return; + } + AllocationMemento memento_candidate = + FindAllocationMemento<kForGC>(map, object); + if (memento_candidate.is_null()) return; + + // Entering cached feedback is used in the parallel case. We are not allowed + // to dereference the allocation site and rather have to postpone all checks + // till actually merging the data. + Address key = memento_candidate.GetAllocationSiteUnchecked(); + (*pretenuring_feedback)[AllocationSite::unchecked_cast(Object(key))]++; +} + +template <PretenturingHandler::FindMementoMode mode> +AllocationMemento PretenturingHandler::FindAllocationMemento( + Map map, HeapObject object) { + Address object_address = object.address(); + Address memento_address = + object_address + ALIGN_TO_ALLOCATION_ALIGNMENT(object.SizeFromMap(map)); + Address last_memento_word_address = memento_address + kTaggedSize; + // If the memento would be on another page, bail out immediately. + if (!Page::OnSamePage(object_address, last_memento_word_address)) { + return AllocationMemento(); + } + HeapObject candidate = HeapObject::FromAddress(memento_address); + ObjectSlot candidate_map_slot = candidate.map_slot(); + // This fast check may peek at an uninitialized word. However, the slow check + // below (memento_address == top) ensures that this is safe. Mark the word as + // initialized to silence MemorySanitizer warnings. + MSAN_MEMORY_IS_INITIALIZED(candidate_map_slot.address(), kTaggedSize); + if (!candidate_map_slot.contains_map_value( + ReadOnlyRoots(heap_).allocation_memento_map().ptr())) { + return AllocationMemento(); + } + + // Bail out if the memento is below the age mark, which can happen when + // mementos survived because a page got moved within new space. + Page* object_page = Page::FromAddress(object_address); + if (object_page->IsFlagSet(Page::NEW_SPACE_BELOW_AGE_MARK)) { + Address age_mark = + reinterpret_cast<SemiSpace*>(object_page->owner())->age_mark(); + if (!object_page->Contains(age_mark)) { + return AllocationMemento(); + } + // Do an exact check in the case where the age mark is on the same page. 
+ if (object_address < age_mark) { + return AllocationMemento(); + } + } + + AllocationMemento memento_candidate = AllocationMemento::cast(candidate); + + // Depending on what the memento is used for, we might need to perform + // additional checks. + Address top; + switch (mode) { + case kForGC: + return memento_candidate; + case kForRuntime: + if (memento_candidate.is_null()) return AllocationMemento(); + // Either the object is the last object in the new space, or there is + // another object of at least word size (the header map word) following + // it, so suffices to compare ptr and top here. + top = heap_->NewSpaceTop(); + DCHECK(memento_address >= heap_->new_space()->limit() || + memento_address + + ALIGN_TO_ALLOCATION_ALIGNMENT(AllocationMemento::kSize) <= + top); + if ((memento_address != top) && memento_candidate.IsValid()) { + return memento_candidate; + } + return AllocationMemento(); + default: + UNREACHABLE(); + } + UNREACHABLE(); +} + +} // namespace internal +} // namespace v8 + +#endif // V8_HEAP_PRETENURING_HANDLER_INL_H_ diff --git a/deps/v8/src/heap/pretenuring-handler.cc b/deps/v8/src/heap/pretenuring-handler.cc new file mode 100644 index 00000000000000..3276bdec01bbd8 --- /dev/null +++ b/deps/v8/src/heap/pretenuring-handler.cc @@ -0,0 +1,244 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "src/heap/pretenuring-handler.h" + +#include "src/execution/isolate.h" +#include "src/handles/global-handles-inl.h" +#include "src/heap/new-spaces.h" +#include "src/objects/allocation-site-inl.h" + +namespace v8 { +namespace internal { + +PretenturingHandler::PretenturingHandler(Heap* heap) + : heap_(heap), global_pretenuring_feedback_(kInitialFeedbackCapacity) {} + +PretenturingHandler::~PretenturingHandler() = default; + +void PretenturingHandler::MergeAllocationSitePretenuringFeedback( + const PretenuringFeedbackMap& local_pretenuring_feedback) { + PtrComprCageBase cage_base(heap_->isolate()); + AllocationSite site; + for (auto& site_and_count : local_pretenuring_feedback) { + site = site_and_count.first; + MapWord map_word = site.map_word(cage_base, kRelaxedLoad); + if (map_word.IsForwardingAddress()) { + site = AllocationSite::cast(map_word.ToForwardingAddress()); + } + + // We have not validated the allocation site yet, since we have not + // dereferenced the site during collecting information. + // This is an inlined check of AllocationMemento::IsValid. + if (!site.IsAllocationSite() || site.IsZombie()) continue; + + const int value = static_cast<int>(site_and_count.second); + DCHECK_LT(0, value); + if (site.IncrementMementoFoundCount(value)) { + // For sites in the global map the count is accessed through the site. + global_pretenuring_feedback_.insert(std::make_pair(site, 0)); + } + } +} + +bool PretenturingHandler::DeoptMaybeTenuredAllocationSites() const { + NewSpace* new_space = heap_->new_space(); + return new_space && new_space->IsAtMaximumCapacity() && + !heap_->MaximumSizeMinorGC(); +} + +namespace { + +inline bool MakePretenureDecision( + AllocationSite site, AllocationSite::PretenureDecision current_decision, + double ratio, bool maximum_size_scavenge) { + // Here we just allow state transitions from undecided or maybe tenure + // to don't tenure, maybe tenure, or tenure. 
+ if ((current_decision == AllocationSite::kUndecided || + current_decision == AllocationSite::kMaybeTenure)) { + if (ratio >= AllocationSite::kPretenureRatio) { + // We just transition into tenure state when the semi-space was at + // maximum capacity. + if (maximum_size_scavenge) { + site.set_deopt_dependent_code(true); + site.set_pretenure_decision(AllocationSite::kTenure); + // Currently we just need to deopt when we make a state transition to + // tenure. + return true; + } + site.set_pretenure_decision(AllocationSite::kMaybeTenure); + } else { + site.set_pretenure_decision(AllocationSite::kDontTenure); + } + } + return false; +} + +// Clear feedback calculation fields until the next GC. +inline void ResetPretenuringFeedback(AllocationSite site) { + site.set_memento_found_count(0); + site.set_memento_create_count(0); +} + +inline bool DigestPretenuringFeedback(Isolate* isolate, AllocationSite site, + bool maximum_size_scavenge) { + bool deopt = false; + int create_count = site.memento_create_count(); + int found_count = site.memento_found_count(); + bool minimum_mementos_created = + create_count >= AllocationSite::kPretenureMinimumCreated; + double ratio = + minimum_mementos_created || v8_flags.trace_pretenuring_statistics + ? static_cast<double>(found_count) / create_count + : 0.0; + AllocationSite::PretenureDecision current_decision = + site.pretenure_decision(); + + if (minimum_mementos_created) { + deopt = MakePretenureDecision(site, current_decision, ratio, + maximum_size_scavenge); + } + + if (v8_flags.trace_pretenuring_statistics) { + PrintIsolate(isolate, + "pretenuring: AllocationSite(%p): (created, found, ratio) " + "(%d, %d, %f) %s => %s\n", + reinterpret_cast<void*>(site.ptr()), create_count, found_count, + ratio, site.PretenureDecisionName(current_decision), + site.PretenureDecisionName(site.pretenure_decision())); + } + + ResetPretenuringFeedback(site); + return deopt; +} + +bool PretenureAllocationSiteManually(Isolate* isolate, AllocationSite site) { + AllocationSite::PretenureDecision current_decision = + site.pretenure_decision(); + bool deopt = true; + if (current_decision == AllocationSite::kUndecided || + current_decision == AllocationSite::kMaybeTenure) { + site.set_deopt_dependent_code(true); + site.set_pretenure_decision(AllocationSite::kTenure); + } else { + deopt = false; + } + if (v8_flags.trace_pretenuring_statistics) { + PrintIsolate(isolate, + "pretenuring manually requested: AllocationSite(%p): " + "%s => %s\n", + reinterpret_cast<void*>(site.ptr()), + site.PretenureDecisionName(current_decision), + site.PretenureDecisionName(site.pretenure_decision())); + } + + ResetPretenuringFeedback(site); + return deopt; +} + +} // namespace + +void PretenturingHandler::RemoveAllocationSitePretenuringFeedback( + AllocationSite site) { + global_pretenuring_feedback_.erase(site); +} + +void PretenturingHandler::ProcessPretenuringFeedback() { + bool trigger_deoptimization = false; + if (v8_flags.allocation_site_pretenuring) { + int tenure_decisions = 0; + int dont_tenure_decisions = 0; + int allocation_mementos_found = 0; + int allocation_sites = 0; + int active_allocation_sites = 0; + + AllocationSite site; + + // Step 1: Digest feedback for recorded allocation sites. + bool maximum_size_scavenge = heap_->MaximumSizeMinorGC(); + for (auto& site_and_count : global_pretenuring_feedback_) { + allocation_sites++; + site = site_and_count.first; + // Count is always accessed through the site.
+ DCHECK_EQ(0, site_and_count.second); + int found_count = site.memento_found_count(); + // An entry in the storage does not imply that the count is > 0 because + // allocation sites might have been reset due to too many objects dying + // in old space. + if (found_count > 0) { + DCHECK(site.IsAllocationSite()); + active_allocation_sites++; + allocation_mementos_found += found_count; + if (DigestPretenuringFeedback(heap_->isolate(), site, + maximum_size_scavenge)) { + trigger_deoptimization = true; + } + if (site.GetAllocationType() == AllocationType::kOld) { + tenure_decisions++; + } else { + dont_tenure_decisions++; + } + } + } + + // Step 2: Pretenure allocation sites for manual requests. + if (allocation_sites_to_pretenure_) { + while (!allocation_sites_to_pretenure_->empty()) { + auto pretenure_site = allocation_sites_to_pretenure_->Pop(); + if (PretenureAllocationSiteManually(heap_->isolate(), pretenure_site)) { + trigger_deoptimization = true; + } + } + allocation_sites_to_pretenure_.reset(); + } + + // Step 3: Deopt maybe tenured allocation sites if necessary. + bool deopt_maybe_tenured = DeoptMaybeTenuredAllocationSites(); + if (deopt_maybe_tenured) { + heap_->ForeachAllocationSite( + heap_->allocation_sites_list(), + [&allocation_sites, &trigger_deoptimization](AllocationSite site) { + DCHECK(site.IsAllocationSite()); + allocation_sites++; + if (site.IsMaybeTenure()) { + site.set_deopt_dependent_code(true); + trigger_deoptimization = true; + } + }); + } + + if (trigger_deoptimization) { + heap_->isolate()->stack_guard()->RequestDeoptMarkedAllocationSites(); + } + + if (v8_flags.trace_pretenuring_statistics && + (allocation_mementos_found > 0 || tenure_decisions > 0 || + dont_tenure_decisions > 0)) { + PrintIsolate(heap_->isolate(), + "pretenuring: deopt_maybe_tenured=%d visited_sites=%d " + "active_sites=%d " + "mementos=%d tenured=%d not_tenured=%d\n", + deopt_maybe_tenured ? 1 : 0, allocation_sites, + active_allocation_sites, allocation_mementos_found, + tenure_decisions, dont_tenure_decisions); + } + + global_pretenuring_feedback_.clear(); + global_pretenuring_feedback_.reserve(kInitialFeedbackCapacity); + } +} + +void PretenturingHandler::PretenureAllocationSiteOnNextCollection( + AllocationSite site) { + if (!allocation_sites_to_pretenure_) { + allocation_sites_to_pretenure_.reset( + new GlobalHandleVector<AllocationSite>(heap_)); + } + allocation_sites_to_pretenure_->Push(site); +} + +void PretenturingHandler::reset() { allocation_sites_to_pretenure_.reset(); } + +} // namespace internal +} // namespace v8 diff --git a/deps/v8/src/heap/pretenuring-handler.h b/deps/v8/src/heap/pretenuring-handler.h new file mode 100644 index 00000000000000..4c31141fb83378 --- /dev/null +++ b/deps/v8/src/heap/pretenuring-handler.h @@ -0,0 +1,90 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
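The pretenuring-handler.cc listing above digests memento feedback per allocation site: once enough mementos were created, the survival ratio found/created moves the site between the don't-tenure, maybe-tenure, and tenure states, and the tenure transition is only committed when the semi-space ran at capacity. A rough standalone model of that state machine, with illustrative thresholds standing in for AllocationSite::kPretenureMinimumCreated and kPretenureRatio:

    enum class Decision { kUndecided, kDontTenure, kMaybeTenure, kTenure };

    struct SiteFeedback {
      int created = 0;  // mementos created behind objects of this site
      int found = 0;    // mementos found still live during a scavenge
      Decision decision = Decision::kUndecided;
    };

    // Returns true when dependent code should be deoptimized; mirrors the
    // shape of MakePretenureDecision()/DigestPretenuringFeedback() above.
    bool Digest(SiteFeedback& site, bool semi_space_was_full) {
      constexpr int kMinimumCreated = 100;  // illustrative threshold
      constexpr double kRatio = 0.85;       // illustrative survival ratio
      bool deopt = false;
      if (site.created >= kMinimumCreated &&
          (site.decision == Decision::kUndecided ||
           site.decision == Decision::kMaybeTenure)) {
        const double ratio = static_cast<double>(site.found) / site.created;
        if (ratio >= kRatio) {
          // Commit to tenuring only when the semi-space ran at capacity.
          site.decision =
              semi_space_was_full ? Decision::kTenure : Decision::kMaybeTenure;
          deopt = semi_space_was_full;
        } else {
          site.decision = Decision::kDontTenure;
        }
      }
      site.created = site.found = 0;  // reset feedback until the next GC
      return deopt;
    }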
+ +#ifndef V8_HEAP_PRETENURING_HANDLER_H_ +#define V8_HEAP_PRETENURING_HANDLER_H_ + +#include <memory> + +#include "src/objects/allocation-site.h" +#include "src/objects/heap-object.h" +#include "src/objects/map.h" + +namespace v8 { +namespace internal { + +template <typename T> +class GlobalHandleVector; +class Heap; + +class PretenturingHandler final { + public: + static const int kInitialFeedbackCapacity = 256; + using PretenuringFeedbackMap = + std::unordered_map<AllocationSite, size_t, Object::Hasher>; + enum FindMementoMode { kForRuntime, kForGC }; + + explicit PretenturingHandler(Heap* heap); + ~PretenturingHandler(); + + void reset(); + + // If an object has an AllocationMemento trailing it, return it, otherwise + // return a null AllocationMemento. + template <FindMementoMode mode> + inline AllocationMemento FindAllocationMemento(Map map, HeapObject object); + + // =========================================================================== + // Allocation site tracking. ================================================= + // =========================================================================== + + // Updates the AllocationSite of a given {object}. The entry (including the + // count) is cached on the local pretenuring feedback. + inline void UpdateAllocationSite( + Map map, HeapObject object, PretenuringFeedbackMap* pretenuring_feedback); + + // Merges local pretenuring feedback into the global one. Note that this + // method needs to be called after evacuation, as allocation sites may be + // evacuated and this method resolves forward pointers accordingly. + void MergeAllocationSitePretenuringFeedback( + const PretenuringFeedbackMap& local_pretenuring_feedback); + + // Adds an allocation site to the list of sites to be pretenured during the + // next collection. Added allocation sites are pretenured independent of + // their feedback. + V8_EXPORT_PRIVATE void PretenureAllocationSiteOnNextCollection( + AllocationSite site); + + // =========================================================================== + // Pretenuring. ============================================================== + // =========================================================================== + + // Pretenuring decisions are made based on feedback collected during new space + // evacuation. Note that between feedback collection and calling this method, + // objects in old space must not move. + void ProcessPretenuringFeedback(); + + // Removes an entry from the global pretenuring storage. + void RemoveAllocationSitePretenuringFeedback(AllocationSite site); + + private: + bool DeoptMaybeTenuredAllocationSites() const; + + Heap* const heap_; + + // The feedback storage is used to store allocation sites (keys) and how often + // they have been visited (values) by finding a memento behind an object. The + // storage is only alive temporarily during a GC. The invariant is that all + // pointers in this map are already fixed, i.e., they do not point to + // forwarding pointers.
+ PretenuringFeedbackMap global_pretenuring_feedback_; + + std::unique_ptr<GlobalHandleVector<AllocationSite>> + allocation_sites_to_pretenure_; +}; + +} // namespace internal +} // namespace v8 + +#endif // V8_HEAP_PRETENURING_HANDLER_H_ diff --git a/deps/v8/src/heap/read-only-heap.cc b/deps/v8/src/heap/read-only-heap.cc index a365fe38331f11..07db0d09da7b59 100644 --- a/deps/v8/src/heap/read-only-heap.cc +++ b/deps/v8/src/heap/read-only-heap.cc @@ -9,6 +9,7 @@ #include "src/base/lazy-instance.h" #include "src/base/platform/mutex.h" +#include "src/common/globals.h" #include "src/common/ptr-compr-inl.h" #include "src/heap/basic-memory-chunk.h" #include "src/heap/heap-write-barrier-inl.h" @@ -298,7 +299,7 @@ HeapObject ReadOnlyHeapObjectIterator::Next() { } HeapObject object = HeapObject::FromAddress(current_addr_); const int object_size = object.Size(); - current_addr_ += object_size; + current_addr_ += ALIGN_TO_ALLOCATION_ALIGNMENT(object_size); if (object.IsFreeSpaceOrFiller()) { continue; diff --git a/deps/v8/src/heap/read-only-spaces.cc b/deps/v8/src/heap/read-only-spaces.cc index 0277f18dd53176..7385fd23533c8e 100644 --- a/deps/v8/src/heap/read-only-spaces.cc +++ b/deps/v8/src/heap/read-only-spaces.cc @@ -15,6 +15,7 @@ #include "src/heap/allocation-stats.h" #include "src/heap/basic-memory-chunk.h" #include "src/heap/heap-inl.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/memory-allocator.h" #include "src/heap/read-only-heap.h" #include "src/objects/objects-inl.h" @@ -185,7 +186,9 @@ ReadOnlyHeap* PointerCompressedReadOnlyArtifacts::GetReadOnlyHeapForIsolate( Address isolate_root = isolate->isolate_root(); for (Object original_object : original_cache) { Address original_address = original_object.ptr(); - Address new_address = isolate_root + CompressTagged(original_address); + Address new_address = + isolate_root + + V8HeapCompressionScheme::CompressTagged(original_address); Object new_object = Object(new_address); cache.push_back(new_object); } @@ -235,7 +238,8 @@ void PointerCompressedReadOnlyArtifacts::Initialize( pages_.push_back(new_page); shared_memory_.push_back(std::move(shared_memory)); // This is just CompressTagged but inlined so it will always compile. - Tagged_t compressed_address = CompressTagged(page->address()); + Tagged_t compressed_address = + V8HeapCompressionScheme::CompressTagged(page->address()); page_offsets_.push_back(compressed_address); // 3. Update the accounting stats so the allocated bytes are for the new @@ -332,10 +336,7 @@ ReadOnlyPage::ReadOnlyPage(Heap* heap, BaseSpace* space, size_t chunk_size, std::move(reservation)) { allocated_bytes_ = 0; SetFlags(Flag::NEVER_EVACUATE | Flag::READ_ONLY_HEAP); - heap->incremental_marking() - ->non_atomic_marking_state() - ->bitmap(this) - ->MarkAllBits(); + heap->non_atomic_marking_state()->bitmap(this)->MarkAllBits(); } void ReadOnlyPage::MakeHeaderRelocatable() { @@ -436,7 +437,7 @@ class ReadOnlySpaceObjectIterator : public ObjectIterator { } HeapObject obj = HeapObject::FromAddress(cur_addr_); const int obj_size = obj.Size(); - cur_addr_ += obj_size; + cur_addr_ += ALIGN_TO_ALLOCATION_ALIGNMENT(obj_size); DCHECK_LE(cur_addr_, cur_end_); if (!obj.IsFreeSpaceOrFiller()) { if (obj.IsCode()) { @@ -575,7 +576,7 @@ void ReadOnlySpace::FreeLinearAllocationArea() { // Clear the bits in the unused black area. 
ReadOnlyPage* page = pages_.back(); - heap()->incremental_marking()->marking_state()->bitmap(page)->ClearRange( + heap()->marking_state()->bitmap(page)->ClearRange( page->AddressToMarkbitIndex(top_), page->AddressToMarkbitIndex(limit_)); heap()->CreateFillerObjectAt(top_, static_cast<int>(limit_ - top_)); @@ -614,6 +615,7 @@ void ReadOnlySpace::EnsureSpaceForAllocation(int size_in_bytes) { HeapObject ReadOnlySpace::TryAllocateLinearlyAligned( int size_in_bytes, AllocationAlignment alignment) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); Address current_top = top_; int filler_size = Heap::GetFillToAlign(current_top, alignment); @@ -639,6 +641,7 @@ AllocationResult ReadOnlySpace::AllocateRawAligned( int size_in_bytes, AllocationAlignment alignment) { DCHECK(!v8_flags.enable_third_party_heap); DCHECK(!IsDetached()); + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); int allocation_size = size_in_bytes; HeapObject object = TryAllocateLinearlyAligned(allocation_size, alignment); @@ -658,6 +661,7 @@ AllocationResult ReadOnlySpace::AllocateRawAligned( AllocationResult ReadOnlySpace::AllocateRawUnaligned(int size_in_bytes) { DCHECK(!IsDetached()); + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); EnsureSpaceForAllocation(size_in_bytes); Address current_top = top_; Address new_top = current_top + size_in_bytes; @@ -684,7 +688,7 @@ AllocationResult ReadOnlySpace::AllocateRaw(int size_in_bytes, : AllocateRawUnaligned(size_in_bytes); HeapObject heap_obj; if (result.To(&heap_obj)) { - DCHECK(heap()->incremental_marking()->marking_state()->IsBlack(heap_obj)); + DCHECK(heap()->marking_state()->IsBlack(heap_obj)); } return result; } diff --git a/deps/v8/src/heap/remembered-set-inl.h b/deps/v8/src/heap/remembered-set-inl.h index fe446a6b8ca6e5..03e22cb806c2ea 100644 --- a/deps/v8/src/heap/remembered-set-inl.h +++ b/deps/v8/src/heap/remembered-set-inl.h @@ -34,18 +34,16 @@ SlotCallbackResult UpdateTypedSlotHelper::UpdateTypedSlot(Heap* heap, RelocInfo rinfo(addr, RelocInfo::FULL_EMBEDDED_OBJECT, 0, Code()); return UpdateEmbeddedPointer(heap, &rinfo, callback); } - case SlotType::kEmbeddedObjectData: { - RelocInfo rinfo(addr, RelocInfo::DATA_EMBEDDED_OBJECT, 0, Code()); - return UpdateEmbeddedPointer(heap, &rinfo, callback); - } case SlotType::kConstPoolEmbeddedObjectCompressed: { - HeapObject old_target = HeapObject::cast(Object( - DecompressTaggedAny(heap->isolate(), base::Memory<Tagged_t>(addr)))); + HeapObject old_target = + HeapObject::cast(Object(V8HeapCompressionScheme::DecompressTaggedAny( + heap->isolate(), base::Memory<Tagged_t>(addr)))); HeapObject new_target = old_target; SlotCallbackResult result = callback(FullMaybeObjectSlot(&new_target)); DCHECK(!HasWeakHeapObjectTag(new_target)); if (new_target != old_target) { - base::Memory<Tagged_t>(addr) = CompressTagged(new_target.ptr()); + base::Memory<Tagged_t>(addr) = + V8HeapCompressionScheme::CompressTagged(new_target.ptr()); } return result; } @@ -77,13 +75,9 @@ HeapObject UpdateTypedSlotHelper::GetTargetObject(Heap* heap, RelocInfo rinfo(addr, RelocInfo::FULL_EMBEDDED_OBJECT, 0, Code()); return rinfo.target_object(heap->isolate()); } - case SlotType::kEmbeddedObjectData: { - RelocInfo rinfo(addr, RelocInfo::DATA_EMBEDDED_OBJECT, 0, Code()); - return rinfo.target_object(heap->isolate()); - } case SlotType::kConstPoolEmbeddedObjectCompressed: { - Address full = - DecompressTaggedAny(heap->isolate(), base::Memory<Tagged_t>(addr)); + Address full = V8HeapCompressionScheme::DecompressTaggedAny( + 
heap->isolate(), base::Memory<Tagged_t>(addr)); return HeapObject::cast(Object(full)); } case SlotType::kConstPoolEmbeddedObjectFull: { diff --git a/deps/v8/src/heap/remembered-set.h b/deps/v8/src/heap/remembered-set.h index 08c5e88010c35a..d7dac8809d35a7 100644 --- a/deps/v8/src/heap/remembered-set.h +++ b/deps/v8/src/heap/remembered-set.h @@ -31,7 +31,10 @@ class RememberedSetOperations { static void Insert(SlotSet* slot_set, MemoryChunk* chunk, Address slot_addr) { DCHECK(chunk->Contains(slot_addr)); uintptr_t offset = slot_addr - chunk->address(); - slot_set->Insert<access_mode>(offset); + slot_set->Insert<access_mode == v8::internal::AccessMode::ATOMIC + ? v8::internal::SlotSet::AccessMode::ATOMIC + : v8::internal::SlotSet::AccessMode::NON_ATOMIC>( + offset); } template <typename Callback> diff --git a/deps/v8/src/heap/safepoint.cc b/deps/v8/src/heap/safepoint.cc index cec6ab7fcbdac3..ea36ba296d9243 100644 --- a/deps/v8/src/heap/safepoint.cc +++ b/deps/v8/src/heap/safepoint.cc @@ -72,7 +72,7 @@ class PerClientSafepointData final { void IsolateSafepoint::InitiateGlobalSafepointScope( Isolate* initiator, PerClientSafepointData* client_data) { - shared_isolate()->global_safepoint()->AssertActive(); + shared_heap_isolate()->global_safepoint()->AssertActive(); IgnoreLocalGCRequests ignore_gc_requests(initiator->heap()); LockMutex(initiator->main_thread_local_heap()); InitiateGlobalSafepointScopeRaw(initiator, client_data); @@ -80,7 +80,7 @@ void IsolateSafepoint::InitiateGlobalSafepointScope( void IsolateSafepoint::TryInitiateGlobalSafepointScope( Isolate* initiator, PerClientSafepointData* client_data) { - shared_isolate()->global_safepoint()->AssertActive(); + shared_heap_isolate()->global_safepoint()->AssertActive(); if (!local_heaps_mutex_.TryLock()) return; InitiateGlobalSafepointScopeRaw(initiator, client_data); } @@ -278,8 +278,8 @@ void IsolateSafepoint::AssertMainThreadIsOnlyThread() { Isolate* IsolateSafepoint::isolate() const { return heap_->isolate(); } -Isolate* IsolateSafepoint::shared_isolate() const { - return isolate()->shared_isolate(); +Isolate* IsolateSafepoint::shared_heap_isolate() const { + return isolate()->shared_heap_isolate(); } SafepointScope::SafepointScope(Heap* heap) : safepoint_(heap->safepoint()) { @@ -289,7 +289,7 @@ SafepointScope::SafepointScope(Heap* heap) : safepoint_(heap->safepoint()) { SafepointScope::~SafepointScope() { safepoint_->LeaveLocalSafepointScope(); } GlobalSafepoint::GlobalSafepoint(Isolate* isolate) - : shared_isolate_(isolate), shared_heap_(isolate->heap()) {} + : shared_heap_isolate_(isolate) {} void GlobalSafepoint::AppendClient(Isolate* client) { clients_mutex_.AssertHeld(); @@ -306,7 +306,6 @@ void GlobalSafepoint::AppendClient(Isolate* client) { client->global_safepoint_next_client_isolate_ = clients_head_; clients_head_ = client; - client->shared_isolate_ = shared_isolate_; } void GlobalSafepoint::RemoveClient(Isolate* client) { @@ -369,11 +368,15 @@ void GlobalSafepoint::EnterGlobalSafepointScope(Isolate* initiator) { initiator, &clients.back()); }); - // Make it possible to use AssertActive() on shared isolates. - CHECK(shared_isolate_->heap()->safepoint()->local_heaps_mutex_.TryLock()); + if (shared_heap_isolate_->is_shared()) { + // Make it possible to use AssertActive() on shared isolates. + CHECK(shared_heap_isolate_->heap() + ->safepoint() + ->local_heaps_mutex_.TryLock()); - // Shared isolates should never have multiple threads. 
- shared_isolate_->heap()->safepoint()->AssertMainThreadIsOnlyThread(); + // Shared isolates should never have multiple threads. + shared_heap_isolate_->heap()->safepoint()->AssertMainThreadIsOnlyThread(); + } // Iterate all clients again to initiate the safepoint for all of them - even // if that means blocking. @@ -384,7 +387,7 @@ void GlobalSafepoint::EnterGlobalSafepointScope(Isolate* initiator) { #if DEBUG for (const PerClientSafepointData& client : clients) { - DCHECK_EQ(client.isolate()->shared_isolate(), shared_isolate_); + DCHECK_EQ(client.isolate()->shared_heap_isolate(), shared_heap_isolate_); DCHECK(client.heap()->deserialization_complete()); } #endif // DEBUG @@ -398,7 +401,9 @@ void GlobalSafepoint::EnterGlobalSafepointScope(Isolate* initiator) { } void GlobalSafepoint::LeaveGlobalSafepointScope(Isolate* initiator) { - shared_isolate_->heap()->safepoint()->local_heaps_mutex_.Unlock(); + if (shared_heap_isolate_->is_shared()) { + shared_heap_isolate_->heap()->safepoint()->local_heaps_mutex_.Unlock(); + } IterateClientIsolates([initiator](Isolate* client) { Heap* client_heap = client->heap(); @@ -409,17 +414,22 @@ void GlobalSafepoint::LeaveGlobalSafepointScope(Isolate* initiator) { } GlobalSafepointScope::GlobalSafepointScope(Isolate* initiator) - : initiator_(initiator), shared_isolate_(initiator->shared_isolate()) { - if (shared_isolate_) { - shared_isolate_->global_safepoint()->EnterGlobalSafepointScope(initiator_); + : initiator_(initiator), + shared_heap_isolate_(initiator->has_shared_heap() + ? initiator->shared_heap_isolate() + : nullptr) { + if (shared_heap_isolate_) { + shared_heap_isolate_->global_safepoint()->EnterGlobalSafepointScope( + initiator_); } else { initiator_->heap()->safepoint()->EnterLocalSafepointScope(); } } GlobalSafepointScope::~GlobalSafepointScope() { - if (shared_isolate_) { - shared_isolate_->global_safepoint()->LeaveGlobalSafepointScope(initiator_); + if (shared_heap_isolate_) { + shared_heap_isolate_->global_safepoint()->LeaveGlobalSafepointScope( + initiator_); } else { initiator_->heap()->safepoint()->LeaveLocalSafepointScope(); } diff --git a/deps/v8/src/heap/safepoint.h b/deps/v8/src/heap/safepoint.h index 82e76fe6d53e7f..97e0e54591cb23 100644 --- a/deps/v8/src/heap/safepoint.h +++ b/deps/v8/src/heap/safepoint.h @@ -133,7 +133,7 @@ class IsolateSafepoint final { } Isolate* isolate() const; - Isolate* shared_isolate() const; + Isolate* shared_heap_isolate() const; Barrier barrier_; Heap* heap_; @@ -186,8 +186,7 @@ class GlobalSafepoint final { void EnterGlobalSafepointScope(Isolate* initiator); void LeaveGlobalSafepointScope(Isolate* initiator); - Isolate* const shared_isolate_; - Heap* const shared_heap_; + Isolate* const shared_heap_isolate_; base::Mutex clients_mutex_; Isolate* clients_head_ = nullptr; @@ -202,7 +201,7 @@ class V8_NODISCARD GlobalSafepointScope { private: Isolate* const initiator_; - Isolate* const shared_isolate_; + Isolate* const shared_heap_isolate_; }; } // namespace internal diff --git a/deps/v8/src/heap/scavenger-inl.h b/deps/v8/src/heap/scavenger-inl.h index 91f4f528d0161f..59d837e1a35e09 100644 --- a/deps/v8/src/heap/scavenger-inl.h +++ b/deps/v8/src/heap/scavenger-inl.h @@ -8,9 +8,11 @@ #include "src/codegen/assembler-inl.h" #include "src/heap/evacuation-allocator-inl.h" #include "src/heap/incremental-marking-inl.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/memory-chunk.h" #include "src/heap/new-spaces.h" #include "src/heap/objects-visiting-inl.h" +#include "src/heap/pretenuring-handler-inl.h" 
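The scavenger changes below wire this handler in: every Scavenger task fills its own local_pretenuring_feedback_ map while evacuating, and only Finalize() merges it into the handler's global map, so parallel tasks never contend on shared state. A rough sketch of that per-task pattern, using illustrative types rather than V8's:

    #include <cstdint>
    #include <unordered_map>

    // One map per scavenger task; the key stands in for an AllocationSite.
    using FeedbackMap = std::unordered_map<uintptr_t, size_t>;

    // Run single-threaded after evacuation; workers fill their local maps
    // beforehand without any synchronization.
    void MergeFeedback(FeedbackMap& global, const FeedbackMap& local) {
      for (const auto& [site, count] : local) {
        global[site] += count;
      }
    }

ProcessPretenuringFeedback() then consumes the merged map on the main thread.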
#include "src/heap/scavenger.h" #include "src/objects/map.h" #include "src/objects/objects-body-descriptors-inl.h" @@ -110,10 +112,11 @@ bool Scavenger::MigrateObject(Map map, HeapObject source, HeapObject target, } if (is_incremental_marking_ && - promotion_heap_choice != kPromoteIntoSharedHeap) { + (promotion_heap_choice != kPromoteIntoSharedHeap || mark_shared_heap_)) { heap()->incremental_marking()->TransferColor(source, target); } - heap()->UpdateAllocationSite(map, source, &local_pretenuring_feedback_); + pretenuring_handler_->UpdateAllocationSite(map, source, + &local_pretenuring_feedback_); return true; } @@ -132,8 +135,7 @@ CopyAndForwardResult Scavenger::SemiSpaceCopyObject( HeapObject target; if (allocation.To(&target)) { - DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite( - target)); + DCHECK(heap()->non_atomic_marking_state()->IsWhite(target)); const bool self_success = MigrateObject(map, object, target, object_size, kPromoteIntoLocalHeap); if (!self_success) { @@ -181,8 +183,7 @@ CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot, HeapObject target; if (allocation.To(&target)) { - DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite( - target)); + DCHECK(heap()->non_atomic_marking_state()->IsWhite(target)); const bool self_success = MigrateObject(map, object, target, object_size, promotion_heap_choice); if (!self_success) { diff --git a/deps/v8/src/heap/scavenger.cc b/deps/v8/src/heap/scavenger.cc index dfebb0b89be209..8f1c27c05c0774 100644 --- a/deps/v8/src/heap/scavenger.cc +++ b/deps/v8/src/heap/scavenger.cc @@ -18,6 +18,7 @@ #include "src/heap/memory-chunk-inl.h" #include "src/heap/memory-chunk.h" #include "src/heap/objects-visiting-inl.h" +#include "src/heap/pretenuring-handler.h" #include "src/heap/remembered-set-inl.h" #include "src/heap/scavenger-inl.h" #include "src/heap/slot-set.h" @@ -330,7 +331,7 @@ void ScavengerCollector::CollectGarbage() { EphemeronTableList ephemeron_table_list; { - Sweeper* sweeper = heap_->mark_compact_collector()->sweeper(); + Sweeper* sweeper = heap_->sweeper(); // Pause the concurrent sweeper. 
Sweeper::PauseScope pause_scope(sweeper); @@ -540,8 +541,7 @@ void ScavengerCollector::SweepArrayBufferExtensions() { void ScavengerCollector::HandleSurvivingNewLargeObjects() { const bool is_compacting = heap_->incremental_marking()->IsCompacting(); - AtomicMarkingState* marking_state = - heap_->incremental_marking()->atomic_marking_state(); + AtomicMarkingState* marking_state = heap_->atomic_marking_state(); for (SurvivingNewLargeObjectMapEntry update_info : surviving_new_large_objects_) { @@ -598,8 +598,8 @@ Scavenger::PromotionList::Local::Local(Scavenger::PromotionList* promotion_list) namespace { ConcurrentAllocator* CreateSharedOldAllocator(Heap* heap) { - if (v8_flags.shared_string_table && heap->isolate()->shared_isolate()) { - return new ConcurrentAllocator(nullptr, heap->shared_old_space()); + if (v8_flags.shared_string_table && heap->isolate()->has_shared_heap()) { + return new ConcurrentAllocator(nullptr, heap->shared_allocation_space()); } return nullptr; } @@ -615,7 +615,9 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging, promotion_list_local_(promotion_list), copied_list_local_(*copied_list), ephemeron_table_list_local_(*ephemeron_table_list), - local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity), + pretenuring_handler_(heap_->pretenuring_handler()), + local_pretenuring_feedback_( + PretenturingHandler::kInitialFeedbackCapacity), copied_size_(0), promoted_size_(0), allocator_(heap, CompactionSpaceKind::kCompactionSpaceForScavenge), @@ -625,7 +627,8 @@ Scavenger::Scavenger(ScavengerCollector* collector, Heap* heap, bool is_logging, is_compacting_(heap->incremental_marking()->IsCompacting()), is_compacting_including_map_space_(is_compacting_ && v8_flags.compact_maps), - shared_string_table_(shared_old_allocator_.get() != nullptr) {} + shared_string_table_(shared_old_allocator_.get() != nullptr), + mark_shared_heap_(heap->isolate()->is_shared_space_isolate()) {} void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map, int size) { @@ -636,8 +639,7 @@ void Scavenger::IterateAndScavengePromotedObject(HeapObject target, Map map, // the end of collection it would be a violation of the invariant to record // its slots. 
const bool record_slots = - is_compacting_ && - heap()->incremental_marking()->atomic_marking_state()->IsBlack(target); + is_compacting_ && heap()->atomic_marking_state()->IsBlack(target); IterateAndScavengePromotedObjectsVisitor visitor(this, record_slots); @@ -663,27 +665,29 @@ void Scavenger::RememberPromotedEphemeron(EphemeronHashTable table, int entry) { void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) { AllocationSpace space = page->owner_identity(); if ((space == OLD_SPACE) && !page->SweepingDone()) { - heap()->mark_compact_collector()->sweeper()->AddPage( - space, reinterpret_cast<Page*>(page), - Sweeper::READD_TEMPORARY_REMOVED_PAGE); + heap()->sweeper()->AddPage(space, reinterpret_cast<Page*>(page), + Sweeper::READD_TEMPORARY_REMOVED_PAGE); } } void Scavenger::ScavengePage(MemoryChunk* page) { CodePageMemoryModificationScope memory_modification_scope(page); - const bool has_shared_isolate = heap_->isolate()->shared_isolate(); + const bool record_old_to_shared_slots = heap_->isolate()->has_shared_heap(); if (page->slot_set<OLD_TO_NEW, AccessMode::ATOMIC>() != nullptr) { InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToNew( page, InvalidatedSlotsFilter::LivenessCheck::kNo); RememberedSet<OLD_TO_NEW>::IterateAndTrackEmptyBuckets( page, - [this, page, has_shared_isolate, &filter](MaybeObjectSlot slot) { + [this, page, record_old_to_shared_slots, + &filter](MaybeObjectSlot slot) { if (!filter.IsValid(slot.address())) return REMOVE_SLOT; SlotCallbackResult result = CheckAndScavengeObject(heap_, slot); // A new space string might have been promoted into the shared heap // during GC. - if (has_shared_isolate) CheckOldToNewSlotForSharedUntyped(page, slot); + if (record_old_to_shared_slots) { + CheckOldToNewSlotForSharedUntyped(page, slot); + } return result; }, &empty_chunks_local_); @@ -700,11 +704,11 @@ void Scavenger::ScavengePage(MemoryChunk* page) { return UpdateTypedSlotHelper::UpdateTypedSlot( heap_, slot_type, slot_address, [this, page, slot_type, slot_address, - has_shared_isolate](FullMaybeObjectSlot slot) { + record_old_to_shared_slots](FullMaybeObjectSlot slot) { SlotCallbackResult result = CheckAndScavengeObject(heap(), slot); // A new space string might have been promoted into the shared // heap during GC. - if (has_shared_isolate) { + if (record_old_to_shared_slots) { CheckOldToNewSlotForSharedTyped(page, slot_type, slot_address); } return result; @@ -809,8 +813,9 @@ void ScavengerCollector::ClearOldEphemerons() { } void Scavenger::Finalize() { - heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_); - heap()->IncrementSemiSpaceCopiedObjectSize(copied_size_); + pretenuring_handler_->MergeAllocationSitePretenuringFeedback( + local_pretenuring_feedback_); + heap()->IncrementNewSpaceSurvivingObjectSize(copied_size_); heap()->IncrementPromotedObjectsSize(promoted_size_); collector_->MergeSurvivingNewLargeObjects(surviving_new_large_objects_); allocator_.Finalize(); diff --git a/deps/v8/src/heap/scavenger.h b/deps/v8/src/heap/scavenger.h index 38979bcf1d7454..6476d1c9270131 100644 --- a/deps/v8/src/heap/scavenger.h +++ b/deps/v8/src/heap/scavenger.h @@ -12,6 +12,7 @@ #include "src/heap/memory-chunk.h" #include "src/heap/objects-visiting.h" #include "src/heap/parallel-work-item.h" +#include "src/heap/pretenuring-handler.h" #include "src/heap/slot-set.h" namespace v8 { @@ -116,7 +117,6 @@ class Scavenger { // Number of objects to process before interrupting for potentially waking // up other tasks. 
static const int kInterruptThreshold = 128; - static const int kInitialLocalPretenuringFeedbackCapacity = 256; inline Heap* heap() { return heap_; } @@ -199,7 +199,8 @@ class Scavenger { PromotionList::Local promotion_list_local_; CopiedList::Local copied_list_local_; EphemeronTableList::Local ephemeron_table_list_local_; - Heap::PretenuringFeedbackMap local_pretenuring_feedback_; + PretenturingHandler* const pretenuring_handler_; + PretenturingHandler::PretenuringFeedbackMap local_pretenuring_feedback_; size_t copied_size_; size_t promoted_size_; EvacuationAllocator allocator_; @@ -212,6 +213,7 @@ class Scavenger { const bool is_compacting_; const bool is_compacting_including_map_space_; const bool shared_string_table_; + const bool mark_shared_heap_; friend class IterateAndScavengePromotedObjectsVisitor; friend class RootScavengeVisitor; diff --git a/deps/v8/src/heap/setup-heap-internal.cc b/deps/v8/src/heap/setup-heap-internal.cc index c1b2fab13d11a0..601d1b832283f4 100644 --- a/deps/v8/src/heap/setup-heap-internal.cc +++ b/deps/v8/src/heap/setup-heap-internal.cc @@ -75,9 +75,11 @@ bool SetupIsolateDelegate::SetupHeapInternal(Heap* heap) { bool Heap::CreateHeapObjects() { // Create initial maps. if (!CreateInitialMaps()) return false; - if (v8_flags.minor_mc && new_space()) { - paged_new_space()->paged_space()->free_list()->RepairLists(this); - } + + // Ensure that all young generation pages are iterable. It must be after heap + // setup, so that the maps have been created. + if (new_space()) new_space()->MakeIterable(); + CreateApiObjects(); // Create initial objects @@ -876,9 +878,11 @@ void Heap::CreateInitialObjects() { set_feedback_vectors_for_profiling_tools(roots.undefined_value()); set_pending_optimize_for_test_bytecode(roots.undefined_value()); set_shared_wasm_memories(roots.empty_weak_array_list()); + set_locals_block_list_cache(roots.undefined_value()); #ifdef V8_ENABLE_WEBASSEMBLY set_active_continuation(roots.undefined_value()); set_active_suspender(roots.undefined_value()); + set_js_to_wasm_wrappers(roots.empty_weak_array_list()); set_wasm_canonical_rtts(roots.empty_weak_array_list()); #endif // V8_ENABLE_WEBASSEMBLY @@ -1025,8 +1029,8 @@ void Heap::CreateInitialObjects() { set_async_generator_await_reject_shared_fun(*info); info = CreateSharedFunctionInfo( - isolate(), Builtin::kAsyncGeneratorYieldResolveClosure, 1); - set_async_generator_yield_resolve_shared_fun(*info); + isolate(), Builtin::kAsyncGeneratorYieldWithAwaitResolveClosure, 1); + set_async_generator_yield_with_await_resolve_shared_fun(*info); info = CreateSharedFunctionInfo( isolate(), Builtin::kAsyncGeneratorReturnResolveClosure, 1); diff --git a/deps/v8/src/heap/slot-set.h b/deps/v8/src/heap/slot-set.h index a67f3e94c5d969..a3a40885f804f7 100644 --- a/deps/v8/src/heap/slot-set.h +++ b/deps/v8/src/heap/slot-set.h @@ -10,9 +10,8 @@ #include <stack> #include <vector> -#include "src/base/atomic-utils.h" #include "src/base/bit-field.h" -#include "src/base/bits.h" +#include "src/heap/base/basic-slot-set.h" #include "src/objects/compressed-slots.h" #include "src/objects/slots.h" #include "src/utils/allocation.h" @@ -22,7 +21,9 @@ namespace v8 { namespace internal { -enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT }; +using ::heap::base::KEEP_SLOT; +using ::heap::base::REMOVE_SLOT; +using ::heap::base::SlotCallbackResult; // Possibly empty buckets (buckets that do not contain any slots) are discovered // by the scavenger. 
Buckets might become non-empty when promoting objects later @@ -126,256 +127,46 @@ class PossiblyEmptyBuckets { static_assert(std::is_standard_layout<PossiblyEmptyBuckets>::value); static_assert(sizeof(PossiblyEmptyBuckets) == kSystemPointerSize); -// Data structure for maintaining a set of slots in a standard (non-large) -// page. -// The data structure assumes that the slots are pointer size aligned and -// splits the valid slot offset range into buckets. -// Each bucket is a bitmap with a bit corresponding to a single slot offset. -class SlotSet { - public: - enum EmptyBucketMode { - FREE_EMPTY_BUCKETS, // An empty bucket will be deallocated immediately. - KEEP_EMPTY_BUCKETS // An empty bucket will be kept. - }; +class SlotSet final : public ::heap::base::BasicSlotSet<kTaggedSize> { + using BasicSlotSet = ::heap::base::BasicSlotSet<kTaggedSize>; - SlotSet() = delete; + public: + static const int kBucketsRegularPage = + (1 << kPageSizeBits) / kTaggedSize / kCellsPerBucket / kBitsPerCell; static SlotSet* Allocate(size_t buckets) { - // SlotSet* slot_set --+ - // | - // v - // +-----------------+-------------------------+ - // | initial buckets | buckets array | - // +-----------------+-------------------------+ - // pointer-sized pointer-sized * buckets - // - // - // The SlotSet pointer points to the beginning of the buckets array for - // faster access in the write barrier. The number of buckets is needed for - // calculating the size of this data structure. - size_t buckets_size = buckets * sizeof(Bucket*); - size_t size = kInitialBucketsSize + buckets_size; - void* allocation = AlignedAllocWithRetry(size, kSystemPointerSize); - SlotSet* slot_set = reinterpret_cast<SlotSet*>( - reinterpret_cast<uint8_t*>(allocation) + kInitialBucketsSize); - DCHECK( - IsAligned(reinterpret_cast<uintptr_t>(slot_set), kSystemPointerSize)); -#ifdef DEBUG - *slot_set->initial_buckets() = buckets; -#endif - for (size_t i = 0; i < buckets; i++) { - *slot_set->bucket(i) = nullptr; - } - return slot_set; - } - - static void Delete(SlotSet* slot_set, size_t buckets) { - if (slot_set == nullptr) return; - - for (size_t i = 0; i < buckets; i++) { - slot_set->ReleaseBucket(i); - } - -#ifdef DEBUG - size_t initial_buckets = *slot_set->initial_buckets(); - - for (size_t i = buckets; i < initial_buckets; i++) { - DCHECK_NULL(*slot_set->bucket(i)); - } -#endif - - AlignedFree(reinterpret_cast<uint8_t*>(slot_set) - kInitialBucketsSize); - } - - static size_t BucketsForSize(size_t size) { - return (size + (kTaggedSize * kBitsPerBucket) - 1) >> - (kTaggedSizeLog2 + kBitsPerBucketLog2); - } - - // Converts the slot offset into bucket index. - static size_t BucketForSlot(size_t slot_offset) { - DCHECK(IsAligned(slot_offset, kTaggedSize)); - return slot_offset >> (kTaggedSizeLog2 + kBitsPerBucketLog2); - } - - // The slot offset specifies a slot at address page_start_ + slot_offset. - // AccessMode defines whether there can be concurrent access on the buckets - // or not. - template <AccessMode access_mode> - void Insert(size_t slot_offset) { - size_t bucket_index; - int cell_index, bit_index; - SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index); - Bucket* bucket = LoadBucket<access_mode>(bucket_index); - if (bucket == nullptr) { - bucket = new Bucket; - if (!SwapInNewBucket<access_mode>(bucket_index, bucket)) { - delete bucket; - bucket = LoadBucket<access_mode>(bucket_index); - } - } - // Check that monotonicity is preserved, i.e., once a bucket is set we do - // not free it concurrently. 
- DCHECK(bucket != nullptr); - DCHECK_EQ(bucket->cells(), LoadBucket<access_mode>(bucket_index)->cells()); - uint32_t mask = 1u << bit_index; - if ((bucket->LoadCell<access_mode>(cell_index) & mask) == 0) { - bucket->SetCellBits<access_mode>(cell_index, mask); - } - } - - // The slot offset specifies a slot at address page_start_ + slot_offset. - // Returns true if the set contains the slot. - bool Contains(size_t slot_offset) { - size_t bucket_index; - int cell_index, bit_index; - SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index); - Bucket* bucket = LoadBucket(bucket_index); - if (bucket == nullptr) return false; - return (bucket->LoadCell(cell_index) & (1u << bit_index)) != 0; - } - - // The slot offset specifies a slot at address page_start_ + slot_offset. - void Remove(size_t slot_offset) { - size_t bucket_index; - int cell_index, bit_index; - SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index); - Bucket* bucket = LoadBucket(bucket_index); - if (bucket != nullptr) { - uint32_t cell = bucket->LoadCell(cell_index); - uint32_t bit_mask = 1u << bit_index; - if (cell & bit_mask) { - bucket->ClearCellBits(cell_index, bit_mask); - } - } - } - - // The slot offsets specify a range of slots at addresses: - // [page_start_ + start_offset ... page_start_ + end_offset). - void RemoveRange(size_t start_offset, size_t end_offset, size_t buckets, - EmptyBucketMode mode) { - CHECK_LE(end_offset, buckets * kBitsPerBucket * kTaggedSize); - DCHECK_LE(start_offset, end_offset); - size_t start_bucket; - int start_cell, start_bit; - SlotToIndices(start_offset, &start_bucket, &start_cell, &start_bit); - size_t end_bucket; - int end_cell, end_bit; - SlotToIndices(end_offset, &end_bucket, &end_cell, &end_bit); - uint32_t start_mask = (1u << start_bit) - 1; - uint32_t end_mask = ~((1u << end_bit) - 1); - Bucket* bucket; - if (start_bucket == end_bucket && start_cell == end_cell) { - bucket = LoadBucket(start_bucket); - if (bucket != nullptr) { - bucket->ClearCellBits(start_cell, ~(start_mask | end_mask)); - } - return; - } - size_t current_bucket = start_bucket; - int current_cell = start_cell; - bucket = LoadBucket(current_bucket); - if (bucket != nullptr) { - bucket->ClearCellBits(current_cell, ~start_mask); - } - current_cell++; - if (current_bucket < end_bucket) { - if (bucket != nullptr) { - ClearBucket(bucket, current_cell, kCellsPerBucket); - } - // The rest of the current bucket is cleared. - // Move on to the next bucket. - current_bucket++; - current_cell = 0; - } - DCHECK(current_bucket == end_bucket || - (current_bucket < end_bucket && current_cell == 0)); - while (current_bucket < end_bucket) { - if (mode == FREE_EMPTY_BUCKETS) { - ReleaseBucket(current_bucket); - } else { - DCHECK(mode == KEEP_EMPTY_BUCKETS); - bucket = LoadBucket(current_bucket); - if (bucket != nullptr) { - ClearBucket(bucket, 0, kCellsPerBucket); - } - } - current_bucket++; - } - // All buckets between start_bucket and end_bucket are cleared. - DCHECK(current_bucket == end_bucket); - if (current_bucket == buckets) return; - bucket = LoadBucket(current_bucket); - DCHECK(current_cell <= end_cell); - if (bucket == nullptr) return; - while (current_cell < end_cell) { - bucket->StoreCell(current_cell, 0); - current_cell++; - } - // All cells between start_cell and end_cell are cleared. - DCHECK(current_bucket == end_bucket && current_cell == end_cell); - bucket->ClearCellBits(end_cell, ~end_mask); - } - - // The slot offset specifies a slot at address page_start_ + slot_offset. 
- bool Lookup(size_t slot_offset) { - size_t bucket_index; - int cell_index, bit_index; - SlotToIndices(slot_offset, &bucket_index, &cell_index, &bit_index); - Bucket* bucket = LoadBucket(bucket_index); - if (bucket == nullptr) return false; - return (bucket->LoadCell(cell_index) & (1u << bit_index)) != 0; + return static_cast<SlotSet*>(BasicSlotSet::Allocate(buckets)); } - // Iterate over all slots in the set and for each slot invoke the callback. - // If the callback returns REMOVE_SLOT then the slot is removed from the set. - // Returns the new number of slots. - // - // Iteration can be performed concurrently with other operations that use - // atomic access mode such as insertion and removal. However there is no - // guarantee about ordering and linearizability. - // - // Sample usage: - // Iterate([](MaybeObjectSlot slot) { - // if (good(slot)) return KEEP_SLOT; - // else return REMOVE_SLOT; - // }); - // - // Releases memory for empty buckets with FREE_EMPTY_BUCKETS. + // Similar to BasicSlotSet::Iterate() but Callback takes the parameter of type + // MaybeObjectSlot. template <typename Callback> size_t Iterate(Address chunk_start, size_t start_bucket, size_t end_bucket, Callback callback, EmptyBucketMode mode) { - return Iterate(chunk_start, start_bucket, end_bucket, callback, - [this, mode](size_t bucket_index) { - if (mode == EmptyBucketMode::FREE_EMPTY_BUCKETS) { - ReleaseBucket(bucket_index); - } - }); + return BasicSlotSet::Iterate( + chunk_start, start_bucket, end_bucket, + [&callback](Address slot) { return callback(MaybeObjectSlot(slot)); }, + [this, mode](size_t bucket_index) { + if (mode == EmptyBucketMode::FREE_EMPTY_BUCKETS) { + ReleaseBucket(bucket_index); + } + }); } - // Similar to Iterate but marks potentially empty buckets internally. Stores - // true in empty_bucket_found in case a potentially empty bucket was found. - // Assumes that the possibly empty-array was already cleared by - // CheckPossiblyEmptyBuckets. + // Similar to SlotSet::Iterate() but marks potentially empty buckets + // internally. Stores true in empty_bucket_found in case a potentially empty + // bucket was found. Assumes that the possibly empty-array was already cleared + // by CheckPossiblyEmptyBuckets. template <typename Callback> size_t IterateAndTrackEmptyBuckets( Address chunk_start, size_t start_bucket, size_t end_bucket, Callback callback, PossiblyEmptyBuckets* possibly_empty_buckets) { - return Iterate(chunk_start, start_bucket, end_bucket, callback, - [possibly_empty_buckets, end_bucket](size_t bucket_index) { - possibly_empty_buckets->Insert(bucket_index, end_bucket); - }); - } - - bool FreeEmptyBuckets(size_t buckets) { - bool empty = true; - for (size_t bucket_index = 0; bucket_index < buckets; bucket_index++) { - if (!FreeBucketIfEmpty(bucket_index)) { - empty = false; - } - } - - return empty; + return BasicSlotSet::Iterate( + chunk_start, start_bucket, end_bucket, + [&callback](Address slot) { return callback(MaybeObjectSlot(slot)); }, + [possibly_empty_buckets, end_bucket](size_t bucket_index) { + possibly_empty_buckets->Insert(bucket_index, end_bucket); + }); } // Check whether possibly empty buckets are really empty. 
Empty buckets are @@ -406,198 +197,6 @@ class SlotSet { return empty; } - - static const int kCellsPerBucket = 32; - static const int kCellsPerBucketLog2 = 5; - static const int kCellSizeBytesLog2 = 2; - static const int kCellSizeBytes = 1 << kCellSizeBytesLog2; - static const int kBitsPerCell = 32; - static const int kBitsPerCellLog2 = 5; - static const int kBitsPerBucket = kCellsPerBucket * kBitsPerCell; - static const int kBitsPerBucketLog2 = kCellsPerBucketLog2 + kBitsPerCellLog2; - static const int kBucketsRegularPage = - (1 << kPageSizeBits) / kTaggedSize / kCellsPerBucket / kBitsPerCell; - - class Bucket : public Malloced { - uint32_t cells_[kCellsPerBucket]; - - public: - Bucket() { - for (int i = 0; i < kCellsPerBucket; i++) { - cells_[i] = 0; - } - } - - uint32_t* cells() { return cells_; } - uint32_t* cell(int cell_index) { return cells() + cell_index; } - - template <AccessMode access_mode = AccessMode::ATOMIC> - uint32_t LoadCell(int cell_index) { - DCHECK_LT(cell_index, kCellsPerBucket); - if (access_mode == AccessMode::ATOMIC) - return base::AsAtomic32::Acquire_Load(cells() + cell_index); - return *(cells() + cell_index); - } - - template <AccessMode access_mode = AccessMode::ATOMIC> - void SetCellBits(int cell_index, uint32_t mask) { - if (access_mode == AccessMode::ATOMIC) { - base::AsAtomic32::SetBits(cell(cell_index), mask, mask); - } else { - uint32_t* c = cell(cell_index); - *c = (*c & ~mask) | mask; - } - } - - void ClearCellBits(int cell_index, uint32_t mask) { - base::AsAtomic32::SetBits(cell(cell_index), 0u, mask); - } - - void StoreCell(int cell_index, uint32_t value) { - base::AsAtomic32::Release_Store(cell(cell_index), value); - } - - bool IsEmpty() { - for (int i = 0; i < kCellsPerBucket; i++) { - if (cells_[i] != 0) { - return false; - } - } - return true; - } - }; - - private: - template <typename Callback, typename EmptyBucketCallback> - size_t Iterate(Address chunk_start, size_t start_bucket, size_t end_bucket, - Callback callback, EmptyBucketCallback empty_bucket_callback) { - size_t new_count = 0; - for (size_t bucket_index = start_bucket; bucket_index < end_bucket; - bucket_index++) { - Bucket* bucket = LoadBucket(bucket_index); - if (bucket != nullptr) { - size_t in_bucket_count = 0; - size_t cell_offset = bucket_index << kBitsPerBucketLog2; - for (int i = 0; i < kCellsPerBucket; i++, cell_offset += kBitsPerCell) { - uint32_t cell = bucket->LoadCell(i); - if (cell) { - uint32_t old_cell = cell; - uint32_t mask = 0; - while (cell) { - int bit_offset = base::bits::CountTrailingZeros(cell); - uint32_t bit_mask = 1u << bit_offset; - Address slot = (cell_offset + bit_offset) << kTaggedSizeLog2; - if (callback(MaybeObjectSlot(chunk_start + slot)) == KEEP_SLOT) { - ++in_bucket_count; - } else { - mask |= bit_mask; - } - cell ^= bit_mask; - } - uint32_t new_cell = old_cell & ~mask; - if (old_cell != new_cell) { - bucket->ClearCellBits(i, mask); - } - } - } - if (in_bucket_count == 0) { - empty_bucket_callback(bucket_index); - } - new_count += in_bucket_count; - } - } - return new_count; - } - - bool FreeBucketIfEmpty(size_t bucket_index) { - Bucket* bucket = LoadBucket<AccessMode::NON_ATOMIC>(bucket_index); - if (bucket != nullptr) { - if (bucket->IsEmpty()) { - ReleaseBucket<AccessMode::NON_ATOMIC>(bucket_index); - } else { - return false; - } - } - - return true; - } - - void ClearBucket(Bucket* bucket, int start_cell, int end_cell) { - DCHECK_GE(start_cell, 0); - DCHECK_LE(end_cell, kCellsPerBucket); - int current_cell = start_cell; - while (current_cell < 
kCellsPerBucket) { - bucket->StoreCell(current_cell, 0); - current_cell++; - } - } - - template <AccessMode access_mode = AccessMode::ATOMIC> - void ReleaseBucket(size_t bucket_index) { - Bucket* bucket = LoadBucket<access_mode>(bucket_index); - StoreBucket<access_mode>(bucket_index, nullptr); - delete bucket; - } - - template <AccessMode access_mode = AccessMode::ATOMIC> - Bucket* LoadBucket(Bucket** bucket) { - if (access_mode == AccessMode::ATOMIC) - return base::AsAtomicPointer::Acquire_Load(bucket); - return *bucket; - } - - template <AccessMode access_mode = AccessMode::ATOMIC> - Bucket* LoadBucket(size_t bucket_index) { - return LoadBucket(bucket(bucket_index)); - } - - template <AccessMode access_mode = AccessMode::ATOMIC> - void StoreBucket(Bucket** bucket, Bucket* value) { - if (access_mode == AccessMode::ATOMIC) { - base::AsAtomicPointer::Release_Store(bucket, value); - } else { - *bucket = value; - } - } - - template <AccessMode access_mode = AccessMode::ATOMIC> - void StoreBucket(size_t bucket_index, Bucket* value) { - StoreBucket(bucket(bucket_index), value); - } - - template <AccessMode access_mode = AccessMode::ATOMIC> - bool SwapInNewBucket(size_t bucket_index, Bucket* value) { - Bucket** b = bucket(bucket_index); - if (access_mode == AccessMode::ATOMIC) { - return base::AsAtomicPointer::Release_CompareAndSwap(b, nullptr, value) == - nullptr; - } else { - DCHECK_NULL(*b); - *b = value; - return true; - } - } - - // Converts the slot offset into bucket/cell/bit index. - static void SlotToIndices(size_t slot_offset, size_t* bucket_index, - int* cell_index, int* bit_index) { - DCHECK(IsAligned(slot_offset, kTaggedSize)); - size_t slot = slot_offset >> kTaggedSizeLog2; - *bucket_index = slot >> kBitsPerBucketLog2; - *cell_index = - static_cast<int>((slot >> kBitsPerCellLog2) & (kCellsPerBucket - 1)); - *bit_index = static_cast<int>(slot & (kBitsPerCell - 1)); - } - - Bucket** buckets() { return reinterpret_cast<Bucket**>(this); } - Bucket** bucket(size_t bucket_index) { return buckets() + bucket_index; } - -#ifdef DEBUG - size_t* initial_buckets() { return reinterpret_cast<size_t*>(this) - 1; } - static const int kInitialBucketsSize = sizeof(size_t); -#else - static const int kInitialBucketsSize = 0; -#endif }; static_assert(std::is_standard_layout<SlotSet>::value); @@ -614,11 +213,6 @@ enum class SlotType : uint8_t { // accessing. Used when pointer is stored in the instruction stream. kEmbeddedObjectCompressed, - // Full pointer sized slot storing an object start address. - // RelocInfo::target_object/RelocInfo::set_target_object methods are used for - // accessing. Used when pointer is stored in the instruction stream. - kEmbeddedObjectData, - // Full pointer sized slot storing instruction start of Code object. // RelocInfo::target_address/RelocInfo::set_target_address methods are used // for accessing. Used when pointer is stored in the instruction stream. 
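The slot-set.h rewrite above deletes V8's hand-rolled bucket/bitmap logic and rebases SlotSet on the shared heap::base::BasicSlotSet<kTaggedSize>; the derived class keeps only thin wrappers whose job is to convert the generic Address-based callbacks into MaybeObjectSlot-typed ones. Below is a minimal editor's sketch of that bucketed-bitmap pattern and of the callback-adapting wrapper. It is not V8's implementation: MiniSlotSet and TypedSlot are hypothetical names, and all of the real class's concurrency machinery (atomic cell access, lazily allocated buckets) is omitted.

// Editor's sketch, not part of the patch: a simplified bitmap slot set in
// the style of BasicSlotSet. All names here are hypothetical.
#include <cstddef>
#include <cstdint>
#include <vector>

enum SlotCallbackResult { KEEP_SLOT, REMOVE_SLOT };

class MiniSlotSet {
 public:
  explicit MiniSlotSet(size_t slot_count)
      : cells_((slot_count + kBitsPerCell - 1) / kBitsPerCell, 0) {}

  void Insert(size_t slot) { cells_[slot / kBitsPerCell] |= Bit(slot); }
  bool Contains(size_t slot) const {
    return (cells_[slot / kBitsPerCell] & Bit(slot)) != 0;
  }

  // Visits every recorded slot; clears slots whose callback answers
  // REMOVE_SLOT and returns how many survived. This mirrors the shape of
  // BasicSlotSet::Iterate without any of its atomics.
  template <typename Callback>
  size_t Iterate(Callback callback) {
    size_t kept = 0;
    for (size_t cell = 0; cell < cells_.size(); ++cell) {
      for (uint32_t bits = cells_[cell]; bits != 0; bits &= bits - 1) {
        int bit = CountTrailingZeros(bits);
        size_t slot = cell * kBitsPerCell + bit;
        if (callback(slot) == KEEP_SLOT) {
          ++kept;
        } else {
          cells_[cell] &= ~(1u << bit);  // drop the slot from the set
        }
      }
    }
    return kept;
  }

 private:
  static constexpr size_t kBitsPerCell = 32;
  static uint32_t Bit(size_t slot) { return 1u << (slot % kBitsPerCell); }
  static int CountTrailingZeros(uint32_t v) {
    int n = 0;
    while ((v & 1u) == 0) { v >>= 1; ++n; }
    return n;
  }
  std::vector<uint32_t> cells_;
};

// The derived class in the patch adds essentially one thing: it forwards
// each raw slot to the caller wrapped in a typed slot, the same way
// SlotSet::Iterate wraps Address in MaybeObjectSlot.
struct TypedSlot { size_t index; };

template <typename TypedCallback>
size_t IterateTyped(MiniSlotSet& set, TypedCallback callback) {
  return set.Iterate(
      [&callback](size_t slot) { return callback(TypedSlot{slot}); });
}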
diff --git a/deps/v8/src/heap/spaces-inl.h b/deps/v8/src/heap/spaces-inl.h index 4397ad5ba229c0..9986f84f1bc887 100644 --- a/deps/v8/src/heap/spaces-inl.h +++ b/deps/v8/src/heap/spaces-inl.h @@ -149,6 +149,7 @@ MemoryChunk* OldGenerationMemoryChunkIterator::next() { AllocationResult LocalAllocationBuffer::AllocateRawAligned( int size_in_bytes, AllocationAlignment alignment) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); Address current_top = allocation_info_.top(); int filler_size = Heap::GetFillToAlign(current_top, alignment); int aligned_size = filler_size + size_in_bytes; @@ -164,6 +165,7 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned( AllocationResult LocalAllocationBuffer::AllocateRawUnaligned( int size_in_bytes) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); return allocation_info_.CanIncrementTop(size_in_bytes) ? AllocationResult::FromObject(HeapObject::FromAddress( allocation_info_.IncrementTop(size_in_bytes))) @@ -214,6 +216,7 @@ MemoryChunk* MemoryChunkIterator::Next() { AllocationResult SpaceWithLinearArea::AllocateFastUnaligned( int size_in_bytes, AllocationOrigin origin) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); if (!allocation_info_.CanIncrementTop(size_in_bytes)) { return AllocationResult::Failure(); } @@ -253,6 +256,7 @@ AllocationResult SpaceWithLinearArea::AllocateRaw(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin) { DCHECK(!v8_flags.enable_third_party_heap); + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); AllocationResult result; diff --git a/deps/v8/src/heap/spaces.cc b/deps/v8/src/heap/spaces.cc index cb80998276d5a0..a29cb88d5a7f52 100644 --- a/deps/v8/src/heap/spaces.cc +++ b/deps/v8/src/heap/spaces.cc @@ -174,7 +174,7 @@ void Page::CreateBlackArea(Address start, Address end) { DCHECK_EQ(Page::FromAddress(start), this); DCHECK_LT(start, end); DCHECK_EQ(Page::FromAddress(end - 1), this); - MarkingState* marking_state = heap()->incremental_marking()->marking_state(); + MarkingState* marking_state = heap()->marking_state(); marking_state->bitmap(this)->SetRange(AddressToMarkbitIndex(start), AddressToMarkbitIndex(end)); marking_state->IncrementLiveBytes(this, static_cast<intptr_t>(end - start)); @@ -186,8 +186,7 @@ void Page::CreateBlackAreaBackground(Address start, Address end) { DCHECK_EQ(Page::FromAddress(start), this); DCHECK_LT(start, end); DCHECK_EQ(Page::FromAddress(end - 1), this); - AtomicMarkingState* marking_state = - heap()->incremental_marking()->atomic_marking_state(); + AtomicMarkingState* marking_state = heap()->atomic_marking_state(); marking_state->bitmap(this)->SetRange(AddressToMarkbitIndex(start), AddressToMarkbitIndex(end)); heap()->incremental_marking()->IncrementLiveBytesBackground( @@ -200,7 +199,7 @@ void Page::DestroyBlackArea(Address start, Address end) { DCHECK_EQ(Page::FromAddress(start), this); DCHECK_LT(start, end); DCHECK_EQ(Page::FromAddress(end - 1), this); - MarkingState* marking_state = heap()->incremental_marking()->marking_state(); + MarkingState* marking_state = heap()->marking_state(); marking_state->bitmap(this)->ClearRange(AddressToMarkbitIndex(start), AddressToMarkbitIndex(end)); marking_state->IncrementLiveBytes(this, -static_cast<intptr_t>(end - start)); @@ -212,8 +211,7 @@ void Page::DestroyBlackAreaBackground(Address start, Address end) { DCHECK_EQ(Page::FromAddress(start), this); DCHECK_LT(start, end); DCHECK_EQ(Page::FromAddress(end - 1), this); - AtomicMarkingState* marking_state = - 
heap()->incremental_marking()->atomic_marking_state(); + AtomicMarkingState* marking_state = heap()->atomic_marking_state(); marking_state->bitmap(this)->ClearRange(AddressToMarkbitIndex(start), AddressToMarkbitIndex(end)); heap()->incremental_marking()->IncrementLiveBytesBackground( diff --git a/deps/v8/src/heap/spaces.h b/deps/v8/src/heap/spaces.h index 1d60095de3e292..48b8c9fc41c67f 100644 --- a/deps/v8/src/heap/spaces.h +++ b/deps/v8/src/heap/spaces.h @@ -153,6 +153,8 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace { virtual int RoundSizeDownToObjectAlignment(int size) const { if (id_ == CODE_SPACE) { return RoundDown(size, kCodeAlignment); + } else if (V8_COMPRESS_POINTERS_8GB_BOOL) { + return RoundDown(size, kObjectAlignment8GbHeap); } else { return RoundDown(size, kTaggedSize); } @@ -182,7 +184,9 @@ class V8_EXPORT_PRIVATE Space : public BaseSpace { return memory_chunk_list_.back(); } - heap::List<MemoryChunk>& memory_chunk_list() { return memory_chunk_list_; } + virtual heap::List<MemoryChunk>& memory_chunk_list() { + return memory_chunk_list_; + } virtual Page* InitializePage(MemoryChunk* chunk) { UNREACHABLE(); } @@ -299,7 +303,7 @@ class Page : public MemoryChunk { return categories_[type]; } - size_t ShrinkToHighWaterMark(); + V8_EXPORT_PRIVATE size_t ShrinkToHighWaterMark(); V8_EXPORT_PRIVATE void CreateBlackArea(Address start, Address end); V8_EXPORT_PRIVATE void CreateBlackAreaBackground(Address start, Address end); @@ -346,7 +350,11 @@ static_assert(sizeof(Page) <= MemoryChunk::kHeaderSize); class V8_EXPORT_PRIVATE ObjectIterator : public Malloced { public: - virtual ~ObjectIterator() = default; + // Note: The destructor can not be marked as `= default` as this causes + // the compiler on C++20 to define it as `constexpr` resulting in the + // compiler producing warnings about undefined inlines for Next() + // on classes inheriting from it. + virtual ~ObjectIterator() {} virtual HeapObject Next() = 0; }; diff --git a/deps/v8/src/heap/stress-scavenge-observer.cc b/deps/v8/src/heap/stress-scavenge-observer.cc index 9515f6bb8037ee..4c72416e8c864c 100644 --- a/deps/v8/src/heap/stress-scavenge-observer.cc +++ b/deps/v8/src/heap/stress-scavenge-observer.cc @@ -62,8 +62,10 @@ bool StressScavengeObserver::HasRequestedGC() const { } void StressScavengeObserver::RequestedGCDone() { + size_t new_space_size = heap_->new_space()->Size(); double current_percent = - heap_->new_space()->Size() * 100.0 / heap_->new_space()->Capacity(); + new_space_size ? 
new_space_size * 100.0 / heap_->new_space()->Capacity() + : 0; limit_percentage_ = NextLimit(static_cast<int>(current_percent)); if (v8_flags.trace_stress_scavenge) { diff --git a/deps/v8/src/heap/sweeper.cc b/deps/v8/src/heap/sweeper.cc index a910dddcbe5e37..24ad9beeea40b0 100644 --- a/deps/v8/src/heap/sweeper.cc +++ b/deps/v8/src/heap/sweeper.cc @@ -10,6 +10,7 @@ #include "src/base/logging.h" #include "src/common/globals.h" #include "src/execution/vm-state-inl.h" +#include "src/flags/flags.h" #include "src/heap/base/active-system-pages.h" #include "src/heap/code-object-registry.h" #include "src/heap/free-list-inl.h" @@ -19,21 +20,20 @@ #include "src/heap/mark-compact-inl.h" #include "src/heap/new-spaces.h" #include "src/heap/paged-spaces.h" +#include "src/heap/pretenuring-handler-inl.h" +#include "src/heap/pretenuring-handler.h" #include "src/heap/remembered-set.h" #include "src/objects/objects-inl.h" namespace v8 { namespace internal { -namespace { -static const int kInitialLocalPretenuringFeedbackCapacity = 256; -} // namespace - class Sweeper::ConcurrentSweeper final { public: explicit ConcurrentSweeper(Sweeper* sweeper) : sweeper_(sweeper), - local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity) {} + local_pretenuring_feedback_( + PretenturingHandler::kInitialFeedbackCapacity) {} bool ConcurrentSweepSpace(AllocationSpace identity, JobDelegate* delegate) { while (!delegate->ShouldYield()) { @@ -45,13 +45,13 @@ class Sweeper::ConcurrentSweeper final { return false; } - Heap::PretenuringFeedbackMap* local_pretenuring_feedback() { + PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback() { return &local_pretenuring_feedback_; } private: Sweeper* const sweeper_; - Heap::PretenuringFeedbackMap local_pretenuring_feedback_; + PretenturingHandler::PretenuringFeedbackMap local_pretenuring_feedback_; }; class Sweeper::SweeperJob final : public JobTask { @@ -69,12 +69,20 @@ class Sweeper::SweeperJob final : public JobTask { void Run(JobDelegate* delegate) final { RwxMemoryWriteScope::SetDefaultPermissionsForNewThread(); + DCHECK(sweeper_->current_collector_.has_value()); if (delegate->IsJoiningThread()) { - TRACE_GC(tracer_, GCTracer::Scope::MC_SWEEP); + TRACE_GC(tracer_, sweeper_->current_collector_ == + GarbageCollector::MINOR_MARK_COMPACTOR + ? GCTracer::Scope::MINOR_MC_SWEEP + : GCTracer::Scope::MC_SWEEP); RunImpl(delegate); } else { - TRACE_GC_EPOCH(tracer_, GCTracer::Scope::MC_BACKGROUND_SWEEPING, - ThreadKind::kBackground); + TRACE_GC_EPOCH( + tracer_, + sweeper_->current_collector_ == GarbageCollector::MINOR_MARK_COMPACTOR + ? 
GCTracer::Scope::MINOR_MC_BACKGROUND_SWEEPING + : GCTracer::Scope::MC_BACKGROUND_SWEEPING, + ThreadKind::kBackground); RunImpl(delegate); } } @@ -106,12 +114,14 @@ class Sweeper::SweeperJob final : public JobTask { GCTracer* const tracer_; }; -Sweeper::Sweeper(Heap* heap, NonAtomicMarkingState* marking_state) +Sweeper::Sweeper(Heap* heap) : heap_(heap), - marking_state_(marking_state), + marking_state_(heap_->non_atomic_marking_state()), sweeping_in_progress_(false), should_reduce_memory_(false), - local_pretenuring_feedback_(kInitialLocalPretenuringFeedbackCapacity) {} + pretenuring_handler_(heap_->pretenuring_handler()), + local_pretenuring_feedback_( + PretenturingHandler::kInitialFeedbackCapacity) {} Sweeper::~Sweeper() { DCHECK(concurrent_sweepers_.empty()); @@ -160,9 +170,10 @@ void Sweeper::TearDown() { if (job_handle_ && job_handle_->IsValid()) job_handle_->Cancel(); } -void Sweeper::StartSweeping() { +void Sweeper::StartSweeping(GarbageCollector collector) { DCHECK(local_pretenuring_feedback_.empty()); sweeping_in_progress_ = true; + current_collector_ = collector; should_reduce_memory_ = heap_->ShouldReduceMemory(); ForAllSweepingSpaces([this](AllocationSpace space) { // Sorting is done in order to make compaction more efficient: by sweeping @@ -188,6 +199,7 @@ int Sweeper::NumberOfConcurrentSweepers() const { } void Sweeper::StartSweeperTasks() { + DCHECK(current_collector_.has_value()); DCHECK(!job_handle_ || !job_handle_->IsValid()); if (v8_flags.concurrent_sweeping && sweeping_in_progress_ && !heap_->delay_sweeper_tasks_for_testing_) { @@ -230,14 +242,16 @@ void Sweeper::EnsureCompleted(SweepingMode sweeping_mode) { CHECK(sweeping_list_[GetSweepSpaceIndex(space)].empty()); }); - heap_->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_); + pretenuring_handler_->MergeAllocationSitePretenuringFeedback( + local_pretenuring_feedback_); for (ConcurrentSweeper& concurrent_sweeper : concurrent_sweepers_) { - heap_->MergeAllocationSitePretenuringFeedback( + pretenuring_handler_->MergeAllocationSitePretenuringFeedback( *concurrent_sweeper.local_pretenuring_feedback()); } local_pretenuring_feedback_.clear(); concurrent_sweepers_.clear(); + current_collector_.reset(); sweeping_in_progress_ = false; } @@ -246,14 +260,6 @@ void Sweeper::DrainSweepingWorklistForSpace(AllocationSpace space) { ParallelSweepSpace(space, SweepingMode::kLazyOrConcurrent, 0); } -void Sweeper::SupportConcurrentSweeping() { - ForAllSweepingSpaces([this](AllocationSpace space) { - const int kMaxPagesToSweepPerSpace = 1; - ParallelSweepSpace(space, SweepingMode::kLazyOrConcurrent, 0, - kMaxPagesToSweepPerSpace); - }); -} - bool Sweeper::AreSweeperTasksRunning() { return job_handle_ && job_handle_->IsValid() && job_handle_->IsActive(); } @@ -268,8 +274,8 @@ V8_INLINE size_t Sweeper::FreeAndProcessFreedMemory( ZapCode(free_start, size); } page->heap()->CreateFillerObjectAtSweeper(free_start, static_cast<int>(size)); - freed_bytes = - reinterpret_cast<PagedSpace*>(space)->UnaccountedFree(free_start, size); + freed_bytes = reinterpret_cast<PagedSpaceBase*>(space)->UnaccountedFree( + free_start, size); if (should_reduce_memory_) page->DiscardUnusedMemory(free_start, size); return freed_bytes; @@ -349,11 +355,11 @@ void Sweeper::ClearMarkBitsAndHandleLivenessStatistics(Page* page, int Sweeper::RawSweep( Page* p, FreeSpaceTreatmentMode free_space_treatment_mode, SweepingMode sweeping_mode, const base::MutexGuard& page_guard, - Heap::PretenuringFeedbackMap* local_pretenuring_feedback) { + 
PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback) { Space* space = p->owner(); DCHECK_NOT_NULL(space); DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE || - space->identity() == MAP_SPACE || + space->identity() == MAP_SPACE || space->identity() == SHARED_SPACE || (space->identity() == NEW_SPACE && v8_flags.minor_mc)); DCHECK_IMPLIES(space->identity() == NEW_SPACE, sweeping_mode == SweepingMode::kEagerDuringGC); @@ -434,12 +440,13 @@ int Sweeper::RawSweep( } Map map = object.map(cage_base, kAcquireLoad); DCHECK(MarkCompactCollector::IsMapOrForwarded(map)); - int size = object.SizeFromMap(map); + int size = ALIGN_TO_ALLOCATION_ALIGNMENT(object.SizeFromMap(map)); live_bytes += size; free_start = free_end + size; if (p->InYoungGeneration()) { - heap_->UpdateAllocationSite(map, object, local_pretenuring_feedback); + pretenuring_handler_->UpdateAllocationSite(map, object, + local_pretenuring_feedback); } if (active_system_pages_after_sweeping) { @@ -519,7 +526,7 @@ int Sweeper::ParallelSweepSpace(AllocationSpace identity, int Sweeper::ParallelSweepPage( Page* page, AllocationSpace identity, - Heap::PretenuringFeedbackMap* local_pretenuring_feedback, + PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback, SweepingMode sweeping_mode) { DCHECK(IsValidSweepingSpace(identity)); @@ -572,7 +579,7 @@ void Sweeper::EnsurePageIsSwept(Page* page) { } } } else { - DCHECK(page->InNewSpace()); + DCHECK(page->InNewSpace() && !v8_flags.minor_mc); } CHECK(page->SweepingDone()); @@ -592,6 +599,21 @@ bool Sweeper::TryRemoveSweepingPageSafe(AllocationSpace space, Page* page) { void Sweeper::AddPage(AllocationSpace space, Page* page, Sweeper::AddPageMode mode) { + DCHECK_NE(NEW_SPACE, space); + AddPageImpl(space, page, mode); +} + +void Sweeper::AddNewSpacePage(Page* page) { + DCHECK_EQ(NEW_SPACE, page->owner_identity()); + size_t live_bytes = marking_state_->live_bytes(page); + heap_->IncrementNewSpaceSurvivingObjectSize(live_bytes); + heap_->IncrementYoungSurvivorsCounter(live_bytes); + page->ClearWasUsedForAllocation(); + AddPageImpl(NEW_SPACE, page, AddPageMode::REGULAR); +} + +void Sweeper::AddPageImpl(AllocationSpace space, Page* page, + Sweeper::AddPageMode mode) { base::MutexGuard guard(&mutex_); DCHECK(IsValidSweepingSpace(space)); DCHECK(!v8_flags.concurrent_sweeping || !job_handle_ || diff --git a/deps/v8/src/heap/sweeper.h b/deps/v8/src/heap/sweeper.h index 6b747547dbb002..aa40f0b5460b97 100644 --- a/deps/v8/src/heap/sweeper.h +++ b/deps/v8/src/heap/sweeper.h @@ -8,11 +8,12 @@ #include <map> #include <vector> +#include "src/base/optional.h" #include "src/base/platform/condition-variable.h" #include "src/base/platform/semaphore.h" #include "src/common/globals.h" #include "src/flags/flags.h" -#include "src/heap/heap.h" +#include "src/heap/pretenuring-handler.h" #include "src/heap/slot-set.h" #include "src/tasks/cancelable-task.h" @@ -76,7 +77,7 @@ class Sweeper { enum AddPageMode { REGULAR, READD_TEMPORARY_REMOVED_PAGE }; enum class SweepingMode { kEagerDuringGC, kLazyOrConcurrent }; - Sweeper(Heap* heap, NonAtomicMarkingState* marking_state); + Sweeper(Heap* heap); ~Sweeper(); bool sweeping_in_progress() const { return sweeping_in_progress_; } @@ -84,38 +85,39 @@ class Sweeper { void TearDown(); void AddPage(AllocationSpace space, Page* page, AddPageMode mode); + void AddNewSpacePage(Page* page); int ParallelSweepSpace(AllocationSpace identity, SweepingMode sweeping_mode, int required_freed_bytes, int max_pages = 0); int ParallelSweepPage( 
Page* page, AllocationSpace identity, - Heap::PretenuringFeedbackMap* local_pretenuring_feedback, + PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback, SweepingMode sweeping_mode); void EnsurePageIsSwept(Page* page); - int RawSweep(Page* p, FreeSpaceTreatmentMode free_space_treatment_mode, - SweepingMode sweeping_mode, const base::MutexGuard& page_guard, - Heap::PretenuringFeedbackMap* local_pretenuring_feedback); + int RawSweep( + Page* p, FreeSpaceTreatmentMode free_space_treatment_mode, + SweepingMode sweeping_mode, const base::MutexGuard& page_guard, + PretenturingHandler::PretenuringFeedbackMap* local_pretenuring_feedback); // After calling this function sweeping is considered to be in progress // and the main thread can sweep lazily, but the background sweeper tasks // are not running yet. - void StartSweeping(); + void StartSweeping(GarbageCollector collector); V8_EXPORT_PRIVATE void StartSweeperTasks(); void EnsureCompleted( SweepingMode sweeping_mode = SweepingMode::kLazyOrConcurrent); void DrainSweepingWorklistForSpace(AllocationSpace space); bool AreSweeperTasksRunning(); - // Support concurrent sweepers from main thread - void SupportConcurrentSweeping(); - Page* GetSweptPageSafe(PagedSpaceBase* space); + private: NonAtomicMarkingState* marking_state() const { return marking_state_; } - private: + void AddPageImpl(AllocationSpace space, Page* page, AddPageMode mode); + class ConcurrentSweeper; class SweeperJob; @@ -131,6 +133,7 @@ class Sweeper { callback(OLD_SPACE); callback(CODE_SPACE); callback(MAP_SPACE); + callback(SHARED_SPACE); } // Helper function for RawSweep. Depending on the FreeListRebuildingMode and @@ -187,7 +190,7 @@ class Sweeper { int NumberOfConcurrentSweepers() const; Heap* const heap_; - NonAtomicMarkingState* marking_state_; + NonAtomicMarkingState* const marking_state_; std::unique_ptr<JobHandle> job_handle_; base::Mutex mutex_; base::ConditionVariable cv_page_swept_; @@ -198,7 +201,9 @@ class Sweeper { // path checks this flag to see whether it could support concurrent sweeping. std::atomic<bool> sweeping_in_progress_; bool should_reduce_memory_; - Heap::PretenuringFeedbackMap local_pretenuring_feedback_; + PretenturingHandler* const pretenuring_handler_; + PretenturingHandler::PretenuringFeedbackMap local_pretenuring_feedback_; + base::Optional<GarbageCollector> current_collector_; }; } // namespace internal diff --git a/deps/v8/src/ic/accessor-assembler.cc b/deps/v8/src/ic/accessor-assembler.cc index f98413fc553e73..33bcb390610b5a 100644 --- a/deps/v8/src/ic/accessor-assembler.cc +++ b/deps/v8/src/ic/accessor-assembler.cc @@ -1280,10 +1280,14 @@ void AccessorAssembler::HandleStoreICHandlerCase( TVARIABLE(IntPtrT, var_name_index); Label dictionary_found(this, &var_name_index); - NameDictionaryLookup<PropertyDictionary>( - properties, CAST(p->name()), - p->IsAnyDefineOwn() ? &if_slow : &dictionary_found, &var_name_index, - miss); + if (p->IsAnyDefineOwn()) { + NameDictionaryLookup<PropertyDictionary>(properties, CAST(p->name()), + &if_slow, nullptr, miss); + } else { + NameDictionaryLookup<PropertyDictionary>(properties, CAST(p->name()), + &dictionary_found, + &var_name_index, miss); + } // When dealing with class fields defined with DefineKeyedOwnIC or // DefineNamedOwnIC, use the slow path to check the existing property. 
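The HandleStoreICHandlerCase hunk above stops funneling both IC kinds through a single NameDictionaryLookup call: the DefineOwn variants jump straight to the slow path when the property already exists and never read the found index, so they can now pass nullptr for the index out-parameter instead of threading an unused variable through. A small editor's sketch of that optional-out-parameter split follows; FindEntry and StoreMode are invented stand-ins for the CSA machinery, not V8 API.

// Editor's sketch, not part of the patch. FindEntry and StoreMode are
// hypothetical stand-ins for NameDictionaryLookup and the IC kind.
#include <map>
#include <string>

enum class StoreMode { kStore, kDefineOwn };

// Looks up |name|; writes the entry index through |index_out| only when the
// caller provided one (the DefineOwn path below passes nullptr).
static bool FindEntry(const std::map<std::string, int>& dict,
                      const std::string& name, int* index_out) {
  auto it = dict.find(name);
  if (it == dict.end()) return false;
  if (index_out != nullptr) *index_out = it->second;
  return true;
}

static const char* Dispatch(const std::map<std::string, int>& dict,
                            const std::string& name, StoreMode mode) {
  if (mode == StoreMode::kDefineOwn) {
    // An existing property sends DefineOwn to the slow path; the entry
    // index is irrelevant there, so no out-parameter is passed.
    return FindEntry(dict, name, nullptr) ? "slow path" : "fast define";
  }
  int index = -1;
  if (FindEntry(dict, name, &index)) return "fast store to found entry";
  return "miss";
}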
@@ -2887,7 +2891,7 @@ enum AccessorAssembler::StubCacheTable : int { TNode<IntPtrT> AccessorAssembler::StubCachePrimaryOffset(TNode<Name> name, TNode<Map> map) { // Compute the hash of the name (use entire hash field). - TNode<Uint32T> raw_hash_field = LoadNameRawHashField(name); + TNode<Uint32T> raw_hash_field = LoadNameRawHash(name); CSA_DCHECK(this, Word32Equal(Word32And(raw_hash_field, Int32Constant(Name::kHashNotComputedMask)), @@ -3622,7 +3626,7 @@ void AccessorAssembler::KeyedLoadICGeneric(const LoadICParameters* p) { BIND(&if_notunique); { - if (FLAG_internalize_on_the_fly) { + if (v8_flags.internalize_on_the_fly) { // Ideally we could return undefined directly here if the name is not // found in the string table, i.e. it was never internalized, but that // invariant doesn't hold with named property interceptors (at this diff --git a/deps/v8/src/ic/binary-op-assembler.cc b/deps/v8/src/ic/binary-op-assembler.cc index 51c403ceb49038..403d4b9bbe49ad 100644 --- a/deps/v8/src/ic/binary-op-assembler.cc +++ b/deps/v8/src/ic/binary-op-assembler.cc @@ -17,7 +17,8 @@ TNode<Object> BinaryOpAssembler::Generate_AddWithFeedback( Label do_fadd(this), if_lhsisnotnumber(this, Label::kDeferred), check_rhsisoddball(this, Label::kDeferred), call_with_oddball_feedback(this), call_with_any_feedback(this), - call_add_stub(this), end(this), bigint(this, Label::kDeferred); + call_add_stub(this), end(this), bigint(this, Label::kDeferred), + bigint64(this); TVARIABLE(Float64T, var_fadd_lhs); TVARIABLE(Float64T, var_fadd_rhs); TVARIABLE(Smi, var_type_feedback); @@ -158,7 +159,16 @@ TNode<Object> BinaryOpAssembler::Generate_AddWithFeedback( Goto(&call_with_any_feedback); BIND(&lhs_is_bigint); - Branch(IsBigInt(rhs_heap_object), &bigint, &call_with_any_feedback); + { + GotoIfNot(IsBigInt(rhs_heap_object), &call_with_any_feedback); + if (Is64()) { + GotoIfLargeBigInt(CAST(lhs), &bigint); + GotoIfLargeBigInt(CAST(rhs), &bigint); + Goto(&bigint64); + } else { + Goto(&bigint); + } + } BIND(&lhs_is_string); { @@ -191,6 +201,30 @@ TNode<Object> BinaryOpAssembler::Generate_AddWithFeedback( Goto(&call_with_any_feedback); } + if (Is64()) { + BIND(&bigint64); + { + // Both {lhs} and {rhs} are of BigInt type and can fit in 64-bit + // registers. + Label if_overflow(this); + TVARIABLE(UintPtrT, lhs_raw); + TVARIABLE(UintPtrT, rhs_raw); + BigIntToRawBytes(CAST(lhs), &lhs_raw, &lhs_raw); + BigIntToRawBytes(CAST(rhs), &rhs_raw, &rhs_raw); + var_result = BigIntFromInt64( + TryIntPtrAdd(UncheckedCast<IntPtrT>(lhs_raw.value()), + UncheckedCast<IntPtrT>(rhs_raw.value()), &if_overflow)); + + var_type_feedback = SmiConstant(BinaryOperationFeedback::kBigInt64); + UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), + slot_id, update_feedback_mode); + Goto(&end); + + BIND(&if_overflow); + Goto(&bigint); + } + } + BIND(&bigint); { // Both {lhs} and {rhs} are of BigInt type. 
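The Generate_AddWithFeedback change above introduces a 64-bit fast path: when both operands are BigInts small enough to fit in a machine word (the GotoIfLargeBigInt checks), their raw bytes are added with an overflow check via TryIntPtrAdd, kBigInt64 feedback is recorded, and only the overflow case falls through to the generic heap-allocating BigInt path. Here is a hedged C++ sketch of that shape, using a GCC/Clang builtin in place of the CSA TryIntPtrAdd helper; it is an illustration of the pattern, not the builtin's actual code.

// Editor's sketch, not part of the patch: checked 64-bit addition with a
// slow-path fallback, mirroring the bigint64 fast path.
#include <cstdint>
#include <optional>

// Returns the sum when it fits in 64 bits, std::nullopt when the caller
// must fall back to the generic (arbitrary-precision) BigInt path.
std::optional<int64_t> TryAdd64(int64_t lhs, int64_t rhs) {
  int64_t result;
  // __builtin_add_overflow (GCC/Clang) reports whether the sum wrapped,
  // playing the role of TryIntPtrAdd's &if_overflow label.
  if (__builtin_add_overflow(lhs, rhs, &result)) return std::nullopt;
  return result;
}

int main() {
  auto fast = TryAdd64(1, 2);          // engaged: 3, stay on the fast path
  auto slow = TryAdd64(INT64_MAX, 1);  // nullopt: would overflow, go slow
  return (fast.has_value() && !slow.has_value()) ? 0 : 1;
}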
@@ -245,7 +279,7 @@ TNode<Object> BinaryOpAssembler::Generate_BinaryOperationWithFeedback( Label do_float_operation(this), end(this), call_stub(this), check_rhsisoddball(this, Label::kDeferred), call_with_any_feedback(this), if_lhsisnotnumber(this, Label::kDeferred), - if_both_bigint(this, Label::kDeferred); + if_both_bigint(this, Label::kDeferred), if_both_bigint64(this); TVARIABLE(Float64T, var_float_lhs); TVARIABLE(Float64T, var_float_rhs); TVARIABLE(Smi, var_type_feedback); @@ -377,7 +411,14 @@ TNode<Object> BinaryOpAssembler::Generate_BinaryOperationWithFeedback( BIND(&if_left_bigint); { GotoIf(TaggedIsSmi(rhs), &call_with_any_feedback); - Branch(IsBigInt(CAST(rhs)), &if_both_bigint, &call_with_any_feedback); + GotoIfNot(IsBigInt(CAST(rhs)), &call_with_any_feedback); + if (Is64()) { + GotoIfLargeBigInt(CAST(lhs), &if_both_bigint); + GotoIfLargeBigInt(CAST(rhs), &if_both_bigint); + Goto(&if_both_bigint64); + } else { + Goto(&if_both_bigint); + } } } @@ -394,31 +435,130 @@ TNode<Object> BinaryOpAssembler::Generate_BinaryOperationWithFeedback( Goto(&call_stub); } + if (Is64()) { + BIND(&if_both_bigint64); + // TODO(panq): Remove the condition when all the operations are supported. + if (op == Operation::kSubtract || op == Operation::kMultiply) { + var_type_feedback = SmiConstant(BinaryOperationFeedback::kBigInt64); + UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), + slot_id, update_feedback_mode); + + TVARIABLE(UintPtrT, lhs_raw); + TVARIABLE(UintPtrT, rhs_raw); + BigIntToRawBytes(CAST(lhs), &lhs_raw, &lhs_raw); + BigIntToRawBytes(CAST(rhs), &rhs_raw, &rhs_raw); + + switch (op) { + case Operation::kSubtract: { + var_result = BigIntFromInt64(TryIntPtrSub( + UncheckedCast<IntPtrT>(lhs_raw.value()), + UncheckedCast<IntPtrT>(rhs_raw.value()), &if_both_bigint)); + Goto(&end); + break; + } + case Operation::kMultiply: { + var_result = BigIntFromInt64(TryIntPtrMul( + UncheckedCast<IntPtrT>(lhs_raw.value()), + UncheckedCast<IntPtrT>(rhs_raw.value()), &if_both_bigint)); + Goto(&end); + break; + } + default: + UNREACHABLE(); + } + } else { + Goto(&if_both_bigint); + } + } + BIND(&if_both_bigint); { var_type_feedback = SmiConstant(BinaryOperationFeedback::kBigInt); UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector(), slot_id, update_feedback_mode); - if (op == Operation::kSubtract) { - Label bigint_too_big(this); - var_result = - CallBuiltin(Builtin::kBigIntSubtractNoThrow, context(), lhs, rhs); + switch (op) { + case Operation::kSubtract: { + Label bigint_too_big(this); + var_result = + CallBuiltin(Builtin::kBigIntSubtractNoThrow, context(), lhs, rhs); - // Check for sentinel that signals BigIntTooBig exception. - GotoIf(TaggedIsSmi(var_result.value()), &bigint_too_big); - Goto(&end); + // Check for sentinel that signals BigIntTooBig exception. + GotoIfNot(TaggedIsSmi(var_result.value()), &end); - BIND(&bigint_too_big); - { // Update feedback to prevent deopt loop. UpdateFeedback(SmiConstant(BinaryOperationFeedback::kAny), maybe_feedback_vector(), slot_id, update_feedback_mode); ThrowRangeError(context(), MessageTemplate::kBigIntTooBig); + break; + } + case Operation::kMultiply: { + Label bigint_too_big(this), + termination_requested(this, Label::kDeferred); + var_result = + CallBuiltin(Builtin::kBigIntMultiplyNoThrow, context(), lhs, rhs); + + GotoIfNot(TaggedIsSmi(var_result.value()), &end); + + // Check for sentinel that signals TerminationReqeusted exception. 
+ GotoIf(TaggedEqual(var_result.value(), SmiConstant(1)), + &termination_requested); + + // Handles BigIntTooBig exception. + // Update feedback to prevent deopt loop. + UpdateFeedback(SmiConstant(BinaryOperationFeedback::kAny), + maybe_feedback_vector(), slot_id, update_feedback_mode); + ThrowRangeError(context(), MessageTemplate::kBigIntTooBig); + + BIND(&termination_requested); + TerminateExecution(context()); + break; + } + case Operation::kDivide: { + Label bigint_div_zero(this), + termination_requested(this, Label::kDeferred); + var_result = + CallBuiltin(Builtin::kBigIntDivideNoThrow, context(), lhs, rhs); + + GotoIfNot(TaggedIsSmi(var_result.value()), &end); + + // Check for sentinel that signals TerminationReqeusted exception. + GotoIf(TaggedEqual(var_result.value(), SmiConstant(1)), + &termination_requested); + + // Handles BigIntDivZero exception. + // Update feedback to prevent deopt loop. + UpdateFeedback(SmiConstant(BinaryOperationFeedback::kAny), + maybe_feedback_vector(), slot_id, update_feedback_mode); + ThrowRangeError(context(), MessageTemplate::kBigIntDivZero); + + BIND(&termination_requested); + TerminateExecution(context()); + break; + } + case Operation::kBitwiseAnd: { + Label bigint_too_big(this); + var_result = + CallBuiltin(Builtin::kBigIntBitwiseAndNoThrow, context(), lhs, rhs); + + // Check for sentinel that signals BigIntTooBig exception. + GotoIf(TaggedIsSmi(var_result.value()), &bigint_too_big); + Goto(&end); + + BIND(&bigint_too_big); + { + // Update feedback to prevent deopt loop. + UpdateFeedback(SmiConstant(BinaryOperationFeedback::kAny), + maybe_feedback_vector(), slot_id, + update_feedback_mode); + ThrowRangeError(context(), MessageTemplate::kBigIntTooBig); + } + break; + } + default: { + var_result = CallRuntime(Runtime::kBigIntBinaryOp, context(), lhs, rhs, + SmiConstant(op)); + Goto(&end); } - } else { - var_result = CallRuntime(Runtime::kBigIntBinaryOp, context(), lhs, rhs, - SmiConstant(op)); - Goto(&end); } } diff --git a/deps/v8/src/ic/handler-configuration.cc b/deps/v8/src/ic/handler-configuration.cc index 5bfd53c698468a..43511407e0eb54 100644 --- a/deps/v8/src/ic/handler-configuration.cc +++ b/deps/v8/src/ic/handler-configuration.cc @@ -426,7 +426,8 @@ void PrintSmiLoadHandler(int raw_handler, std::ostream& os) { << LoadHandler::ExportsIndexBits::decode(raw_handler); break; default: - UNREACHABLE(); + os << "<invalid value " << static_cast<int>(kind) << ">"; + break; } } @@ -518,7 +519,7 @@ void LoadHandler::PrintHandler(Object handler, std::ostream& os) { << Builtins::name(CodeT::cast(handler).builtin_id()) << ")"; } else if (handler.IsSymbol()) { os << "LoadHandler(Symbol)(" << Brief(Symbol::cast(handler)) << ")"; - } else { + } else if (handler.IsLoadHandler()) { LoadHandler load_handler = LoadHandler::cast(handler); int raw_handler = load_handler.smi_handler().ToSmi().value(); os << "LoadHandler(do access check on lookup start object = " @@ -526,9 +527,10 @@ void LoadHandler::PrintHandler(Object handler, std::ostream& os) { << ", lookup on lookup start object = " << LookupOnLookupStartObjectBits::decode(raw_handler) << ", "; PrintSmiLoadHandler(raw_handler, os); - DCHECK_GE(load_handler.data_field_count(), 1); - os << ", data1 = "; - load_handler.data1().ShortPrint(os); + if (load_handler.data_field_count() >= 1) { + os << ", data1 = "; + load_handler.data1().ShortPrint(os); + } if (load_handler.data_field_count() >= 2) { os << ", data2 = "; load_handler.data2().ShortPrint(os); @@ -540,6 +542,8 @@ void LoadHandler::PrintHandler(Object 
handler, std::ostream& os) { os << ", validity cell = "; load_handler.validity_cell().ShortPrint(os); os << ")"; + } else { + os << "LoadHandler(<unexpected>)(" << Brief(handler) << ")"; } } @@ -550,11 +554,11 @@ void StoreHandler::PrintHandler(Object handler, std::ostream& os) { os << "StoreHandler(Smi)("; PrintSmiStoreHandler(raw_handler, os); os << ")" << std::endl; - } else { + } else if (handler.IsStoreHandler()) { os << "StoreHandler("; StoreHandler store_handler = StoreHandler::cast(handler); - if (store_handler.smi_handler().IsCode()) { - Code code = Code::cast(store_handler.smi_handler()); + if (store_handler.smi_handler().IsCodeT()) { + CodeT code = CodeT::cast(store_handler.smi_handler()); os << "builtin = "; code.ShortPrint(os); } else { @@ -565,9 +569,10 @@ void StoreHandler::PrintHandler(Object handler, std::ostream& os) { << LookupOnLookupStartObjectBits::decode(raw_handler) << ", "; PrintSmiStoreHandler(raw_handler, os); } - DCHECK_GE(store_handler.data_field_count(), 1); - os << ", data1 = "; - store_handler.data1().ShortPrint(os); + if (store_handler.data_field_count() >= 1) { + os << ", data1 = "; + store_handler.data1().ShortPrint(os); + } if (store_handler.data_field_count() >= 2) { os << ", data2 = "; store_handler.data2().ShortPrint(os); @@ -579,6 +584,8 @@ void StoreHandler::PrintHandler(Object handler, std::ostream& os) { os << ", validity cell = "; store_handler.validity_cell().ShortPrint(os); os << ")" << std::endl; + } else { + os << "StoreHandler(<unexpected>)(" << Brief(handler) << ")"; } } diff --git a/deps/v8/src/ic/ic.cc b/deps/v8/src/ic/ic.cc index e86d04c44d5113..ae1dde1a8c587d 100644 --- a/deps/v8/src/ic/ic.cc +++ b/deps/v8/src/ic/ic.cc @@ -5,9 +5,7 @@ #include "src/ic/ic.h" #include "src/api/api-arguments-inl.h" -#include "src/api/api.h" #include "src/ast/ast.h" -#include "src/base/bits.h" #include "src/base/logging.h" #include "src/builtins/accessors.h" #include "src/common/assert-scope.h" @@ -16,9 +14,11 @@ #include "src/execution/execution.h" #include "src/execution/frames-inl.h" #include "src/execution/isolate-inl.h" +#include "src/execution/isolate.h" #include "src/execution/protectors-inl.h" #include "src/execution/tiering-manager.h" #include "src/handles/handles-inl.h" +#include "src/handles/maybe-handles.h" #include "src/ic/call-optimization.h" #include "src/ic/handler-configuration-inl.h" #include "src/ic/ic-inl.h" @@ -26,19 +26,13 @@ #include "src/ic/stub-cache.h" #include "src/numbers/conversions.h" #include "src/objects/api-callbacks.h" -#include "src/objects/data-handler-inl.h" #include "src/objects/field-type.h" -#include "src/objects/hash-table-inl.h" -#include "src/objects/heap-number-inl.h" #include "src/objects/instance-type.h" #include "src/objects/js-array-buffer-inl.h" #include "src/objects/js-array-inl.h" #include "src/objects/megadom-handler.h" -#include "src/objects/module-inl.h" #include "src/objects/property-descriptor.h" #include "src/objects/prototype.h" -#include "src/objects/struct-inl.h" -#include "src/runtime/runtime-utils.h" #include "src/runtime/runtime.h" #include "src/tracing/trace-event.h" #include "src/tracing/tracing-category-observer.h" @@ -210,6 +204,7 @@ static void LookupForRead(LookupIterator* it, bool is_has_property) { case LookupIterator::TRANSITION: UNREACHABLE(); case LookupIterator::JSPROXY: + case LookupIterator::WASM_OBJECT: return; case LookupIterator::INTERCEPTOR: { // If there is a getter, return; otherwise loop to perform the lookup. 
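The handler-configuration.cc hunks earlier in this patch harden the diagnostic printers: an unknown handler kind now prints "<invalid value N>" instead of hitting UNREACHABLE(), unexpected handler objects print a "<unexpected>" marker, and data fields are printed only when data_field_count() says they exist. A small editor's sketch of that defensive-printer pattern follows; the Kind enum is made up for illustration.

// Editor's sketch, not part of the patch: a debug printer that degrades
// gracefully on unexpected values instead of crashing the process that is
// trying to produce diagnostics. Kind is a made-up enum.
#include <iostream>

enum class Kind : int { kField = 0, kConstant = 1 };

void PrintKind(Kind kind, std::ostream& os) {
  switch (kind) {
    case Kind::kField:
      os << "field";
      break;
    case Kind::kConstant:
      os << "constant";
      break;
    default:
      // Corrupt or future values are printed, not asserted on: a printer
      // invoked while debugging must not take the process down.
      os << "<invalid value " << static_cast<int>(kind) << ">";
      break;
  }
}

int main() {
  PrintKind(Kind::kField, std::cout);           // prints "field"
  std::cout << '\n';
  PrintKind(static_cast<Kind>(42), std::cout);  // prints "<invalid value 42>"
  std::cout << '\n';
}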
@@ -314,7 +309,7 @@ void IC::OnFeedbackChanged(const char* reason) { // static void IC::OnFeedbackChanged(Isolate* isolate, FeedbackVector vector, FeedbackSlot slot, const char* reason) { - if (FLAG_trace_opt_verbose) { + if (v8_flags.trace_opt_verbose) { if (vector.profiler_ticks() != 0) { StdoutStream os; os << "[resetting ticks for "; @@ -326,7 +321,7 @@ void IC::OnFeedbackChanged(Isolate* isolate, FeedbackVector vector, vector.set_profiler_ticks(0); #ifdef V8_TRACE_FEEDBACK_UPDATES - if (FLAG_trace_feedback_updates) { + if (v8_flags.trace_feedback_updates) { int slot_count = vector.metadata().slot_count(); StdoutStream os; if (slot.IsInvalid()) { @@ -415,7 +410,7 @@ void IC::ConfigureVectorState( MaybeHandle<Object> LoadIC::Load(Handle<Object> object, Handle<Name> name, bool update_feedback, Handle<Object> receiver) { - bool use_ic = (state() != NO_FEEDBACK) && FLAG_use_ic && update_feedback; + bool use_ic = (state() != NO_FEEDBACK) && v8_flags.use_ic && update_feedback; if (receiver.is_null()) { receiver = object; @@ -548,7 +543,8 @@ MaybeHandle<Object> LoadGlobalIC::Load(Handle<Name> name, Object); } - bool use_ic = (state() != NO_FEEDBACK) && FLAG_use_ic && update_feedback; + bool use_ic = + (state() != NO_FEEDBACK) && v8_flags.use_ic && update_feedback; if (use_ic) { // 'const' Variables are mutable if REPL mode is enabled. This disables // compiler inlining for all 'const' variables declared in REPL mode. @@ -601,10 +597,10 @@ static bool AddOneReceiverMapIfMissing( } bool IC::UpdateMegaDOMIC(const MaybeObjectHandle& handler, Handle<Name> name) { - if (!FLAG_enable_mega_dom_ic) return false; + if (!v8_flags.enable_mega_dom_ic) return false; // TODO(gsathya): Enable fuzzing once this feature is more stable. - if (FLAG_fuzzing) return false; + if (v8_flags.fuzzing) return false; // TODO(gsathya): Support KeyedLoadIC, StoreIC and KeyedStoreIC. if (!IsLoadIC()) return false; @@ -665,7 +661,7 @@ bool IC::UpdatePolymorphicIC(Handle<Name> name, Handle<Map> map = lookup_start_object_map(); std::vector<MapAndHandler> maps_and_handlers; - maps_and_handlers.reserve(FLAG_max_valid_polymorphic_map_count); + maps_and_handlers.reserve(v8_flags.max_valid_polymorphic_map_count); int deprecated_maps = 0; int handler_to_overwrite = -1; @@ -712,7 +708,7 @@ bool IC::UpdatePolymorphicIC(Handle<Name> name, int number_of_valid_maps = number_of_maps - deprecated_maps - (handler_to_overwrite != -1); - if (number_of_valid_maps >= FLAG_max_valid_polymorphic_map_count) + if (number_of_valid_maps >= v8_flags.max_valid_polymorphic_map_count) return false; if (number_of_maps == 0 && state() != MONOMORPHIC && state() != POLYMORPHIC) { return false; @@ -869,68 +865,6 @@ void IC::UpdateMegamorphicCache(Handle<Map> map, Handle<Name> name, } } -namespace { - -#if V8_ENABLE_WEBASSEMBLY - -inline WasmValueType GetWasmValueType(wasm::ValueType type) { -#define TYPE_CASE(Name) \ - case wasm::k##Name: \ - return WasmValueType::k##Name; - - switch (type.kind()) { - TYPE_CASE(I8) - TYPE_CASE(I16) - TYPE_CASE(I32) - TYPE_CASE(I64) - TYPE_CASE(F32) - TYPE_CASE(F64) - TYPE_CASE(S128) - TYPE_CASE(Ref) - TYPE_CASE(RefNull) - - case wasm::kRtt: - // Rtt values are not supposed to be made available to JavaScript side. 
- UNREACHABLE(); - - case wasm::kVoid: - case wasm::kBottom: - UNREACHABLE(); - } -#undef TYPE_CASE -} - -Handle<Smi> MakeLoadWasmStructFieldHandler(Isolate* isolate, - Handle<JSReceiver> holder, - LookupIterator* lookup) { - DCHECK(holder->IsWasmObject(isolate)); - WasmValueType type; - int field_offset; - if (holder->IsWasmArray(isolate)) { - // The only named property that WasmArray has is length. - DCHECK_EQ(0, lookup->property_details().field_index()); - DCHECK_EQ(*isolate->factory()->length_string(), *lookup->name()); - type = WasmValueType::kU32; - field_offset = WasmArray::kLengthOffset; - } else { - wasm::StructType* struct_type = Handle<WasmStruct>::cast(holder)->type(); - int field_index = lookup->property_details().field_index(); - type = GetWasmValueType(struct_type->field(field_index)); - field_offset = - WasmStruct::kHeaderSize + struct_type->field_offset(field_index); - - const size_t kMaxWasmFieldOffset = - WasmStruct::kHeaderSize + wasm::StructType::kMaxFieldOffset; - static_assert(kMaxWasmFieldOffset <= LoadHandler::WasmFieldOffsetBits::kMax, - "Bigger numbers of struct fields require different approach"); - } - return LoadHandler::LoadWasmStructField(isolate, type, field_offset); -} - -#endif // V8_ENABLE_WEBASSEMBLY - -} // namespace - MaybeObjectHandle LoadIC::ComputeHandler(LookupIterator* lookup) { Handle<Object> receiver = lookup->GetReceiver(); ReadOnlyRoots roots(isolate()); @@ -1154,30 +1088,14 @@ MaybeObjectHandle LoadIC::ComputeHandler(LookupIterator* lookup) { return MaybeObjectHandle(smi_handler); TRACE_HANDLER_STATS(isolate(), LoadIC_LoadNormalFromPrototypeDH); } else if (lookup->IsElement(*holder)) { -#if V8_ENABLE_WEBASSEMBLY - if (holder_is_lookup_start_object && holder->IsWasmStruct()) { - // TODO(ishell): Consider supporting indexed access to WasmStruct - // fields. 
- TRACE_HANDLER_STATS(isolate(), LoadIC_LoadNonexistentDH); - return MaybeObjectHandle(LoadHandler::LoadNonExistent(isolate())); - } -#endif // V8_ENABLE_WEBASSEMBLY TRACE_HANDLER_STATS(isolate(), LoadIC_SlowStub); return MaybeObjectHandle(LoadHandler::LoadSlow(isolate())); } else { DCHECK_EQ(PropertyLocation::kField, lookup->property_details().location()); -#if V8_ENABLE_WEBASSEMBLY - if (V8_UNLIKELY(holder->IsWasmObject(isolate()))) { - smi_handler = - MakeLoadWasmStructFieldHandler(isolate(), holder, lookup); - } else // NOLINT(readability/braces) -#endif // V8_ENABLE_WEBASSEMBLY - { - DCHECK(holder->IsJSObject(isolate())); - FieldIndex field = lookup->GetFieldIndex(); - smi_handler = LoadHandler::LoadField(isolate(), field); - } + DCHECK(holder->IsJSObject(isolate())); + FieldIndex field = lookup->GetFieldIndex(); + smi_handler = LoadHandler::LoadField(isolate(), field); TRACE_HANDLER_STATS(isolate(), LoadIC_LoadFieldDH); if (holder_is_lookup_start_object) return MaybeObjectHandle(smi_handler); @@ -1226,6 +1144,9 @@ MaybeObjectHandle LoadIC::ComputeHandler(LookupIterator* lookup) { return MaybeObjectHandle(LoadHandler::LoadFromPrototype( isolate(), map, holder_proxy, smi_handler)); } + + case LookupIterator::WASM_OBJECT: + return MaybeObjectHandle(LoadHandler::LoadSlow(isolate())); case LookupIterator::ACCESS_CHECK: case LookupIterator::NOT_FOUND: case LookupIterator::TRANSITION: @@ -1277,11 +1198,8 @@ void KeyedLoadIC::UpdateLoadElement(Handle<HeapObject> receiver, if ((receiver->IsJSObject() && IsMoreGeneralElementsKindTransition( target_receiver_maps.at(0)->elements_kind(), - Handle<JSObject>::cast(receiver)->GetElementsKind())) -#ifdef V8_ENABLE_WEBASSEMBLY - || receiver->IsWasmObject() -#endif - ) { + Handle<JSObject>::cast(receiver)->GetElementsKind())) || + receiver->IsWasmObject()) { Handle<Object> handler = LoadElementHandler(receiver_map, load_mode); return ConfigureVectorState(Handle<Name>(), receiver_map, handler); } @@ -1307,7 +1225,7 @@ void KeyedLoadIC::UpdateLoadElement(Handle<HeapObject> receiver, // If the maximum number of receiver maps has been exceeded, use the generic // version of the IC. 
if (static_cast<int>(target_receiver_maps.size()) > - FLAG_max_valid_polymorphic_map_count) { + v8_flags.max_valid_polymorphic_map_count) { set_slow_stub_reason("max polymorph exceeded"); return; } @@ -1514,7 +1432,7 @@ bool IntPtrKeyToSize(intptr_t index, Handle<HeapObject> receiver, size_t* out) { } bool CanCache(Handle<Object> receiver, InlineCacheState state) { - if (!FLAG_use_ic || state == NO_FEEDBACK) return false; + if (!v8_flags.use_ic || state == NO_FEEDBACK) return false; if (!receiver->IsJSReceiver() && !receiver->IsString()) return false; return !receiver->IsAccessCheckNeeded() && !receiver->IsJSPrimitiveWrapper(); } @@ -1615,6 +1533,8 @@ bool StoreIC::LookupForWrite(LookupIterator* it, Handle<Object> value, if (it->state() != LookupIterator::TRANSITION) { for (; it->IsFound(); it->Next()) { switch (it->state()) { + case LookupIterator::WASM_OBJECT: + return false; case LookupIterator::NOT_FOUND: case LookupIterator::TRANSITION: UNREACHABLE(); @@ -1725,7 +1645,7 @@ MaybeHandle<Object> StoreGlobalIC::Store(Handle<Name> name, Object); } - bool use_ic = (state() != NO_FEEDBACK) && FLAG_use_ic; + bool use_ic = (state() != NO_FEEDBACK) && v8_flags.use_ic; if (use_ic) { if (nexus()->ConfigureLexicalVarMode( lookup_result.context_index, lookup_result.slot_index, @@ -1772,11 +1692,15 @@ Maybe<bool> DefineOwnDataProperty(LookupIterator* it, return JSProxy::DefineOwnProperty(it->isolate(), it->GetHolder<JSProxy>(), it->GetName(), &new_desc, should_throw); } + case LookupIterator::WASM_OBJECT: + RETURN_FAILURE(it->isolate(), kThrowOnError, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); // When lazy feedback is disabled, the original state could be different // while the object is already prepared for TRANSITION. case LookupIterator::TRANSITION: { switch (original_state) { case LookupIterator::JSPROXY: + case LookupIterator::WASM_OBJECT: case LookupIterator::TRANSITION: case LookupIterator::DATA: case LookupIterator::INTERCEPTOR: @@ -1838,7 +1762,7 @@ MaybeHandle<Object> StoreIC::Store(Handle<Object> object, Handle<Name> name, return value; } - bool use_ic = (state() != NO_FEEDBACK) && FLAG_use_ic; + bool use_ic = (state() != NO_FEEDBACK) && v8_flags.use_ic; // If the object is undefined or null it's illegal to try to set any // properties on it; throw a TypeError in that case. if (object->IsNullOrUndefined(isolate())) { @@ -2199,6 +2123,7 @@ MaybeObjectHandle StoreIC::ComputeHandler(LookupIterator* lookup) { case LookupIterator::INTEGER_INDEXED_EXOTIC: case LookupIterator::ACCESS_CHECK: case LookupIterator::NOT_FOUND: + case LookupIterator::WASM_OBJECT: UNREACHABLE(); } return MaybeObjectHandle(); @@ -2288,7 +2213,7 @@ void KeyedStoreIC::UpdateStoreElement(Handle<Map> receiver_map, // If the maximum number of receiver maps has been exceeded, use the // megamorphic version of the IC. if (static_cast<int>(target_maps_and_handlers.size()) > - FLAG_max_valid_polymorphic_map_count) { + v8_flags.max_valid_polymorphic_map_count) { return; } @@ -2537,7 +2462,7 @@ MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object, JSObject::MakePrototypesFast(object, kStartAtPrototype, isolate()); // TODO(jkummerow): Refactor the condition logic here and below. 
- bool use_ic = (state() != NO_FEEDBACK) && FLAG_use_ic && + bool use_ic = (state() != NO_FEEDBACK) && v8_flags.use_ic && !object->IsStringWrapper() && !object->IsAccessCheckNeeded() && !object->IsJSGlobalProxy(); if (use_ic && !object->IsSmi()) { @@ -2549,6 +2474,12 @@ MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object, set_slow_stub_reason("map in array prototype"); use_ic = false; } +#if V8_ENABLE_WEBASSEMBLY + if (heap_object->map().IsWasmObjectMap()) { + set_slow_stub_reason("wasm object"); + use_ic = false; + } +#endif } Handle<Map> old_receiver_map; @@ -2571,16 +2502,19 @@ MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object, } DCHECK(store_handle.is_null()); - ASSIGN_RETURN_ON_EXCEPTION( - isolate(), store_handle, - // TODO(v8:12548): refactor DefineKeyedOwnIC as a subclass of StoreIC - // so the logic doesn't get mixed here. + // TODO(v8:12548): refactor DefineKeyedOwnIC as a subclass of StoreIC + // so the logic doesn't get mixed here. + MaybeHandle<Object> result = IsDefineKeyedOwnIC() ? Runtime::DefineObjectOwnProperty(isolate(), object, key, value, StoreOrigin::kMaybeKeyed) : Runtime::SetObjectProperty(isolate(), object, key, value, - StoreOrigin::kMaybeKeyed), - Object); + StoreOrigin::kMaybeKeyed); + if (result.is_null()) { + DCHECK(isolate()->has_pending_exception()); + set_slow_stub_reason("failed to set property"); + use_ic = false; + } if (use_ic) { if (!old_receiver_map.is_null()) { if (is_arguments) { @@ -2624,7 +2558,7 @@ MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object, } TraceIC("StoreIC", key); - return store_handle; + return result; } namespace { @@ -2647,7 +2581,7 @@ MaybeHandle<Object> StoreInArrayLiteralIC::Store(Handle<JSArray> array, DCHECK(!array->map().IsMapInArrayPrototypeChain(isolate())); DCHECK(index->IsNumber()); - if (!FLAG_use_ic || state() == NO_FEEDBACK || + if (!v8_flags.use_ic || state() == NO_FEEDBACK || MigrateDeprecated(isolate(), array)) { MAYBE_RETURN_NULL(StoreOwnElement(isolate(), array, index, value)); TraceIC("StoreInArrayLiteralIC", index); diff --git a/deps/v8/src/ic/keyed-store-generic.cc b/deps/v8/src/ic/keyed-store-generic.cc index 7f541b701d0344..69e4c3e4e502c4 100644 --- a/deps/v8/src/ic/keyed-store-generic.cc +++ b/deps/v8/src/ic/keyed-store-generic.cc @@ -1129,7 +1129,7 @@ void KeyedStoreGenericAssembler::KeyedStoreGeneric( BIND(¬_internalized); { - if (FLAG_internalize_on_the_fly) { + if (v8_flags.internalize_on_the_fly) { TryInternalizeString(CAST(key), &if_index, &var_index, &if_unique_name, &var_unique, &slow, &slow); } else { diff --git a/deps/v8/src/ic/stub-cache.cc b/deps/v8/src/ic/stub-cache.cc index 2a786398cbead1..4dd60fdfa9f27e 100644 --- a/deps/v8/src/ic/stub-cache.cc +++ b/deps/v8/src/ic/stub-cache.cc @@ -50,7 +50,7 @@ void StubCache::Initialize() { // is scaled by 1 << kCacheIndexShift. int StubCache::PrimaryOffset(Name name, Map map) { // Compute the hash of the name (use entire hash field). 
- uint32_t field = name.raw_hash_field(); + uint32_t field = name.RawHash(); DCHECK(Name::IsHashFieldComputed(field)); // Using only the low bits in 64-bit mode is unlikely to increase the // risk of collision even if the heap is spread over an area larger than diff --git a/deps/v8/src/init/bootstrapper.cc b/deps/v8/src/init/bootstrapper.cc index a9afa3f41367cf..fc7b17d582e79b 100644 --- a/deps/v8/src/init/bootstrapper.cc +++ b/deps/v8/src/init/bootstrapper.cc @@ -47,6 +47,7 @@ #include "src/objects/js-collator.h" #include "src/objects/js-date-time-format.h" #include "src/objects/js-display-names.h" +#include "src/objects/js-duration-format.h" #include "src/objects/js-list-format.h" #include "src/objects/js-locale.h" #include "src/objects/js-number-format.h" @@ -62,12 +63,14 @@ #include "src/objects/js-segments.h" #endif // V8_INTL_SUPPORT #include "src/codegen/script-details.h" +#include "src/objects/js-raw-json.h" #include "src/objects/js-shared-array.h" #include "src/objects/js-struct.h" #include "src/objects/js-temporal-objects-inl.h" #include "src/objects/js-weak-refs.h" #include "src/objects/ordered-hash-table.h" #include "src/objects/property-cell.h" +#include "src/objects/property-descriptor.h" #include "src/objects/slots-inl.h" #include "src/objects/swiss-name-dictionary-inl.h" #include "src/objects/templates.h" @@ -134,13 +137,13 @@ void Bootstrapper::Initialize(bool create_heap_objects) { static const char* GCFunctionName() { bool flag_given = - FLAG_expose_gc_as != nullptr && strlen(FLAG_expose_gc_as) != 0; - return flag_given ? FLAG_expose_gc_as : "gc"; + v8_flags.expose_gc_as != nullptr && strlen(v8_flags.expose_gc_as) != 0; + return flag_given ? v8_flags.expose_gc_as : "gc"; } static bool isValidCpuTraceMarkFunctionName() { - return FLAG_expose_cputracemark_as != nullptr && - strlen(FLAG_expose_cputracemark_as) != 0; + return v8_flags.expose_cputracemark_as != nullptr && + strlen(v8_flags.expose_cputracemark_as) != 0; } void Bootstrapper::InitializeOncePerProcess() { @@ -150,8 +153,8 @@ void Bootstrapper::InitializeOncePerProcess() { v8::RegisterExtension(std::make_unique<TriggerFailureExtension>()); v8::RegisterExtension(std::make_unique<IgnitionStatisticsExtension>()); if (isValidCpuTraceMarkFunctionName()) { - v8::RegisterExtension( - std::make_unique<CpuTraceMarkExtension>(FLAG_expose_cputracemark_as)); + v8::RegisterExtension(std::make_unique<CpuTraceMarkExtension>( + v8_flags.expose_cputracemark_as)); } #ifdef ENABLE_VTUNE_TRACEMARK v8::RegisterExtension( @@ -360,7 +363,7 @@ Handle<JSGlobalProxy> Bootstrapper::NewRemoteContext( } void Bootstrapper::LogAllMaps() { - if (!FLAG_log_maps || isolate_->initialized_from_snapshot()) return; + if (!v8_flags.log_maps || isolate_->initialized_from_snapshot()) return; // Log all created Map objects that are on the heap. For snapshots the Map // logging happens during deserialization in order to avoid printing Maps // multiple times during partial deserialization. 
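// Aside on the StubCache::PrimaryOffset change above (raw_hash_field() ->
// RawHash()): a rough sketch of how such a cache offset is derived from a
// name hash and a map pointer. The constants and the exact mixing are
// illustrative, not V8's actual scheme:
#include <cstdint>

constexpr uint32_t kPrimaryTableSizeSketch = 2048;  // power of two (assumed)
constexpr int kCacheIndexShiftSketch = 2;           // scaling factor (assumed)

uint32_t PrimaryOffsetSketch(uint32_t name_hash, uintptr_t map_ptr) {
  // Mix the low 32 bits of the map pointer into the hash, then mask down
  // to the table size; only low hash bits matter, even on 64-bit targets.
  uint32_t field = name_hash ^ static_cast<uint32_t>(map_ptr);
  return (field & (kPrimaryTableSizeSketch - 1)) << kCacheIndexShiftSketch;
}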
@@ -1774,9 +1777,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object, native_context()->set_initial_array_prototype(*proto); InitializeJSArrayMaps(isolate_, native_context(), - handle(array_function->initial_map(), isolate_)); - SimpleInstallFunction(isolate_, array_function, "isArray", Builtin::kArrayIsArray, 1, true); SimpleInstallFunction(isolate_, array_function, "from", Builtin::kArrayFrom, @@ -2785,6 +2786,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object, SimpleInstallFunction(isolate_, json_object, "stringify", Builtin::kJsonStringify, 3, true); InstallToStringTag(isolate_, json_object, "JSON"); + native_context()->set_json_object(*json_object); } { // -- M a t h @@ -3921,7 +3923,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object, // The cleanupSome function is created but not exposed, as it is used // internally by InvokeFinalizationRegistryCleanupFromTask. // - // It is exposed by FLAG_harmony_weak_refs_with_cleanup_some. + // It is exposed by v8_flags.harmony_weak_refs_with_cleanup_some. Handle<JSFunction> cleanup_some_fun = SimpleCreateFunction( isolate_, factory->InternalizeUtf8String("cleanupSome"), Builtin::kFinalizationRegistryPrototypeCleanupSome, 0, false); @@ -4506,6 +4508,7 @@ void Genesis::InitializeConsole(Handle<JSObject> extras_binding) { EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_import_assertions) EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_class_static_blocks) +EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_symbol_as_weakmap_key) #ifdef V8_INTL_SUPPORT EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_intl_best_fit_matcher) @@ -4513,8 +4516,30 @@ EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE(harmony_intl_best_fit_matcher) #undef EMPTY_INITIALIZE_GLOBAL_FOR_FEATURE +void Genesis::InitializeGlobal_harmony_json_parse_with_source() { + if (!v8_flags.harmony_json_parse_with_source) return; + Handle<Map> map = factory()->NewMap(JS_RAW_JSON_TYPE, JSRawJson::kSize, + TERMINAL_FAST_ELEMENTS_KIND, 1); + Map::EnsureDescriptorSlack(isolate_, map, 1); + { + Descriptor d = Descriptor::DataField( + isolate(), factory()->raw_json_string(), JSRawJson::kRawJsonIndex, NONE, + Representation::Tagged()); + map->AppendDescriptor(isolate(), &d); + } + Map::SetPrototype(isolate(), map, isolate()->factory()->null_value()); + map->SetConstructor(native_context()->object_function()); + native_context()->set_js_raw_json_map(*map); + SimpleInstallFunction(isolate_, + handle(native_context()->json_object(), isolate_), + "rawJSON", Builtin::kJsonRawJson, 1, true); + SimpleInstallFunction(isolate_, + handle(native_context()->json_object(), isolate_), + "isRawJSON", Builtin::kJsonIsRawJson, 1, true); +} + void Genesis::InitializeGlobal_harmony_change_array_by_copy() { - if (!FLAG_harmony_change_array_by_copy) return; + if (!v8_flags.harmony_change_array_by_copy) return; { Handle<JSFunction> array_function(native_context()->array_function(), @@ -4546,13 +4571,15 @@ void Genesis::InitializeGlobal_harmony_change_array_by_copy() { isolate()); SimpleInstallFunction(isolate_, prototype, "toReversed", Builtin::kTypedArrayPrototypeToReversed, 0, true); + SimpleInstallFunction(isolate_, prototype, "toSorted", + Builtin::kTypedArrayPrototypeToSorted, 1, false); SimpleInstallFunction(isolate_, prototype, "with", Builtin::kTypedArrayPrototypeWith, 2, true); } } void Genesis::InitializeGlobal_harmony_regexp_unicode_sets() { - if (!FLAG_harmony_regexp_unicode_sets) return; + if (!v8_flags.harmony_regexp_unicode_sets) return; Handle<JSFunction>
regexp_fun(native_context()->regexp_function(), isolate()); Handle<JSObject> regexp_prototype( @@ -4566,7 +4593,7 @@ void Genesis::InitializeGlobal_harmony_regexp_unicode_sets() { } void Genesis::InitializeGlobal_harmony_shadow_realm() { - if (!FLAG_harmony_shadow_realm) return; + if (!v8_flags.harmony_shadow_realm) return; Factory* factory = isolate()->factory(); // -- S h a d o w R e a l m // #sec-shadowrealm-objects @@ -4638,7 +4665,7 @@ void Genesis::InitializeGlobal_harmony_shadow_realm() { } void Genesis::InitializeGlobal_harmony_struct() { - if (!FLAG_harmony_struct) return; + if (!v8_flags.harmony_struct) return; Handle<JSGlobalObject> global(native_context()->global_object(), isolate()); Handle<String> name = @@ -4670,8 +4697,11 @@ void Genesis::InitializeGlobal_harmony_struct() { isolate()->factory()->NewDescriptorArray(1, 0, AllocationType::kSharedOld); Descriptor descriptor = Descriptor::AccessorConstant( - isolate()->shared_isolate()->factory()->length_string(), - isolate()->shared_isolate()->factory()->shared_array_length_accessor(), + isolate()->shared_heap_isolate()->factory()->length_string(), + isolate() + ->shared_heap_isolate() + ->factory() + ->shared_array_length_accessor(), ALL_ATTRIBUTES_MASK); descriptors->Set(InternalIndex(0), &descriptor); shared_array_fun->initial_map().InitializeDescriptors(isolate(), @@ -4728,7 +4758,7 @@ void Genesis::InitializeGlobal_harmony_struct() { } void Genesis::InitializeGlobal_harmony_array_find_last() { - if (!FLAG_harmony_array_find_last) return; + if (!v8_flags.harmony_array_find_last) return; { Handle<JSFunction> array_function(native_context()->array_function(), @@ -4761,7 +4791,7 @@ void Genesis::InitializeGlobal_harmony_array_find_last() { } void Genesis::InitializeGlobal_harmony_array_grouping() { - if (!FLAG_harmony_array_grouping) return; + if (!v8_flags.harmony_array_grouping) return; Handle<JSFunction> array_function(native_context()->array_function(), isolate()); @@ -4783,8 +4813,8 @@ void Genesis::InitializeGlobal_harmony_array_grouping() { } void Genesis::InitializeGlobal_harmony_sharedarraybuffer() { - if (!FLAG_harmony_sharedarraybuffer || - FLAG_enable_sharedarraybuffer_per_context) { + if (!v8_flags.harmony_sharedarraybuffer || + v8_flags.enable_sharedarraybuffer_per_context) { return; } @@ -4795,7 +4825,7 @@ void Genesis::InitializeGlobal_harmony_sharedarraybuffer() { } void Genesis::InitializeGlobal_harmony_atomics() { - if (!FLAG_harmony_atomics) return; + if (!v8_flags.harmony_atomics) return; Handle<JSGlobalObject> global(native_context()->global_object(), isolate()); @@ -4805,7 +4835,7 @@ void Genesis::InitializeGlobal_harmony_atomics() { } void Genesis::InitializeGlobal_harmony_weak_refs_with_cleanup_some() { - if (!FLAG_harmony_weak_refs_with_cleanup_some) return; + if (!v8_flags.harmony_weak_refs_with_cleanup_some) return; Handle<JSFunction> finalization_registry_fun = isolate()->js_finalization_registry_fun(); @@ -4820,7 +4850,7 @@ void Genesis::InitializeGlobal_harmony_weak_refs_with_cleanup_some() { } void Genesis::InitializeGlobal_regexp_linear_flag() { - if (!FLAG_enable_experimental_regexp_engine) return; + if (!v8_flags.enable_experimental_regexp_engine) return; Handle<JSFunction> regexp_fun(native_context()->regexp_function(), isolate()); Handle<JSObject> regexp_prototype( @@ -4834,7 +4864,7 @@ void Genesis::InitializeGlobal_regexp_linear_flag() { } void Genesis::InitializeGlobal_harmony_rab_gsab() { - if (!FLAG_harmony_rab_gsab) return; + if (!v8_flags.harmony_rab_gsab) return; Handle<JSObject> 
array_buffer_prototype( JSObject::cast(native_context()->array_buffer_fun().instance_prototype()), isolate()); @@ -4865,7 +4895,7 @@ void Genesis::InitializeGlobal_harmony_rab_gsab() { } void Genesis::InitializeGlobal_harmony_temporal() { - if (!FLAG_harmony_temporal) return; + if (!v8_flags.harmony_temporal) return; // -- T e m p o r a l // #sec-temporal-objects Handle<JSObject> temporal = @@ -5508,7 +5538,7 @@ void Genesis::InitializeGlobal_harmony_temporal() { #ifdef V8_INTL_SUPPORT void Genesis::InitializeGlobal_harmony_intl_number_format_v3() { - if (!FLAG_harmony_intl_number_format_v3) return; + if (!v8_flags.harmony_intl_number_format_v3) return; { Handle<JSFunction> number_format_constructor = @@ -5538,7 +5568,7 @@ void Genesis::InitializeGlobal_harmony_intl_number_format_v3() { #endif // V8_INTL_SUPPORT void Genesis::InitializeGlobal_experimental_web_snapshots() { - if (!FLAG_experimental_web_snapshots) return; + if (!v8_flags.experimental_web_snapshots) return; Handle<JSGlobalObject> global(native_context()->global_object(), isolate()); Handle<JSObject> web_snapshot_object = @@ -5552,6 +5582,46 @@ void Genesis::InitializeGlobal_experimental_web_snapshots() { Builtin::kWebSnapshotDeserialize, 2, false); } +#ifdef V8_INTL_SUPPORT +void Genesis::InitializeGlobal_harmony_intl_duration_format() { + if (!FLAG_harmony_intl_duration_format) return; + Handle<JSObject> intl = Handle<JSObject>::cast( + JSReceiver::GetProperty( + isolate(), + Handle<JSReceiver>(native_context()->global_object(), isolate()), + factory()->InternalizeUtf8String("Intl")) + .ToHandleChecked()); + + Handle<JSFunction> duration_format_fun = InstallFunction( + isolate(), intl, "DurationFormat", JS_DURATION_FORMAT_TYPE, + JSDurationFormat::kHeaderSize, 0, factory()->the_hole_value(), + Builtin::kDurationFormatConstructor); + duration_format_fun->shared().set_length(0); + duration_format_fun->shared().DontAdaptArguments(); + InstallWithIntrinsicDefaultProto( + isolate(), duration_format_fun, + Context::INTL_DURATION_FORMAT_FUNCTION_INDEX); + + SimpleInstallFunction(isolate(), duration_format_fun, "supportedLocalesOf", + Builtin::kDurationFormatSupportedLocalesOf, 1, false); + + Handle<JSObject> prototype( + JSObject::cast(duration_format_fun->instance_prototype()), isolate()); + + InstallToStringTag(isolate(), prototype, "Intl.DurationFormat"); + + SimpleInstallFunction(isolate(), prototype, "resolvedOptions", + Builtin::kDurationFormatPrototypeResolvedOptions, 0, + false); + + SimpleInstallFunction(isolate(), prototype, "format", + Builtin::kDurationFormatPrototypeFormat, 1, false); + SimpleInstallFunction(isolate(), prototype, "formatToParts", + Builtin::kDurationFormatPrototypeFormatToParts, 1, + false); +} +#endif // V8_INTL_SUPPORT + Handle<JSFunction> Genesis::CreateArrayBuffer( Handle<String> name, ArrayBufferKind array_buffer_kind) { // Create the %ArrayBufferPrototype% @@ -5791,6 +5861,58 @@ bool Genesis::InstallABunchOfRandomThings() { native_context()->set_data_property_descriptor_map(*map); } + { + // -- TemplateLiteral JSArray Map + Handle<JSFunction> array_function(native_context()->array_function(), + isolate()); + Handle<Map> template_map(array_function->initial_map(), isolate_); + template_map = Map::CopyAsElementsKind(isolate_, template_map, + PACKED_ELEMENTS, OMIT_TRANSITION); + template_map->set_instance_size(template_map->instance_size() + + kTaggedSize); + // Temporarily instantiate full template_literal_object to get the final + // map. 
+ auto template_object = + Handle<JSArray>::cast(factory()->NewJSObjectFromMap(template_map)); + { + DisallowGarbageCollection no_gc; + JSArray raw = *template_object; + raw.set_elements(ReadOnlyRoots(isolate()).empty_fixed_array()); + raw.set_length(Smi::FromInt(0)); + } + + // Install a "raw" data property for {raw_object} on {template_object}. + // See ES#sec-gettemplateobject. + PropertyDescriptor raw_desc; + // Use arbitrary object {template_object} as ".raw" value. + raw_desc.set_value(template_object); + raw_desc.set_configurable(false); + raw_desc.set_enumerable(false); + raw_desc.set_writable(false); + JSArray::DefineOwnProperty(isolate(), template_object, + factory()->raw_string(), &raw_desc, + Just(kThrowOnError)) + .ToChecked(); + + // Freeze the {template_object} as well. + JSObject::SetIntegrityLevel(template_object, FROZEN, kThrowOnError) + .ToChecked(); + { + DisallowGarbageCollection no_gc; + // Verify TemplateLiteralObject::kRawFieldOffset + DescriptorArray desc = template_object->map().instance_descriptors(); + InternalIndex descriptor_index = + desc.Search(*factory()->raw_string(), desc.number_of_descriptors()); + FieldIndex index = + FieldIndex::ForDescriptor(template_object->map(), descriptor_index); + CHECK(index.is_inobject()); + CHECK_EQ(index.offset(), TemplateLiteralObject::kRawFieldOffset); + } + + native_context()->set_js_array_template_literal_object_map( + template_object->map()); + } + // Create a constructor for RegExp results (a variant of Array that // predefines the properties index, input, and groups). { @@ -6000,15 +6122,16 @@ bool Genesis::InstallSpecialObjects(Isolate* isolate, Handle<JSObject> Error = isolate->error_function(); Handle<String> name = isolate->factory()->stackTraceLimit_string(); - Handle<Smi> stack_trace_limit(Smi::FromInt(FLAG_stack_trace_limit), isolate); + Handle<Smi> stack_trace_limit(Smi::FromInt(v8_flags.stack_trace_limit), + isolate); JSObject::AddProperty(isolate, Error, name, stack_trace_limit, NONE); #if V8_ENABLE_WEBASSEMBLY - if (FLAG_expose_wasm) { + if (v8_flags.expose_wasm) { // Install the internal data structures into the isolate and expose on // the global object. WasmJs::Install(isolate, true); - } else if (FLAG_validate_asm) { + } else if (v8_flags.validate_asm) { // Install the internal data structures only; these are needed for asm.js // translated to Wasm to work correctly. WasmJs::Install(isolate, false); @@ -6051,21 +6174,22 @@ bool Genesis::InstallExtensions(Isolate* isolate, v8::ExtensionConfiguration* extensions) { ExtensionStates extension_states; // All extensions have state UNVISITED.
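// Aside: the CHECKs above pin the in-object offset of ".raw" to
// TemplateLiteralObject::kRawFieldOffset so later code can read the field
// directly. In ordinary C++ the same "layout contract" idea looks like a
// static offset assertion (this struct and constant are illustrative, not
// the real V8 object layout):
#include <cstddef>

struct TemplateLiteralLayoutSketch {
  void* elements;  // JSArray header fields come first...
  void* length;
  void* raw;       // ...then the single in-object "raw" slot.
};

static_assert(offsetof(TemplateLiteralLayoutSketch, raw) ==
                  2 * sizeof(void*),
              "the raw field must sit at the agreed in-object offset");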
return InstallAutoExtensions(isolate, &extension_states) && - (!FLAG_expose_gc || + (!v8_flags.expose_gc || InstallExtension(isolate, "v8/gc", &extension_states)) && - (!FLAG_expose_externalize_string || + (!v8_flags.expose_externalize_string || InstallExtension(isolate, "v8/externalize", &extension_states)) && - (!(FLAG_expose_statistics || TracingFlags::is_gc_stats_enabled()) || + (!(v8_flags.expose_statistics || + TracingFlags::is_gc_stats_enabled()) || InstallExtension(isolate, "v8/statistics", &extension_states)) && - (!FLAG_expose_trigger_failure || + (!v8_flags.expose_trigger_failure || InstallExtension(isolate, "v8/trigger-failure", &extension_states)) && - (!FLAG_expose_ignition_statistics || + (!v8_flags.expose_ignition_statistics || InstallExtension(isolate, "v8/ignition-statistics", &extension_states)) && (!isValidCpuTraceMarkFunctionName() || InstallExtension(isolate, "v8/cpumark", &extension_states)) && #ifdef ENABLE_VTUNE_TRACEMARK - (!FLAG_enable_vtune_domain_support || + (!v8_flags.enable_vtune_domain_support || InstallExtension(isolate, "v8/vtunedomain", &extension_states)) && #endif // ENABLE_VTUNE_TRACEMARK InstallRequestedExtensions(isolate, extensions, &extension_states); @@ -6465,7 +6589,7 @@ Genesis::Genesis( DCHECK(native_context().is_null()); base::ElapsedTimer timer; - if (FLAG_profile_deserialization) timer.Start(); + if (v8_flags.profile_deserialization) timer.Start(); DCHECK_EQ(0u, context_snapshot_index); // We get here if there was no context snapshot. CreateRoots(); @@ -6488,7 +6612,7 @@ Genesis::Genesis( if (!InstallExtrasBindings()) return; if (!ConfigureGlobalObject(global_proxy_template)) return; - if (FLAG_profile_deserialization) { + if (v8_flags.profile_deserialization) { double ms = timer.Elapsed().InMillisecondsF(); PrintF("[Initializing context from scratch took %0.3f ms]\n", ms); } @@ -6515,7 +6639,7 @@ Genesis::Genesis( string_function_prototype.map()); } - if (FLAG_disallow_code_generation_from_strings) { + if (v8_flags.disallow_code_generation_from_strings) { native_context()->set_allow_code_gen_from_strings( ReadOnlyRoots(isolate).false_value()); } diff --git a/deps/v8/src/init/heap-symbols.h b/deps/v8/src/init/heap-symbols.h index edc998f27ee3fa..7443a5c0e7be9a 100644 --- a/deps/v8/src/init/heap-symbols.h +++ b/deps/v8/src/init/heap-symbols.h @@ -27,8 +27,10 @@ V(_, dateStyle_string, "dateStyle") \ V(_, dateTimeField_string, "dateTimeField") \ V(_, dayPeriod_string, "dayPeriod") \ + V(_, daysDisplay_string, "daysDisplay") \ V(_, decimal_string, "decimal") \ V(_, dialect_string, "dialect") \ + V(_, digital_string, "digital") \ V(_, direction_string, "direction") \ V(_, endRange_string, "endRange") \ V(_, engineering_string, "engineering") \ @@ -43,6 +45,7 @@ V(_, floor_string, "floor") \ V(_, format_string, "format") \ V(_, fraction_string, "fraction") \ + V(_, fractionalDigits_string, "fractionalDigits") \ V(_, fractionalSecond_string, "fractionalSecond") \ V(_, full_string, "full") \ V(_, granularity_string, "granularity") \ @@ -60,6 +63,7 @@ V(_, hour12_string, "hour12") \ V(_, hourCycle_string, "hourCycle") \ V(_, hourCycles_string, "hourCycles") \ + V(_, hoursDisplay_string, "hoursDisplay") \ V(_, ideo_string, "ideo") \ V(_, ignorePunctuation_string, "ignorePunctuation") \ V(_, Invalid_Date_string, "Invalid Date") \ @@ -78,6 +82,8 @@ V(_, ltr_string, "ltr") \ V(_, maximumFractionDigits_string, "maximumFractionDigits") \ V(_, maximumSignificantDigits_string, "maximumSignificantDigits") \ + V(_, microsecondsDisplay_string, 
"microsecondsDisplay") \ + V(_, millisecondsDisplay_string, "millisecondsDisplay") \ V(_, min2_string, "min2") \ V(_, minimalDays_string, "minimalDays") \ V(_, minimumFractionDigits_string, "minimumFractionDigits") \ @@ -85,8 +91,11 @@ V(_, minimumSignificantDigits_string, "minimumSignificantDigits") \ V(_, minus_0, "-0") \ V(_, minusSign_string, "minusSign") \ + V(_, minutesDisplay_string, "minutesDisplay") \ + V(_, monthsDisplay_string, "monthsDisplay") \ V(_, morePrecision_string, "morePrecision") \ V(_, nan_string, "nan") \ + V(_, nanosecondsDisplay_string, "nanosecondsDisplay") \ V(_, narrowSymbol_string, "narrowSymbol") \ V(_, negative_string, "negative") \ V(_, never_string, "never") \ @@ -106,6 +115,7 @@ V(_, roundingPriority_string, "roundingPriority") \ V(_, rtl_string, "rtl") \ V(_, scientific_string, "scientific") \ + V(_, secondsDisplay_string, "secondsDisplay") \ V(_, segment_string, "segment") \ V(_, SegmentIterator_string, "Segment Iterator") \ V(_, Segments_string, "Segments") \ @@ -125,6 +135,7 @@ V(_, timeZoneName_string, "timeZoneName") \ V(_, trailingZeroDisplay_string, "trailingZeroDisplay") \ V(_, trunc_string, "trunc") \ + V(_, two_digit_string, "2-digit") \ V(_, type_string, "type") \ V(_, unknown_string, "unknown") \ V(_, upper_string, "upper") \ @@ -133,8 +144,10 @@ V(_, unitDisplay_string, "unitDisplay") \ V(_, weekday_string, "weekday") \ V(_, weekend_string, "weekend") \ + V(_, weeksDisplay_string, "weeksDisplay") \ V(_, weekInfo_string, "weekInfo") \ - V(_, yearName_string, "yearName") + V(_, yearName_string, "yearName") \ + V(_, yearsDisplay_string, "yearsDisplay") #else // V8_INTL_SUPPORT #define INTERNALIZED_STRING_LIST_GENERATOR_INTL(V, _) #endif // V8_INTL_SUPPORT @@ -359,6 +372,7 @@ V(_, Proxy_string, "Proxy") \ V(_, query_colon_string, "(?:)") \ V(_, RangeError_string, "RangeError") \ + V(_, raw_json_string, "rawJSON") \ V(_, raw_string, "raw") \ V(_, ReferenceError_string, "ReferenceError") \ V(_, ReflectGet_string, "Reflect.get") \ @@ -528,6 +542,8 @@ F(MC_INCREMENTAL_START) \ F(MC_INCREMENTAL_SWEEPING) +#define MINOR_INCREMENTAL_SCOPES(F) F(MINOR_MC_INCREMENTAL_START) + #define TOP_MC_SCOPES(F) \ F(MC_CLEAR) \ F(MC_EPILOGUE) \ @@ -537,8 +553,16 @@ F(MC_PROLOGUE) \ F(MC_SWEEP) +#define TOP_MINOR_MC_SCOPES(F) \ + F(MINOR_MC_CLEAR) \ + F(MINOR_MC_EVACUATE) \ + F(MINOR_MC_FINISH) \ + F(MINOR_MC_MARK) \ + F(MINOR_MC_SWEEP) + #define TRACER_SCOPES(F) \ INCREMENTAL_SCOPES(F) \ + MINOR_INCREMENTAL_SCOPES(F) \ F(HEAP_EMBEDDER_TRACING_EPILOGUE) \ F(HEAP_EPILOGUE) \ F(HEAP_EPILOGUE_REDUCE_NEW_SPACE) \ @@ -580,8 +604,6 @@ F(MC_EVACUATE_UPDATE_POINTERS_SLOTS_MAIN) \ F(MC_EVACUATE_UPDATE_POINTERS_TO_NEW_ROOTS) \ F(MC_EVACUATE_UPDATE_POINTERS_WEAK) \ - F(MC_FINISH_SWEEP_NEW_LO) \ - F(MC_FINISH_SWEEP_NEW) \ F(MC_FINISH_SWEEP_ARRAY_BUFFERS) \ F(MC_MARK_CLIENT_HEAPS) \ F(MC_MARK_EMBEDDER_PROLOGUE) \ @@ -596,17 +618,18 @@ F(MC_MARK_WEAK_CLOSURE_EPHEMERON_LINEAR) \ F(MC_SWEEP_CODE) \ F(MC_SWEEP_CODE_LO) \ + F(MC_SWEEP_FINISH_NEW) \ F(MC_SWEEP_LO) \ F(MC_SWEEP_MAP) \ F(MC_SWEEP_NEW) \ + F(MC_SWEEP_NEW_LO) \ F(MC_SWEEP_OLD) \ + F(MC_SWEEP_SHARED) \ F(MINOR_MARK_COMPACTOR) \ F(MINOR_MC) \ - F(MINOR_MC_CLEAR) \ + TOP_MINOR_MC_SCOPES(F) \ F(MINOR_MC_CLEAR_STRING_TABLE) \ - F(MINOR_MC_CLEAR_WEAK_LISTS) \ F(MINOR_MC_COMPLETE_SWEEP_ARRAY_BUFFERS) \ - F(MINOR_MC_EVACUATE) \ F(MINOR_MC_EVACUATE_CLEAN_UP) \ F(MINOR_MC_EVACUATE_COPY) \ F(MINOR_MC_EVACUATE_COPY_PARALLEL) \ @@ -617,19 +640,17 @@ F(MINOR_MC_EVACUATE_UPDATE_POINTERS_PARALLEL) \ 
F(MINOR_MC_EVACUATE_UPDATE_POINTERS_SLOTS) \ F(MINOR_MC_EVACUATE_UPDATE_POINTERS_WEAK) \ - F(MINOR_MC_FINISH) \ F(MINOR_MC_FINISH_SWEEP_ARRAY_BUFFERS) \ - F(MINOR_MC_FINISH_SWEEP_NEW) \ - F(MINOR_MC_MARK) \ F(MINOR_MC_MARK_GLOBAL_HANDLES) \ + F(MINOR_MC_MARK_FINISH_INCREMENTAL) \ F(MINOR_MC_MARK_PARALLEL) \ F(MINOR_MC_MARK_SEED) \ F(MINOR_MC_MARK_ROOTS) \ - F(MINOR_MC_MARK_WEAK) \ - F(MINOR_MC_MARKING_DEQUE) \ - F(MINOR_MC_RESET_LIVENESS) \ - F(MINOR_MC_SWEEP) \ + F(MINOR_MC_MARK_CLOSURE_PARALLEL) \ + F(MINOR_MC_MARK_CLOSURE) \ F(MINOR_MC_SWEEP_NEW) \ + F(MINOR_MC_SWEEP_NEW_LO) \ + F(MINOR_MC_SWEEP_FINISH_NEW) \ F(SAFEPOINT) \ F(SCAVENGER) \ F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) \ @@ -664,6 +685,7 @@ F(MINOR_MC_BACKGROUND_EVACUATE_COPY) \ F(MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS) \ F(MINOR_MC_BACKGROUND_MARKING) \ + F(MINOR_MC_BACKGROUND_SWEEPING) \ F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL) #define TRACER_YOUNG_EPOCH_SCOPES(F) \ @@ -673,6 +695,7 @@ F(MINOR_MC_BACKGROUND_EVACUATE_COPY) \ F(MINOR_MC_BACKGROUND_EVACUATE_UPDATE_POINTERS) \ F(MINOR_MC_BACKGROUND_MARKING) \ + F(MINOR_MC_BACKGROUND_SWEEPING) \ F(SCAVENGER) \ F(SCAVENGER_BACKGROUND_SCAVENGE_PARALLEL) \ F(SCAVENGER_COMPLETE_SWEEP_ARRAY_BUFFERS) diff --git a/deps/v8/src/init/v8.cc b/deps/v8/src/init/v8.cc index 13c2244df23571..b6f68c6f17fa64 100644 --- a/deps/v8/src/init/v8.cc +++ b/deps/v8/src/init/v8.cc @@ -99,7 +99,7 @@ void V8::InitializePlatform(v8::Platform* platform) { v8::base::SetPrintStackTrace(platform_->GetStackTracePrinter()); v8::tracing::TracingCategoryObserver::SetUp(); #if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) - if (FLAG_enable_etw_stack_walking) { + if (v8_flags.enable_etw_stack_walking) { // TODO(sartang@microsoft.com): Move to platform specific diagnostics object v8::internal::ETWJITInterface::Register(); } @@ -114,10 +114,10 @@ void V8::InitializePlatform(v8::Platform* platform) { } #define DISABLE_FLAG(flag) \ - if (FLAG_##flag) { \ + if (v8_flags.flag) { \ PrintF(stderr, \ "Warning: disabling flag --" #flag " due to conflicting flags\n"); \ - FLAG_##flag = false; \ + v8_flags.flag = false; \ } void V8::Initialize() { @@ -125,52 +125,50 @@ void V8::Initialize() { CHECK(platform_); // Update logging information before enforcing flag implications. - FlagValue<bool>* log_all_flags[] = {&FLAG_log_all, - &FLAG_log_code, - &FLAG_log_code_disassemble, - &FLAG_log_source_code, - &FLAG_log_source_position, - &FLAG_log_feedback_vector, + FlagValue<bool>* log_all_flags[] = {&v8_flags.log_all, + &v8_flags.log_code, + &v8_flags.log_code_disassemble, + &v8_flags.log_source_code, + &v8_flags.log_source_position, + &v8_flags.log_feedback_vector, &v8_flags.log_function_events, - &FLAG_log_internal_timer_events, - &FLAG_log_deopt, - &FLAG_log_ic, - &FLAG_log_maps}; - if (FLAG_log_all) { + &v8_flags.log_internal_timer_events, + &v8_flags.log_deopt, + &v8_flags.log_ic, + &v8_flags.log_maps}; + if (v8_flags.log_all) { // Enable all logging flags for (auto* flag : log_all_flags) { *flag = true; } - FLAG_log = true; - } else if (!FLAG_log) { + v8_flags.log = true; + } else if (!v8_flags.log) { // Enable --log if any log flag is set. for (const auto* flag : log_all_flags) { if (!*flag) continue; - FLAG_log = true; + v8_flags.log = true; break; } // Profiling flags depend on logging. 
- FLAG_log = FLAG_log || FLAG_perf_prof || FLAG_perf_basic_prof || - FLAG_ll_prof || FLAG_prof || FLAG_prof_cpp || FLAG_gdbjit; -#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) - FLAG_log = FLAG_log || FLAG_enable_etw_stack_walking; -#endif + v8_flags.log = v8_flags.log || v8_flags.perf_prof || + v8_flags.perf_basic_prof || v8_flags.ll_prof || + v8_flags.prof || v8_flags.prof_cpp || v8_flags.gdbjit; } FlagList::EnforceFlagImplications(); - if (FLAG_predictable && FLAG_random_seed == 0) { + if (v8_flags.predictable && v8_flags.random_seed == 0) { // Avoid random seeds in predictable mode. - FLAG_random_seed = 12347; + v8_flags.random_seed = 12347; } - if (FLAG_stress_compaction) { - FLAG_force_marking_deque_overflows = true; - FLAG_gc_global = true; - FLAG_max_semi_space_size = 1; + if (v8_flags.stress_compaction) { + v8_flags.force_marking_deque_overflows = true; + v8_flags.gc_global = true; + v8_flags.max_semi_space_size = 1; } - if (FLAG_trace_turbo) { + if (v8_flags.trace_turbo) { // Create an empty file shared by the process (e.g. the wasm engine). std::ofstream(Isolate::GetTurboCfgFileName(nullptr).c_str(), std::ios_base::trunc); @@ -187,7 +185,7 @@ void V8::Initialize() { // TODO(jgruber): Remove this once / if wasm can run without executable // memory. #if V8_ENABLE_WEBASSEMBLY - if (FLAG_jitless && !FLAG_correctness_fuzzer_suppressions) { + if (v8_flags.jitless && !v8_flags.correctness_fuzzer_suppressions) { DISABLE_FLAG(expose_wasm); } #endif @@ -197,7 +195,7 @@ void V8::Initialize() { // leads to false positives on TSAN bots. // TODO(chromium:1205289): Teach relevant fuzzers to not pass TF tracing // flags instead, and remove this section. - if (FLAG_fuzzing && FLAG_concurrent_recompilation) { + if (v8_flags.fuzzing && v8_flags.concurrent_recompilation) { DISABLE_FLAG(trace_turbo); DISABLE_FLAG(trace_turbo_graph); DISABLE_FLAG(trace_turbo_scheduled); @@ -215,16 +213,16 @@ void V8::Initialize() { // The --jitless and --interpreted-frames-native-stack flags are incompatible // since the latter requires code generation while the former prohibits code // generation. - CHECK(!FLAG_interpreted_frames_native_stack || !FLAG_jitless); + CHECK(!v8_flags.interpreted_frames_native_stack || !v8_flags.jitless); - base::OS::Initialize(FLAG_hard_abort, FLAG_gc_fake_mmap); + base::OS::Initialize(v8_flags.hard_abort, v8_flags.gc_fake_mmap); - if (FLAG_random_seed) { - GetPlatformPageAllocator()->SetRandomMmapSeed(FLAG_random_seed); - GetPlatformVirtualAddressSpace()->SetRandomSeed(FLAG_random_seed); + if (v8_flags.random_seed) { + GetPlatformPageAllocator()->SetRandomMmapSeed(v8_flags.random_seed); + GetPlatformVirtualAddressSpace()->SetRandomSeed(v8_flags.random_seed); } - if (FLAG_print_flag_values) FlagList::PrintValues(); + if (v8_flags.print_flag_values) FlagList::PrintValues(); // Initialize the default FlagList::Hash. FlagList::Hash(); @@ -232,7 +230,7 @@ void V8::Initialize() { // Before initializing internals, freeze the flags such that further changes // are not allowed. Global initialization of the Isolate or the WasmEngine // already reads flags, so they should not be changed afterwards. - if (FLAG_freeze_flags_after_init) FlagList::FreezeFlags(); + if (v8_flags.freeze_flags_after_init) FlagList::FreezeFlags(); #if defined(V8_ENABLE_SANDBOX) // If enabled, the sandbox must be initialized first. 
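// Aside: this file shows why the FLAG_foo -> v8_flags.foo migration that
// runs through the whole diff is useful: flags become members of a single
// struct instance, so code can take FlagValue<T>* pointers, iterate them
// (the log_all_flags array above), and freeze the lot after startup. A
// standalone sketch of the idea; these names are illustrative, not the
// real v8::internal definitions:
template <typename T>
struct FlagValueSketch {
  T value;
  operator T() const { return value; }
  FlagValueSketch& operator=(T v) {
    value = v;
    return *this;
  }
};

struct FlagsSketch {
  FlagValueSketch<bool> log{false};
  FlagValueSketch<bool> log_all{false};
  FlagValueSketch<bool> log_code{false};
  FlagValueSketch<bool> log_maps{false};
};

FlagsSketch flags_sketch;

void ApplyLogAllSketch() {
  // Mirrors the loop above: collect the dependent flags, then set each one.
  FlagValueSketch<bool>* log_flags[] = {&flags_sketch.log_code,
                                        &flags_sketch.log_maps};
  if (flags_sketch.log_all) {
    for (auto* flag : log_flags) *flag = true;
    flags_sketch.log = true;
  }
}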
@@ -291,7 +289,7 @@ void V8::DisposePlatform() { AdvanceStartupState(V8StartupState::kPlatformDisposing); CHECK(platform_); #if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) - if (FLAG_enable_etw_stack_walking) { + if (v8_flags.enable_etw_stack_walking) { v8::internal::ETWJITInterface::Unregister(); } #endif diff --git a/deps/v8/src/inspector/injected-script.cc b/deps/v8/src/inspector/injected-script.cc index c86d658d22dd50..3a917918814124 100644 --- a/deps/v8/src/inspector/injected-script.cc +++ b/deps/v8/src/inspector/injected-script.cc @@ -68,22 +68,39 @@ using protocol::Runtime::PrivatePropertyDescriptor; using protocol::Runtime::PropertyDescriptor; using protocol::Runtime::RemoteObject; +// static +void EvaluateCallback::sendSuccess( + std::weak_ptr<EvaluateCallback> callback, InjectedScript* injectedScript, + std::unique_ptr<protocol::Runtime::RemoteObject> result, + protocol::Maybe<protocol::Runtime::ExceptionDetails> exceptionDetails) { + std::shared_ptr<EvaluateCallback> cb = callback.lock(); + if (!cb) return; + injectedScript->deleteEvaluateCallback(cb); + CHECK_EQ(cb.use_count(), 1); + cb->sendSuccess(std::move(result), std::move(exceptionDetails)); +} + +// static +void EvaluateCallback::sendFailure(std::weak_ptr<EvaluateCallback> callback, + InjectedScript* injectedScript, + const protocol::DispatchResponse& response) { + std::shared_ptr<EvaluateCallback> cb = callback.lock(); + if (!cb) return; + injectedScript->deleteEvaluateCallback(cb); + CHECK_EQ(cb.use_count(), 1); + cb->sendFailure(response); +} + class InjectedScript::ProtocolPromiseHandler { public: - static bool add(V8InspectorSessionImpl* session, + static void add(V8InspectorSessionImpl* session, v8::Local<v8::Context> context, v8::Local<v8::Value> value, int executionContextId, const String16& objectGroup, WrapMode wrapMode, bool replMode, bool throwOnSideEffect, - EvaluateCallback* callback) { - v8::Local<v8::Promise::Resolver> resolver; - if (!v8::Promise::Resolver::New(context).ToLocal(&resolver)) { - callback->sendFailure(Response::InternalError()); - return false; - } - if (!resolver->Resolve(context, value).FromMaybe(false)) { - callback->sendFailure(Response::InternalError()); - return false; - } + std::weak_ptr<EvaluateCallback> callback) { + InjectedScript::ContextScope scope(session, executionContextId); + Response response = scope.initialize(); + if (!response.IsSuccess()) return; v8::MaybeLocal<v8::Promise> originalPromise = value->IsPromise() ? value.As<v8::Promise>() @@ -101,13 +118,36 @@ class InjectedScript::ProtocolPromiseHandler { v8::Function::New(context, catchCallback, wrapper, 0, v8::ConstructorBehavior::kThrow) .ToLocalChecked(); - v8::Local<v8::Promise> promise = resolver->GetPromise(); + + v8::Local<v8::Promise> promise; + v8::Local<v8::Promise::Resolver> resolver; + if (value->IsPromise()) { + // If value is a promise, we can chain the handlers directly onto `value`. + promise = value.As<v8::Promise>(); + } else { + // Otherwise we do `Promise.resolve(value)`. 
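// Aside: the static EvaluateCallback::sendSuccess/sendFailure above take a
// weak_ptr, lock it, and unregister the callback from its owning
// InjectedScript before delivering, so a callback fires at most once even
// if the script is torn down mid-evaluation. The ownership pattern in
// isolation (these names are stand-ins, not the inspector classes):
#include <memory>
#include <set>

struct CallbackSketch {
  virtual ~CallbackSketch() = default;
  virtual void Done() = 0;
};

struct OwnerSketch {
  // The owner holds the only shared_ptr; everyone else holds weak_ptrs.
  std::set<std::shared_ptr<CallbackSketch>> callbacks;

  static void Deliver(OwnerSketch* owner,
                      std::weak_ptr<CallbackSketch> weak) {
    std::shared_ptr<CallbackSketch> cb = weak.lock();
    if (!cb) return;             // already discarded: drop silently
    owner->callbacks.erase(cb);  // unregister first, so it fires at most once
    cb->Done();                  // `cb` keeps the object alive for the call
  }
};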
+ CHECK(!replMode); + if (!v8::Promise::Resolver::New(context).ToLocal(&resolver)) { + EvaluateCallback::sendFailure(callback, scope.injectedScript(), + Response::InternalError()); + return; + } + if (!resolver->Resolve(context, value).FromMaybe(false)) { + EvaluateCallback::sendFailure(callback, scope.injectedScript(), + Response::InternalError()); + return; + } + promise = resolver->GetPromise(); + } + if (promise->Then(context, thenCallbackFunction, catchCallbackFunction) .IsEmpty()) { - callback->sendFailure(Response::InternalError()); - return false; + // Re-initialize after returning from JS. + Response response = scope.initialize(); + if (!response.IsSuccess()) return; + EvaluateCallback::sendFailure(callback, scope.injectedScript(), + Response::InternalError()); } - return true; } private: @@ -143,7 +183,8 @@ class InjectedScript::ProtocolPromiseHandler { ProtocolPromiseHandler(V8InspectorSessionImpl* session, int executionContextId, const String16& objectGroup, WrapMode wrapMode, bool replMode, - bool throwOnSideEffect, EvaluateCallback* callback, + bool throwOnSideEffect, + std::weak_ptr<EvaluateCallback> callback, v8::MaybeLocal<v8::Promise> maybeEvaluationResult) : m_inspector(session->inspector()), m_sessionId(session->sessionId()), @@ -184,24 +225,22 @@ class InjectedScript::ProtocolPromiseHandler { Response response = scope.initialize(); if (!response.IsSuccess()) return; - std::unique_ptr<EvaluateCallback> callback = - scope.injectedScript()->takeEvaluateCallback(m_callback); - if (!callback) return; - // In REPL mode the result is additionally wrapped in an object. // The evaluation result can be found at ".repl_result". v8::Local<v8::Value> result = value; if (m_replMode) { v8::Local<v8::Object> object; if (!result->ToObject(scope.context()).ToLocal(&object)) { - callback->sendFailure(response); + EvaluateCallback::sendFailure(m_callback, scope.injectedScript(), + response); return; } v8::Local<v8::String> name = GetDotReplResultString(m_inspector->isolate()); if (!object->Get(scope.context(), name).ToLocal(&result)) { - callback->sendFailure(response); + EvaluateCallback::sendFailure(m_callback, scope.injectedScript(), + response); return; } } @@ -214,11 +253,13 @@ class InjectedScript::ProtocolPromiseHandler { response = scope.injectedScript()->wrapObject(result, m_objectGroup, m_wrapMode, &wrappedValue); if (!response.IsSuccess()) { - callback->sendFailure(response); + EvaluateCallback::sendFailure(m_callback, scope.injectedScript(), + response); return; } - callback->sendSuccess(std::move(wrappedValue), - Maybe<protocol::Runtime::ExceptionDetails>()); + EvaluateCallback::sendSuccess(m_callback, scope.injectedScript(), + std::move(wrappedValue), + Maybe<protocol::Runtime::ExceptionDetails>()); } void catchCallback(v8::Local<v8::Value> result) { @@ -228,14 +269,12 @@ class InjectedScript::ProtocolPromiseHandler { InjectedScript::ContextScope scope(session, m_executionContextId); Response response = scope.initialize(); if (!response.IsSuccess()) return; - std::unique_ptr<EvaluateCallback> callback = - scope.injectedScript()->takeEvaluateCallback(m_callback); - if (!callback) return; std::unique_ptr<protocol::Runtime::RemoteObject> wrappedValue; response = scope.injectedScript()->wrapObject(result, m_objectGroup, m_wrapMode, &wrappedValue); if (!response.IsSuccess()) { - callback->sendFailure(response); + EvaluateCallback::sendFailure(m_callback, scope.injectedScript(), + response); return; } v8::Isolate* isolate = session->inspector()->isolate(); @@ -258,12 +297,14 @@ 
class InjectedScript::ProtocolPromiseHandler { response = scope.injectedScript()->createExceptionDetails( message, exception, m_objectGroup, &exceptionDetails); if (!response.IsSuccess()) { - callback->sendFailure(response); + EvaluateCallback::sendFailure(m_callback, scope.injectedScript(), + response); return; } - callback->sendSuccess(std::move(wrappedValue), - std::move(exceptionDetails)); + EvaluateCallback::sendSuccess(m_callback, scope.injectedScript(), + std::move(wrappedValue), + std::move(exceptionDetails)); return; } @@ -304,7 +345,8 @@ class InjectedScript::ProtocolPromiseHandler { response = scope.injectedScript()->addExceptionToDetails( result, exceptionDetails.get(), m_objectGroup); if (!response.IsSuccess()) { - callback->sendFailure(response); + EvaluateCallback::sendFailure(m_callback, scope.injectedScript(), + response); return; } if (stack) @@ -313,7 +355,9 @@ class InjectedScript::ProtocolPromiseHandler { if (stack && !stack->isEmpty()) exceptionDetails->setScriptId( String16::fromInteger(stack->topScriptId())); - callback->sendSuccess(std::move(wrappedValue), std::move(exceptionDetails)); + EvaluateCallback::sendSuccess(m_callback, scope.injectedScript(), + std::move(wrappedValue), + std::move(exceptionDetails)); } void sendPromiseCollected() { @@ -323,10 +367,9 @@ class InjectedScript::ProtocolPromiseHandler { InjectedScript::ContextScope scope(session, m_executionContextId); Response response = scope.initialize(); if (!response.IsSuccess()) return; - std::unique_ptr<EvaluateCallback> callback = - scope.injectedScript()->takeEvaluateCallback(m_callback); - if (!callback) return; - callback->sendFailure(Response::ServerError("Promise was collected")); + EvaluateCallback::sendFailure( + m_callback, scope.injectedScript(), + Response::ServerError("Promise was collected")); } V8InspectorImpl* m_inspector; @@ -337,7 +380,7 @@ class InjectedScript::ProtocolPromiseHandler { WrapMode m_wrapMode; bool m_replMode; bool m_throwOnSideEffect; - EvaluateCallback* m_callback; + std::weak_ptr<EvaluateCallback> m_callback; v8::Global<v8::External> m_wrapper; v8::Global<v8::Promise> m_evaluationResult; }; @@ -655,37 +698,46 @@ std::unique_ptr<protocol::Runtime::RemoteObject> InjectedScript::wrapTable( void InjectedScript::addPromiseCallback( V8InspectorSessionImpl* session, v8::MaybeLocal<v8::Value> value, const String16& objectGroup, WrapMode wrapMode, bool replMode, - bool throwOnSideEffect, std::unique_ptr<EvaluateCallback> callback) { + bool throwOnSideEffect, std::shared_ptr<EvaluateCallback> callback) { + m_evaluateCallbacks.insert(callback); + // After stashing the shared_ptr in `m_evaluateCallbacks`, we reset `callback`. + // `ProtocolPromiseHandler::add` can take longer than the lifetime of this + // `InjectedScript` and we don't want `callback` to survive that.
+ std::weak_ptr<EvaluateCallback> weak_callback = callback; + callback.reset(); + CHECK_EQ(weak_callback.use_count(), 1); + if (value.IsEmpty()) { - callback->sendFailure(Response::InternalError()); + EvaluateCallback::sendFailure(weak_callback, this, + Response::InternalError()); return; } + v8::MicrotasksScope microtasksScope(m_context->isolate(), v8::MicrotasksScope::kRunMicrotasks); - if (ProtocolPromiseHandler::add( - session, m_context->context(), value.ToLocalChecked(), - m_context->contextId(), objectGroup, wrapMode, replMode, - throwOnSideEffect, callback.get())) { - m_evaluateCallbacks.insert(callback.release()); - } + ProtocolPromiseHandler::add(session, m_context->context(), + value.ToLocalChecked(), m_context->contextId(), + objectGroup, wrapMode, replMode, + throwOnSideEffect, weak_callback); + // Do not add any code here! `this` might be invalid. + // `ProtocolPromiseHandler::add` calls into JS which could kill this + // `InjectedScript`. } void InjectedScript::discardEvaluateCallbacks() { - for (auto& callback : m_evaluateCallbacks) { - callback->sendFailure( + while (!m_evaluateCallbacks.empty()) { + EvaluateCallback::sendFailure( + *m_evaluateCallbacks.begin(), this, Response::ServerError("Execution context was destroyed.")); - delete callback; } - m_evaluateCallbacks.clear(); + CHECK(m_evaluateCallbacks.empty()); } -std::unique_ptr<EvaluateCallback> InjectedScript::takeEvaluateCallback( - EvaluateCallback* callback) { +void InjectedScript::deleteEvaluateCallback( + std::shared_ptr<EvaluateCallback> callback) { auto it = m_evaluateCallbacks.find(callback); - if (it == m_evaluateCallbacks.end()) return nullptr; - std::unique_ptr<EvaluateCallback> value(*it); + CHECK_NE(it, m_evaluateCallbacks.end()); m_evaluateCallbacks.erase(it); - return value; } Response InjectedScript::findObject(const RemoteObjectId& objectId, diff --git a/deps/v8/src/inspector/injected-script.h b/deps/v8/src/inspector/injected-script.h index fd14bb40fbf65d..4f5bbf2823386f 100644 --- a/deps/v8/src/inspector/injected-script.h +++ b/deps/v8/src/inspector/injected-script.h @@ -58,12 +58,22 @@ using protocol::Response; class EvaluateCallback { public: + static void sendSuccess( + std::weak_ptr<EvaluateCallback> callback, InjectedScript* injectedScript, + std::unique_ptr<protocol::Runtime::RemoteObject> result, + protocol::Maybe<protocol::Runtime::ExceptionDetails> exceptionDetails); + static void sendFailure(std::weak_ptr<EvaluateCallback> callback, + InjectedScript* injectedScript, + const protocol::DispatchResponse& response); + + virtual ~EvaluateCallback() = default; + + private: virtual void sendSuccess( std::unique_ptr<protocol::Runtime::RemoteObject> result, protocol::Maybe<protocol::Runtime::ExceptionDetails> exceptionDetails) = 0; virtual void sendFailure(const protocol::DispatchResponse& response) = 0; - virtual ~EvaluateCallback() = default; }; class InjectedScript final { @@ -114,7 +124,7 @@ class InjectedScript final { v8::MaybeLocal<v8::Value> value, const String16& objectGroup, WrapMode wrapMode, bool replMode, bool throwOnSideEffect, - std::unique_ptr<EvaluateCallback> callback); + std::shared_ptr<EvaluateCallback> callback); Response findObject(const RemoteObjectId&, v8::Local<v8::Value>*) const; String16 objectGroupName(const RemoteObjectId&) const; @@ -221,6 +231,8 @@ class InjectedScript final { String16 bindObject(v8::Local<v8::Value>, const String16& groupName); private: + friend class EvaluateCallback; + v8::Local<v8::Object> commandLineAPI(); void unbindObject(int id); @@ -230,8 
+242,7 @@ class InjectedScript final { class ProtocolPromiseHandler; void discardEvaluateCallbacks(); - std::unique_ptr<EvaluateCallback> takeEvaluateCallback( - EvaluateCallback* callback); + void deleteEvaluateCallback(std::shared_ptr<EvaluateCallback> callback); Response addExceptionToDetails( v8::Local<v8::Value> exception, protocol::Runtime::ExceptionDetails* exceptionDetails, @@ -245,7 +256,7 @@ class InjectedScript final { std::unordered_map<int, v8::Global<v8::Value>> m_idToWrappedObject; std::unordered_map<int, String16> m_idToObjectGroupName; std::unordered_map<String16, std::vector<int>> m_nameToObjectGroup; - std::unordered_set<EvaluateCallback*> m_evaluateCallbacks; + std::unordered_set<std::shared_ptr<EvaluateCallback>> m_evaluateCallbacks; bool m_customPreviewEnabled = false; }; diff --git a/deps/v8/src/inspector/v8-debugger.cc b/deps/v8/src/inspector/v8-debugger.cc index 01a8ce439c535e..00fc67465a34b2 100644 --- a/deps/v8/src/inspector/v8-debugger.cc +++ b/deps/v8/src/inspector/v8-debugger.cc @@ -786,6 +786,11 @@ v8::MaybeLocal<v8::Array> V8Debugger::internalProperties( toV8StringInternalized(m_isolate, "[[Entries]]")); createDataProperty(context, properties, properties->Length(), entries); } + + if (v8::debug::isExperimentalRemoveInternalScopesPropertyEnabled()) { + return properties; + } + if (value->IsGeneratorObject()) { v8::Local<v8::Value> scopes; if (generatorScopes(context, value).ToLocal(&scopes)) { diff --git a/deps/v8/src/inspector/v8-profiler-agent-impl.cc b/deps/v8/src/inspector/v8-profiler-agent-impl.cc index be6e8ab85c1b7a..4b9ad7c40a7994 100644 --- a/deps/v8/src/inspector/v8-profiler-agent-impl.cc +++ b/deps/v8/src/inspector/v8-profiler-agent-impl.cc @@ -28,7 +28,6 @@ static const char preciseCoverageCallCount[] = "preciseCoverageCallCount"; static const char preciseCoverageDetailed[] = "preciseCoverageDetailed"; static const char preciseCoverageAllowTriggeredUpdates[] = "preciseCoverageAllowTriggeredUpdates"; -static const char typeProfileStarted[] = "typeProfileStarted"; } // namespace ProfilerAgentState namespace { @@ -450,80 +449,6 @@ Response V8ProfilerAgentImpl::getBestEffortCoverage( return coverageToProtocol(m_session->inspector(), coverage, out_result); } -namespace { -std::unique_ptr<protocol::Array<protocol::Profiler::ScriptTypeProfile>> -typeProfileToProtocol(V8InspectorImpl* inspector, - const v8::debug::TypeProfile& type_profile) { - auto result = std::make_unique< - protocol::Array<protocol::Profiler::ScriptTypeProfile>>(); - v8::Isolate* isolate = inspector->isolate(); - for (size_t i = 0; i < type_profile.ScriptCount(); i++) { - v8::debug::TypeProfile::ScriptData script_data = - type_profile.GetScriptData(i); - v8::Local<v8::debug::Script> script = script_data.GetScript(); - auto entries = std::make_unique< - protocol::Array<protocol::Profiler::TypeProfileEntry>>(); - - for (const auto& entry : script_data.Entries()) { - auto types = - std::make_unique<protocol::Array<protocol::Profiler::TypeObject>>(); - for (const auto& type : entry.Types()) { - types->emplace_back( - protocol::Profiler::TypeObject::create() - .setName(toProtocolString( - isolate, type.FromMaybe(v8::Local<v8::String>()))) - .build()); - } - entries->emplace_back(protocol::Profiler::TypeProfileEntry::create() - .setOffset(entry.SourcePosition()) - .setTypes(std::move(types)) - .build()); - } - String16 url; - v8::Local<v8::String> name; - if (script->SourceURL().ToLocal(&name) && name->Length()) { - url = toProtocolString(isolate, name); - } else if 
(script->Name().ToLocal(&name) && name->Length()) { - url = resourceNameToUrl(inspector, name); - } - result->emplace_back(protocol::Profiler::ScriptTypeProfile::create() - .setScriptId(String16::fromInteger(script->Id())) - .setUrl(url) - .setEntries(std::move(entries)) - .build()); - } - return result; -} -} // anonymous namespace - -Response V8ProfilerAgentImpl::startTypeProfile() { - m_state->setBoolean(ProfilerAgentState::typeProfileStarted, true); - v8::debug::TypeProfile::SelectMode(m_isolate, - v8::debug::TypeProfileMode::kCollect); - return Response::Success(); -} - -Response V8ProfilerAgentImpl::stopTypeProfile() { - m_state->setBoolean(ProfilerAgentState::typeProfileStarted, false); - v8::debug::TypeProfile::SelectMode(m_isolate, - v8::debug::TypeProfileMode::kNone); - return Response::Success(); -} - -Response V8ProfilerAgentImpl::takeTypeProfile( - std::unique_ptr<protocol::Array<protocol::Profiler::ScriptTypeProfile>>* - out_result) { - if (!m_state->booleanProperty(ProfilerAgentState::typeProfileStarted, - false)) { - return Response::ServerError("Type profile has not been started."); - } - v8::HandleScope handle_scope(m_isolate); - v8::debug::TypeProfile type_profile = - v8::debug::TypeProfile::Collect(m_isolate); - *out_result = typeProfileToProtocol(m_session->inspector(), type_profile); - return Response::Success(); -} - String16 V8ProfilerAgentImpl::nextProfileId() { return String16::fromInteger( v8::base::Relaxed_AtomicIncrement(&s_lastProfileId, 1)); diff --git a/deps/v8/src/inspector/v8-profiler-agent-impl.h b/deps/v8/src/inspector/v8-profiler-agent-impl.h index 4fba6e6c704d69..6e76ff57ff9b9d 100644 --- a/deps/v8/src/inspector/v8-profiler-agent-impl.h +++ b/deps/v8/src/inspector/v8-profiler-agent-impl.h @@ -53,12 +53,6 @@ class V8ProfilerAgentImpl : public protocol::Profiler::Backend { std::unique_ptr<protocol::Array<protocol::Profiler::ScriptCoverage>>* out_result) override; - Response startTypeProfile() override; - Response stopTypeProfile() override; - Response takeTypeProfile( - std::unique_ptr<protocol::Array<protocol::Profiler::ScriptTypeProfile>>* - out_result) override; - void consoleProfile(const String16& title); void consoleProfileEnd(const String16& title); diff --git a/deps/v8/src/inspector/v8-runtime-agent-impl.cc b/deps/v8/src/inspector/v8-runtime-agent-impl.cc index a598af680a20d4..d695d018e1b5a1 100644 --- a/deps/v8/src/inspector/v8-runtime-agent-impl.cc +++ b/deps/v8/src/inspector/v8-runtime-agent-impl.cc @@ -70,9 +70,9 @@ namespace { template <typename ProtocolCallback> class EvaluateCallbackWrapper : public EvaluateCallback { public: - static std::unique_ptr<EvaluateCallback> wrap( + static std::shared_ptr<EvaluateCallback> wrap( std::unique_ptr<ProtocolCallback> callback) { - return std::unique_ptr<EvaluateCallback>( + return std::shared_ptr<EvaluateCallback>( new EvaluateCallbackWrapper(std::move(callback))); } void sendSuccess(std::unique_ptr<protocol::Runtime::RemoteObject> result, diff --git a/deps/v8/src/inspector/v8-webdriver-serializer.cc b/deps/v8/src/inspector/v8-webdriver-serializer.cc index f29c56f2aa5371..bf68a4cdb30b5d 100644 --- a/deps/v8/src/inspector/v8-webdriver-serializer.cc +++ b/deps/v8/src/inspector/v8-webdriver-serializer.cc @@ -56,6 +56,9 @@ String16 _descriptionForRegExpFlags(v8::Local<v8::RegExp> value) { if (flags & v8::RegExp::Flags::kMultiline) result_string_builder.append('m'); if (flags & v8::RegExp::Flags::kDotAll) result_string_builder.append('s'); if (flags & v8::RegExp::Flags::kUnicode) 
result_string_builder.append('u'); + if (flags & v8::RegExp::Flags::kUnicodeSets) { + result_string_builder.append('v'); + } if (flags & v8::RegExp::Flags::kSticky) result_string_builder.append('y'); return result_string_builder.toString(); } diff --git a/deps/v8/src/inspector/value-mirror.cc b/deps/v8/src/inspector/value-mirror.cc index f13463b9fb3ac8..59369cac651ba6 100644 --- a/deps/v8/src/inspector/value-mirror.cc +++ b/deps/v8/src/inspector/value-mirror.cc @@ -243,6 +243,7 @@ String16 descriptionForRegExp(v8::Isolate* isolate, if (flags & v8::RegExp::Flags::kMultiline) description.append('m'); if (flags & v8::RegExp::Flags::kDotAll) description.append('s'); if (flags & v8::RegExp::Flags::kUnicode) description.append('u'); + if (flags & v8::RegExp::Flags::kUnicodeSets) description.append('v'); if (flags & v8::RegExp::Flags::kSticky) description.append('y'); return description.toString(); } @@ -651,6 +652,18 @@ class SymbolMirror final : public ValueMirror { .build(); } + void buildEntryPreview( + v8::Local<v8::Context> context, int* nameLimit, int* indexLimit, + std::unique_ptr<ObjectPreview>* preview) const override { + *preview = + ObjectPreview::create() + .setType(RemoteObject::TypeEnum::Symbol) + .setDescription(descriptionForSymbol(context, m_symbol)) + .setOverflow(false) + .setProperties(std::make_unique<protocol::Array<PropertyPreview>>()) + .build(); + } + v8::Local<v8::Value> v8Value() const override { return m_symbol; } std::unique_ptr<protocol::Runtime::WebDriverValue> buildWebDriverValue( diff --git a/deps/v8/src/interpreter/bytecode-array-builder.cc b/deps/v8/src/interpreter/bytecode-array-builder.cc index 1548986e0ed70a..784d88a9a8f327 100644 --- a/deps/v8/src/interpreter/bytecode-array-builder.cc +++ b/deps/v8/src/interpreter/bytecode-array-builder.cc @@ -58,7 +58,7 @@ BytecodeArrayBuilder::BytecodeArrayBuilder( DCHECK_GE(parameter_count_, 0); DCHECK_GE(local_register_count_, 0); - if (FLAG_ignition_reo) { + if (v8_flags.ignition_reo) { register_optimizer_ = zone->New<BytecodeRegisterOptimizer>( zone, ®ister_allocator_, fixed_register_count(), parameter_count, zone->New<RegisterTransferWriter>(this)); @@ -138,7 +138,7 @@ BytecodeSourceInfo BytecodeArrayBuilder::CurrentSourcePosition( // throw (if expression position filtering is turned on). We only // invalidate the existing source position information if it is used. 
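// Aside: both inspector hunks above extend the same flag-letter mapping
// with 'v' (unicodeSets). The whole scheme reduced to a sketch, with
// illustrative bit values rather than the real v8::RegExp::Flags:
#include <string>

enum RegExpFlagSketch : unsigned {
  kGlobalSketch = 1u << 0,
  kUnicodeSketch = 1u << 1,
  kUnicodeSetsSketch = 1u << 2,
  kStickySketch = 1u << 3,
};

std::string DescribeRegExpFlagsSketch(unsigned flags) {
  std::string out;
  if (flags & kGlobalSketch) out += 'g';
  if (flags & kUnicodeSketch) out += 'u';
  if (flags & kUnicodeSetsSketch) out += 'v';  // the new unicodeSets flag
  if (flags & kStickySketch) out += 'y';
  return out;
}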
if (latest_source_info_.is_statement() || - !FLAG_ignition_filter_expression_positions || + !v8_flags.ignition_filter_expression_positions || !Bytecodes::IsWithoutExternalSideEffects(bytecode)) { source_position = latest_source_info_; latest_source_info_.set_invalid(); @@ -526,9 +526,10 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::GetSuperConstructor(Register out) { return *this; } -BytecodeArrayBuilder& BytecodeArrayBuilder::FindNonDefaultConstructor( +BytecodeArrayBuilder& +BytecodeArrayBuilder::FindNonDefaultConstructorOrConstruct( Register this_function, Register new_target, RegisterList output) { - OutputFindNonDefaultConstructor(this_function, new_target, output); + OutputFindNonDefaultConstructorOrConstruct(this_function, new_target, output); return *this; } @@ -880,11 +881,6 @@ BytecodeArrayBuilder& BytecodeArrayBuilder::DefineKeyedOwnPropertyInLiteral( return *this; } -BytecodeArrayBuilder& BytecodeArrayBuilder::CollectTypeProfile(int position) { - OutputCollectTypeProfile(position); - return *this; -} - BytecodeArrayBuilder& BytecodeArrayBuilder::SetNamedProperty( Register object, size_t name_index, int feedback_slot, LanguageMode language_mode) { diff --git a/deps/v8/src/interpreter/bytecode-array-builder.h b/deps/v8/src/interpreter/bytecode-array-builder.h index 9fb13b8cf34929..a9d21998405b39 100644 --- a/deps/v8/src/interpreter/bytecode-array-builder.h +++ b/deps/v8/src/interpreter/bytecode-array-builder.h @@ -158,10 +158,6 @@ class V8_EXPORT_PRIVATE BytecodeArrayBuilder final { Register object, Register name, DefineKeyedOwnPropertyInLiteralFlags flags, int feedback_slot); - // Collect type information for developer tools. The value for which we - // record the type is stored in the accumulator. - BytecodeArrayBuilder& CollectTypeProfile(int position); - // Set a property named by a property name, trigger the setters and // set traps if necessary. The value to be set should be in the // accumulator. @@ -386,9 +382,8 @@ class V8_EXPORT_PRIVATE BytecodeArrayBuilder final { // throws a TypeError exception. BytecodeArrayBuilder& GetSuperConstructor(Register out); - BytecodeArrayBuilder& FindNonDefaultConstructor(Register this_function, - Register new_target, - RegisterList output); + BytecodeArrayBuilder& FindNonDefaultConstructorOrConstruct( + Register this_function, Register new_target, RegisterList output); // Deletes property from an object. This expects that accumulator contains // the key to be deleted and the register contains a reference to the object. diff --git a/deps/v8/src/interpreter/bytecode-array-writer.cc b/deps/v8/src/interpreter/bytecode-array-writer.cc index 06c9e6537248ca..6962891133e35e 100644 --- a/deps/v8/src/interpreter/bytecode-array-writer.cc +++ b/deps/v8/src/interpreter/bytecode-array-writer.cc @@ -31,7 +31,8 @@ BytecodeArrayWriter::BytecodeArrayWriter( last_bytecode_(Bytecode::kIllegal), last_bytecode_offset_(0), last_bytecode_had_source_info_(false), - elide_noneffectful_bytecodes_(FLAG_ignition_elide_noneffectful_bytecodes), + elide_noneffectful_bytecodes_( + v8_flags.ignition_elide_noneffectful_bytecodes), exit_seen_in_block_(false) { bytecodes_.reserve(512); // Derived via experimentation. 
} diff --git a/deps/v8/src/interpreter/bytecode-generator.cc b/deps/v8/src/interpreter/bytecode-generator.cc index c027fb8b0721f4..706d897d8a760f 100644 --- a/deps/v8/src/interpreter/bytecode-generator.cc +++ b/deps/v8/src/interpreter/bytecode-generator.cc @@ -1213,7 +1213,7 @@ Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode( Handle<CoverageInfo> coverage_info = isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots()); info()->set_coverage_info(coverage_info); - if (FLAG_trace_block_coverage) { + if (v8_flags.trace_block_coverage) { StdoutStream os; coverage_info->CoverageInfoPrint(os, info()->literal()->GetDebugName()); } @@ -1421,18 +1421,7 @@ void BytecodeGenerator::GenerateBytecodeBody() { } // Emit tracing call if requested to do so. - if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter); - - // Emit type profile call. - if (info()->flags().collect_type_profile()) { - feedback_spec()->AddTypeProfileSlot(); - int num_parameters = closure_scope()->num_parameters(); - for (int i = 0; i < num_parameters; i++) { - Register parameter(builder()->Parameter(i)); - builder()->LoadAccumulatorWithRegister(parameter).CollectTypeProfile( - closure_scope()->parameter(i)->initializer_position()); - } - } + if (v8_flags.trace) builder()->CallRuntime(Runtime::kTraceEnter); // Increment the function-scope block coverage counter. BuildIncrementBlockCoverageCounterIfEnabled(literal, SourceRangeKind::kBody); @@ -1831,7 +1820,7 @@ inline int ReduceToSmiSwitchCaseValue(Expression* expr) { // Is the range of Smi's small enough relative to number of cases? inline bool IsSpreadAcceptable(int spread, int ncases) { - return spread < FLAG_switch_table_spread_threshold * ncases; + return spread < v8_flags.switch_table_spread_threshold * ncases; } struct SwitchInfo { @@ -1894,7 +1883,7 @@ bool IsSwitchOptimizable(SwitchStatement* stmt, SwitchInfo* info) { // GCC also jump-table optimizes switch statements with 6 cases or more. if (static_cast<int>(info->covered_cases.size()) >= - FLAG_switch_table_min_cases) { + v8_flags.switch_table_min_cases) { // Because the case spread will be used as the size of the jump table, // we need to check that it doesn't overflow by casting its // min and max bounds to int64_t and computing whether the difference is less @@ -3484,8 +3473,7 @@ void BytecodeGenerator::BuildCreateArrayLiteral( Expression* subexpr = *current; if (subexpr->IsSpread()) { RegisterAllocationScope scope(this); - builder()->SetExpressionAsStatementPosition( - subexpr->AsSpread()->expression()); + builder()->SetExpressionPosition(subexpr->AsSpread()->expression()); VisitForAccumulatorValue(subexpr->AsSpread()->expression()); builder()->SetExpressionPosition(subexpr->AsSpread()->expression()); IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal); @@ -3680,16 +3668,13 @@ void BytecodeGenerator::BuildVariableLoadForAccumulatorValue( } void BytecodeGenerator::BuildReturn(int source_position) { - if (FLAG_trace) { + if (v8_flags.trace) { RegisterAllocationScope register_scope(this); Register result = register_allocator()->NewRegister(); // Runtime returns {result} value, preserving accumulator.
builder()->StoreAccumulatorInRegister(result).CallRuntime( Runtime::kTraceExit, result); } - if (info()->flags().collect_type_profile()) { - builder()->CollectTypeProfile(info()->literal()->return_position()); - } builder()->SetStatementPosition(source_position); builder()->Return(); } @@ -4216,9 +4201,7 @@ void BytecodeGenerator::BuildDestructuringArrayAssignment( } Expression* default_value = GetDestructuringDefaultValue(&target); - if (!target->IsPattern()) { - builder()->SetExpressionAsStatementPosition(target); - } + builder()->SetExpressionPosition(target); AssignmentLhsData lhs_data = PrepareAssignmentLhs(target); @@ -4292,10 +4275,7 @@ void BytecodeGenerator::BuildDestructuringArrayAssignment( // A spread is turned into a loop over the remainer of the iterator. Expression* target = spread->expression(); - - if (!target->IsPattern()) { - builder()->SetExpressionAsStatementPosition(spread); - } + builder()->SetExpressionPosition(spread); AssignmentLhsData lhs_data = PrepareAssignmentLhs(target); @@ -4418,10 +4398,7 @@ void BytecodeGenerator::BuildDestructuringObjectAssignment( Expression* pattern_key = pattern_property->key(); Expression* target = pattern_property->value(); Expression* default_value = GetDestructuringDefaultValue(&target); - - if (!target->IsPattern()) { - builder()->SetExpressionAsStatementPosition(target); - } + builder()->SetExpressionPosition(target); // Calculate this property's key into the assignment RHS value, additionally // storing the key for rest_runtime_callargs if needed. @@ -4723,8 +4700,11 @@ void BytecodeGenerator::VisitYield(Yield* expr) { if (suspend_count_ > 0) { if (IsAsyncGeneratorFunction(function_kind())) { // AsyncGenerator yields (with the exception of the initial yield) - // delegate work to the AsyncGeneratorYield stub, which Awaits the operand - // and on success, wraps the value in an IteratorResult. + // delegate work to the AsyncGeneratorYieldWithAwait stub, which Awaits + // the operand and on success, wraps the value in an IteratorResult. + // + // In the spec the Await is a separate operation, but they are combined + // here to reduce bytecode size. RegisterAllocationScope register_scope(this); RegisterList args = register_allocator()->NewRegisterList(3); builder() @@ -4732,7 +4712,7 @@ void BytecodeGenerator::VisitYield(Yield* expr) { .StoreAccumulatorInRegister(args[1]) // value .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT) .StoreAccumulatorInRegister(args[2]) // is_caught - .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args); + .CallRuntime(Runtime::kInlineAsyncGeneratorYieldWithAwait, args); } else { // Generator yields (with the exception of the initial yield) wrap the // value into IteratorResult. @@ -4849,9 +4829,8 @@ void BytecodeGenerator::VisitYield(Yield* expr) { // // From the generator to its user: // // Forward output, receive new input, and determine resume mode. // if (IS_ASYNC_GENERATOR) { -// // AsyncGeneratorYield abstract operation awaits the operand before -// // resolving the promise for the current AsyncGeneratorRequest. -// %_AsyncGeneratorYield(output.value) +// // Resolve the promise for the current AsyncGeneratorRequest. 
+// %_AsyncGeneratorResolve(output.value, /* done = */ false) // } // input = Suspend(output); // resumeMode = %GeneratorGetResumeMode(); @@ -4986,9 +4965,10 @@ void BytecodeGenerator::VisitYieldStar(YieldStar* expr) { } else { RegisterAllocationScope inner_register_scope(this); DCHECK_EQ(iterator_type, IteratorType::kAsync); - // If generatorKind is async, perform AsyncGeneratorYield(output.value), - // which will await `output.value` before resolving the current - // AsyncGeneratorRequest's promise. + // If generatorKind is async, perform + // AsyncGeneratorResolve(output.value, /* done = */ false), which will + // resolve the current AsyncGeneratorRequest's promise with + // output.value. builder()->LoadNamedProperty( output, ast_string_constants()->value_string(), feedback_index(feedback_spec()->AddLoadICSlot())); @@ -4997,9 +4977,9 @@ void BytecodeGenerator::VisitYieldStar(YieldStar* expr) { builder() ->MoveRegister(generator_object(), args[0]) // generator .StoreAccumulatorInRegister(args[1]) // value - .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT) - .StoreAccumulatorInRegister(args[2]) // is_caught - .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args); + .LoadFalse() + .StoreAccumulatorInRegister(args[2]) // done + .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args); } BuildSuspendPoint(expr->position()); @@ -5322,7 +5302,7 @@ void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj, void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property, Register opt_receiver_out) { RegisterAllocationScope register_scope(this); - if (FLAG_super_ic) { + if (v8_flags.super_ic) { Register receiver = register_allocator()->NewRegister(); BuildThisVariableLoad(); builder()->StoreAccumulatorInRegister(receiver); @@ -5650,12 +5630,12 @@ void BytecodeGenerator::VisitCallSuper(Call* expr) { Register this_function = VisitForRegisterValue(super->this_function_var()); // This register will initially hold the constructor, then afterward it will // hold the instance -- the lifetimes of the two don't need to overlap, and - // this way FindNonDefaultConstructor can choose to write either the instance - // or the constructor into the same register. + // this way FindNonDefaultConstructorOrConstruct can choose to write either + // the instance or the constructor into the same register. 
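As the updated comment in `VisitCallSuper` spells out, the renamed `FindNonDefaultConstructorOrConstruct` reports through a two-register pair: `output[0]` says whether the prototype walk ended at a default base constructor (in which case the instance has already been constructed), and `output[1]` carries either that instance or the first non-default constructor. A hedged sketch of that contract in plain C++, with stand-in types instead of V8 registers:

```cpp
#include <cassert>
#include <variant>

struct Constructor {};  // stand-in for a JSFunction
struct Instance {};     // stand-in for a freshly constructed receiver

// Mirrors the bytecode's contract: {true, instance} when a default base
// ctor was reached (instance already built), {false, ctor} otherwise.
struct FindResult {
  bool constructed;                             // output[0]
  std::variant<Constructor, Instance> payload;  // output[1]
};

FindResult FindNonDefaultConstructorOrConstruct(bool walk_hits_default_base) {
  if (walk_hits_default_base) {
    return {true, Instance{}};    // super ctor call can be skipped entirely
  }
  return {false, Constructor{}};  // caller must still invoke this ctor
}

int main() {
  FindResult r = FindNonDefaultConstructorOrConstruct(true);
  assert(r.constructed && std::holds_alternative<Instance>(r.payload));
}
```

Sharing one register for "constructor, then instance" works precisely because the two lifetimes never overlap, which is what the comment above is pointing out.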
Register constructor_then_instance = register_allocator()->NewRegister(); BytecodeLabel super_ctor_call_done; - bool omit_super_ctor = FLAG_omit_default_ctors && + bool omit_super_ctor = v8_flags.omit_default_ctors && IsDerivedConstructor(info()->literal()->kind()); if (spread_position == Call::kHasNonFinalSpread) { @@ -5790,9 +5770,10 @@ void BytecodeGenerator::VisitCallSuper(Call* expr) { void BytecodeGenerator::BuildSuperCallOptimization( Register this_function, Register new_target, Register constructor_then_instance, BytecodeLabel* super_ctor_call_done) { - DCHECK(FLAG_omit_default_ctors); + DCHECK(v8_flags.omit_default_ctors); RegisterList output = register_allocator()->NewRegisterList(2); - builder()->FindNonDefaultConstructor(this_function, new_target, output); + builder()->FindNonDefaultConstructorOrConstruct(this_function, new_target, + output); builder()->MoveRegister(output[1], constructor_then_instance); builder()->LoadAccumulatorWithRegister(output[0]).JumpIfTrue( ToBooleanMode::kAlreadyBoolean, super_ctor_call_done); @@ -6618,6 +6599,7 @@ void BytecodeGenerator::VisitSuperPropertyReference( void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) { VisitForEffect(binop->left()); + builder()->SetExpressionAsStatementPosition(binop->right()); Visit(binop->right()); } @@ -6626,8 +6608,11 @@ void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) { VisitForEffect(expr->first()); for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) { + builder()->SetExpressionAsStatementPosition(expr->subsequent(i)); VisitForEffect(expr->subsequent(i)); } + builder()->SetExpressionAsStatementPosition( + expr->subsequent(expr->subsequent_length() - 1)); Visit(expr->subsequent(expr->subsequent_length() - 1)); } @@ -7347,7 +7332,7 @@ FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot( FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr, const AstRawString* name) { DCHECK(!expr->IsSuperPropertyReference()); - if (!FLAG_ignition_share_named_property_feedback) { + if (!v8_flags.ignition_share_named_property_feedback) { return feedback_spec()->AddLoadICSlot(); } FeedbackSlotCache::SlotKind slot_kind = @@ -7369,7 +7354,7 @@ FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr, FeedbackSlot BytecodeGenerator::GetCachedLoadSuperICSlot( const AstRawString* name) { - if (!FLAG_ignition_share_named_property_feedback) { + if (!v8_flags.ignition_share_named_property_feedback) { return feedback_spec()->AddLoadICSlot(); } FeedbackSlotCache::SlotKind slot_kind = @@ -7386,7 +7371,7 @@ FeedbackSlot BytecodeGenerator::GetCachedLoadSuperICSlot( FeedbackSlot BytecodeGenerator::GetCachedStoreICSlot(const Expression* expr, const AstRawString* name) { - if (!FLAG_ignition_share_named_property_feedback) { + if (!v8_flags.ignition_share_named_property_feedback) { return feedback_spec()->AddStoreICSlot(language_mode()); } FeedbackSlotCache::SlotKind slot_kind = diff --git a/deps/v8/src/interpreter/bytecodes.h b/deps/v8/src/interpreter/bytecodes.h index bb0dafe16d9d9a..3919daeb20312e 100644 --- a/deps/v8/src/interpreter/bytecodes.h +++ b/deps/v8/src/interpreter/bytecodes.h @@ -157,8 +157,6 @@ namespace interpreter { V(DefineKeyedOwnPropertyInLiteral, ImplicitRegisterUse::kReadAccumulator, \ OperandType::kReg, OperandType::kReg, OperandType::kFlag8, \ OperandType::kIdx) \ - V(CollectTypeProfile, ImplicitRegisterUse::kReadAccumulator, \ - OperandType::kImm) \ \ /* Binary Operators */ \ V(Add, ImplicitRegisterUse::kReadWriteAccumulator, 
OperandType::kReg, \ @@ -228,8 +226,8 @@ namespace interpreter { /* GetSuperConstructor operator */ \ V(GetSuperConstructor, ImplicitRegisterUse::kReadAccumulator, \ OperandType::kRegOut) \ - V(FindNonDefaultConstructor, ImplicitRegisterUse::kNone, OperandType::kReg, \ - OperandType::kReg, OperandType::kRegOutPair) \ + V(FindNonDefaultConstructorOrConstruct, ImplicitRegisterUse::kNone, \ + OperandType::kReg, OperandType::kReg, OperandType::kRegOutPair) \ \ /* Call operations */ \ V(CallAnyReceiver, ImplicitRegisterUse::kWriteAccumulator, \ @@ -611,6 +609,22 @@ class V8_EXPORT_PRIVATE Bytecodes final : public AllStatic { // Returns the scaling applied to scalable operands if bytecode is // is a scaling prefix. static OperandScale PrefixBytecodeToOperandScale(Bytecode bytecode) { +#ifdef V8_TARGET_OS_ANDROID + // The compiler is very smart, turning the switch into branchless code. + // However this triggers a CPU bug on some android devices (see + // crbug.com/1379788). We therefore intentionally use code the compiler has + // a harder time optimizing on Android. At least until clang 15.0 the + // current workaround prevents hitting the CPU bug. + // TODO(chromium:1379788): Remove this hack if we get an external fix. + if (bytecode == Bytecode::kWide || bytecode == Bytecode::kDebugBreakWide) { + return OperandScale::kDouble; + } else if (bytecode == Bytecode::kExtraWide || + bytecode == Bytecode::kDebugBreakExtraWide) { + return OperandScale::kQuadruple; + } else { + UNREACHABLE(); + } +#else switch (bytecode) { case Bytecode::kExtraWide: case Bytecode::kDebugBreakExtraWide: @@ -621,6 +635,7 @@ class V8_EXPORT_PRIVATE Bytecodes final : public AllStatic { default: UNREACHABLE(); } +#endif } // Returns how accumulator is used by |bytecode|. diff --git a/deps/v8/src/interpreter/interpreter-assembler.cc b/deps/v8/src/interpreter/interpreter-assembler.cc index 14e06e2d27d2a1..deca631416832f 100644 --- a/deps/v8/src/interpreter/interpreter-assembler.cc +++ b/deps/v8/src/interpreter/interpreter-assembler.cc @@ -1494,7 +1494,7 @@ TNode<FixedArray> InterpreterAssembler::ExportParametersAndRegisterFile( TNode<IntPtrT> formal_parameter_count_intptr = Signed(ChangeUint32ToWord(formal_parameter_count)); TNode<UintPtrT> register_count = ChangeUint32ToWord(registers.reg_count()); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { CSA_DCHECK(this, IntPtrEqual(registers.base_reg_location(), RegisterLocation(Register(0)))); AbortIfRegisterCountInvalid(array, formal_parameter_count_intptr, @@ -1566,7 +1566,7 @@ TNode<FixedArray> InterpreterAssembler::ImportRegisterFile( TNode<IntPtrT> formal_parameter_count_intptr = Signed(ChangeUint32ToWord(formal_parameter_count)); TNode<UintPtrT> register_count = ChangeUint32ToWord(registers.reg_count()); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { CSA_DCHECK(this, IntPtrEqual(registers.base_reg_location(), RegisterLocation(Register(0)))); AbortIfRegisterCountInvalid(array, formal_parameter_count_intptr, diff --git a/deps/v8/src/interpreter/interpreter-generator.cc b/deps/v8/src/interpreter/interpreter-generator.cc index 57f2eeae3ccf8e..82f053300c3af4 100644 --- a/deps/v8/src/interpreter/interpreter-generator.cc +++ b/deps/v8/src/interpreter/interpreter-generator.cc @@ -743,18 +743,6 @@ IGNITION_HANDLER(DefineKeyedOwnPropertyInLiteral, InterpreterAssembler) { Dispatch(); } -IGNITION_HANDLER(CollectTypeProfile, InterpreterAssembler) { - TNode<Smi> position = BytecodeOperandImmSmi(0); - TNode<Object> value = GetAccumulator(); - - TNode<HeapObject> feedback_vector 
= LoadFeedbackVector(); - TNode<Context> context = GetContext(); - - CallRuntime(Runtime::kCollectTypeProfile, context, position, value, - feedback_vector); - Dispatch(); -} - // LdaModuleVariable <cell_index> <depth> // // Load the contents of a module variable into the accumulator. The variable is @@ -2781,14 +2769,14 @@ IGNITION_HANDLER(ThrowIfNotSuperConstructor, InterpreterAssembler) { } } -// FinNonDefaultConstructor <this_function> <new_target> <output> +// FindNonDefaultConstructorOrConstruct <this_function> <new_target> <output> // // Walks the prototype chain from <this_function>'s super ctor until we see a // non-default ctor. If the walk ends at a default base ctor, creates an // instance and stores it in <output[1]> and stores true into output[0]. // Otherwise, stores the first non-default ctor into <output[1]> and false into // <output[0]>. -IGNITION_HANDLER(FindNonDefaultConstructor, InterpreterAssembler) { +IGNITION_HANDLER(FindNonDefaultConstructorOrConstruct, InterpreterAssembler) { TNode<Context> context = GetContext(); TVARIABLE(Object, constructor); Label found_default_base_ctor(this, &constructor), @@ -2796,8 +2784,9 @@ IGNITION_HANDLER(FindNonDefaultConstructor, InterpreterAssembler) { TNode<JSFunction> this_function = CAST(LoadRegisterAtOperandIndex(0)); - FindNonDefaultConstructor(context, this_function, constructor, - &found_default_base_ctor, &found_something_else); + FindNonDefaultConstructorOrConstruct(context, this_function, constructor, + &found_default_base_ctor, + &found_something_else); BIND(&found_default_base_ctor); { @@ -3176,7 +3165,7 @@ Handle<Code> GenerateBytecodeHandler(Isolate* isolate, const char* debug_name, &state, options, ProfileDataFromFile::TryRead(debug_name)); #ifdef ENABLE_DISASSEMBLER - if (FLAG_trace_ignition_codegen) { + if (v8_flags.trace_ignition_codegen) { StdoutStream os; code->Disassemble(Bytecodes::ToString(bytecode), os, isolate); os << std::flush; diff --git a/deps/v8/src/interpreter/interpreter-intrinsics-generator.cc b/deps/v8/src/interpreter/interpreter-intrinsics-generator.cc index c211631e1805f1..68b6bdcb850038 100644 --- a/deps/v8/src/interpreter/interpreter-intrinsics-generator.cc +++ b/deps/v8/src/interpreter/interpreter-intrinsics-generator.cc @@ -82,7 +82,7 @@ TNode<Object> IntrinsicsGenerator::InvokeIntrinsic( #define HANDLE_CASE(name, lower_case, expected_arg_count) \ __ BIND(&lower_case); \ { \ - if (FLAG_debug_code && expected_arg_count >= 0) { \ + if (v8_flags.debug_code && expected_arg_count >= 0) { \ AbortIfArgCountMismatch(expected_arg_count, args.reg_count()); \ } \ TNode<Object> value = name(args, context, expected_arg_count); \ @@ -261,11 +261,11 @@ TNode<Object> IntrinsicsGenerator::AsyncGeneratorResolve( arg_count); } -TNode<Object> IntrinsicsGenerator::AsyncGeneratorYield( +TNode<Object> IntrinsicsGenerator::AsyncGeneratorYieldWithAwait( const InterpreterAssembler::RegListNodePair& args, TNode<Context> context, int arg_count) { - return IntrinsicAsBuiltinCall(args, context, Builtin::kAsyncGeneratorYield, - arg_count); + return IntrinsicAsBuiltinCall( + args, context, Builtin::kAsyncGeneratorYieldWithAwait, arg_count); } void IntrinsicsGenerator::AbortIfArgCountMismatch(int expected, diff --git a/deps/v8/src/interpreter/interpreter-intrinsics.h b/deps/v8/src/interpreter/interpreter-intrinsics.h index 94ff5e88bbbfb5..028e1f40b7c540 100644 --- a/deps/v8/src/interpreter/interpreter-intrinsics.h +++ b/deps/v8/src/interpreter/interpreter-intrinsics.h @@ -13,25 +13,25 @@ namespace interpreter { // List of 
supported intrisics, with upper case name, lower case name and // expected number of arguments (-1 denoting argument count is variable). -#define INTRINSICS_LIST(V) \ - V(AsyncFunctionAwaitCaught, async_function_await_caught, 2) \ - V(AsyncFunctionAwaitUncaught, async_function_await_uncaught, 2) \ - V(AsyncFunctionEnter, async_function_enter, 2) \ - V(AsyncFunctionReject, async_function_reject, 2) \ - V(AsyncFunctionResolve, async_function_resolve, 2) \ - V(AsyncGeneratorAwaitCaught, async_generator_await_caught, 2) \ - V(AsyncGeneratorAwaitUncaught, async_generator_await_uncaught, 2) \ - V(AsyncGeneratorReject, async_generator_reject, 2) \ - V(AsyncGeneratorResolve, async_generator_resolve, 3) \ - V(AsyncGeneratorYield, async_generator_yield, 3) \ - V(CreateJSGeneratorObject, create_js_generator_object, 2) \ - V(GeneratorGetResumeMode, generator_get_resume_mode, 1) \ - V(GeneratorClose, generator_close, 1) \ - V(GetImportMetaObject, get_import_meta_object, 0) \ - V(CopyDataProperties, copy_data_properties, 2) \ - V(CopyDataPropertiesWithExcludedPropertiesOnStack, \ - copy_data_properties_with_excluded_properties_on_stack, -1) \ - V(CreateIterResultObject, create_iter_result_object, 2) \ +#define INTRINSICS_LIST(V) \ + V(AsyncFunctionAwaitCaught, async_function_await_caught, 2) \ + V(AsyncFunctionAwaitUncaught, async_function_await_uncaught, 2) \ + V(AsyncFunctionEnter, async_function_enter, 2) \ + V(AsyncFunctionReject, async_function_reject, 2) \ + V(AsyncFunctionResolve, async_function_resolve, 2) \ + V(AsyncGeneratorAwaitCaught, async_generator_await_caught, 2) \ + V(AsyncGeneratorAwaitUncaught, async_generator_await_uncaught, 2) \ + V(AsyncGeneratorReject, async_generator_reject, 2) \ + V(AsyncGeneratorResolve, async_generator_resolve, 3) \ + V(AsyncGeneratorYieldWithAwait, async_generator_yield_with_await, 3) \ + V(CreateJSGeneratorObject, create_js_generator_object, 2) \ + V(GeneratorGetResumeMode, generator_get_resume_mode, 1) \ + V(GeneratorClose, generator_close, 1) \ + V(GetImportMetaObject, get_import_meta_object, 0) \ + V(CopyDataProperties, copy_data_properties, 2) \ + V(CopyDataPropertiesWithExcludedPropertiesOnStack, \ + copy_data_properties_with_excluded_properties_on_stack, -1) \ + V(CreateIterResultObject, create_iter_result_object, 2) \ V(CreateAsyncFromSyncIterator, create_async_from_sync_iterator, 1) class IntrinsicsHelper { diff --git a/deps/v8/src/interpreter/interpreter.cc b/deps/v8/src/interpreter/interpreter.cc index 6d474ba89dbe80..11c83fa9b58c85 100644 --- a/deps/v8/src/interpreter/interpreter.cc +++ b/deps/v8/src/interpreter/interpreter.cc @@ -139,7 +139,7 @@ namespace { void MaybePrintAst(ParseInfo* parse_info, UnoptimizedCompilationInfo* compilation_info) { - if (!FLAG_print_ast) return; + if (!v8_flags.print_ast) return; StdoutStream os; std::unique_ptr<char[]> name = compilation_info->literal()->GetDebugName(); @@ -153,15 +153,15 @@ void MaybePrintAst(ParseInfo* parse_info, } bool ShouldPrintBytecode(Handle<SharedFunctionInfo> shared) { - if (!FLAG_print_bytecode) return false; + if (!v8_flags.print_bytecode) return false; // Checks whether function passed the filter. 
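The Android-only path added to `PrefixBytecodeToOperandScale` in bytecodes.h above replaces a dense `switch`, which clang lowers to branchless selects that trigger the CPU bug tracked in crbug.com/1379788, with explicit branches the compiler has a harder time collapsing. Both shapes in a compilable sketch, with a trimmed-down enum standing in for V8's `Bytecode`:

```cpp
#include <cstdlib>

enum class Bytecode { kWide, kExtraWide, kDebugBreakWide, kDebugBreakExtraWide };
enum class OperandScale { kDouble = 2, kQuadruple = 4 };

// Dense switch: easy for the compiler to turn into branchless code.
OperandScale ScaleViaSwitch(Bytecode b) {
  switch (b) {
    case Bytecode::kWide:
    case Bytecode::kDebugBreakWide:
      return OperandScale::kDouble;
    case Bytecode::kExtraWide:
    case Bytecode::kDebugBreakExtraWide:
      return OperandScale::kQuadruple;
  }
  std::abort();  // unreachable; mirrors UNREACHABLE()
}

// Explicit if/else chain: the workaround form used on Android.
OperandScale ScaleViaBranches(Bytecode b) {
  if (b == Bytecode::kWide || b == Bytecode::kDebugBreakWide) {
    return OperandScale::kDouble;
  } else if (b == Bytecode::kExtraWide ||
             b == Bytecode::kDebugBreakExtraWide) {
    return OperandScale::kQuadruple;
  }
  std::abort();
}

int main() {
  return ScaleViaSwitch(Bytecode::kWide) == ScaleViaBranches(Bytecode::kWide)
             ? 0 : 1;
}
```

Whether a given compiler keeps the branches is not guaranteed, which is why the TODO above asks for an external fix before removing the hack.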
if (shared->is_toplevel()) { base::Vector<const char> filter = - base::CStrVector(FLAG_print_bytecode_filter); + base::CStrVector(v8_flags.print_bytecode_filter); return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*'); } else { - return shared->PassesFilter(FLAG_print_bytecode_filter); + return shared->PassesFilter(v8_flags.print_bytecode_filter); } } diff --git a/deps/v8/src/json/json-parser.cc b/deps/v8/src/json/json-parser.cc index c124eedb475996..8d5f563fdf94cf 100644 --- a/deps/v8/src/json/json-parser.cc +++ b/deps/v8/src/json/json-parser.cc @@ -122,20 +122,34 @@ static const constexpr uint8_t character_json_scan_flags[256] = { } // namespace MaybeHandle<Object> JsonParseInternalizer::Internalize(Isolate* isolate, - Handle<Object> object, - Handle<Object> reviver) { + Handle<Object> result, + Handle<Object> reviver, + Handle<String> source) { DCHECK(reviver->IsCallable()); - JsonParseInternalizer internalizer(isolate, - Handle<JSReceiver>::cast(reviver)); + JsonParseInternalizer internalizer(isolate, Handle<JSReceiver>::cast(reviver), + source); Handle<JSObject> holder = isolate->factory()->NewJSObject(isolate->object_function()); Handle<String> name = isolate->factory()->empty_string(); - JSObject::AddProperty(isolate, holder, name, object, NONE); - return internalizer.InternalizeJsonProperty(holder, name); + if (v8_flags.harmony_json_parse_with_source) { + DCHECK(result->IsFixedArray()); + Handle<FixedArray> array = Handle<FixedArray>::cast(result); + DCHECK_EQ(2, array->length()); + Handle<Object> object(array->get(0), isolate); + Handle<Object> val_node(array->get(1), isolate); + JSObject::AddProperty(isolate, holder, name, object, NONE); + return internalizer.InternalizeJsonProperty(holder, name, val_node); + } else { + JSObject::AddProperty(isolate, holder, name, result, NONE); + return internalizer.InternalizeJsonProperty(holder, name, Handle<Object>()); + } } +// TODO(v8:12955): Fix the parse node assert bug. See +// https://github.com/tc39/proposal-json-parse-with-source/issues/35. 
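With source tracking enabled, `InternalizeJsonProperty` below builds a context object for each reviver call and attaches a `source` property only when the parse node is a primitive (stored as a source substring), since object and array text may not round-trip after nested revival. A toy model of that branch, under stated assumptions and not V8's API:

```cpp
#include <iostream>
#include <map>
#include <optional>
#include <string>

// Assumption for illustration: a parse node is either the raw source text
// of a primitive, or empty for object/array nodes.
struct ValNode {
  std::optional<std::string> primitive_source;
};

// Mirrors the reviver call shape: primitives receive {source: "..."},
// containers receive an empty context object.
std::map<std::string, std::string> MakeReviverContext(const ValNode& node) {
  std::map<std::string, std::string> context;
  if (node.primitive_source) context["source"] = *node.primitive_source;
  return context;
}

int main() {
  ValNode num{std::string("1.0000000000000001")};  // digits preserved exactly
  ValNode obj{std::nullopt};
  std::cout << MakeReviverContext(num).count("source") << "\n";  // 1
  std::cout << MakeReviverContext(obj).count("source") << "\n";  // 0
}
```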
MaybeHandle<Object> JsonParseInternalizer::InternalizeJsonProperty( - Handle<JSReceiver> holder, Handle<String> name) { + Handle<JSReceiver> holder, Handle<String> name, Handle<Object> val_node) { + DCHECK(reviver_->IsCallable()); HandleScope outer_scope(isolate_); Handle<Object> value; ASSIGN_RETURN_ON_EXCEPTION( @@ -151,11 +165,31 @@ MaybeHandle<Object> JsonParseInternalizer::InternalizeJsonProperty( isolate_, length_object, Object::GetLengthFromArrayLike(isolate_, object), Object); double length = length_object->Number(); - for (double i = 0; i < length; i++) { - HandleScope inner_scope(isolate_); - Handle<Object> index = isolate_->factory()->NewNumber(i); - Handle<String> index_name = isolate_->factory()->NumberToString(index); - if (!RecurseAndApply(object, index_name)) return MaybeHandle<Object>(); + + if (v8_flags.harmony_json_parse_with_source) { + DCHECK(val_node->IsFixedArray()); + Handle<FixedArray> val_nodes = Handle<FixedArray>::cast(val_node); + for (double i = 0; i < length; i++) { + HandleScope inner_scope(isolate_); + Handle<Object> index = isolate_->factory()->NewNumber(i); + Handle<String> index_name = + isolate_->factory()->NumberToString(index); + if (!RecurseAndApply(object, index_name, + handle(val_nodes->get(i), isolate_))) { + return MaybeHandle<Object>(); + } + } + } else { + DCHECK(val_node.is_null()); + for (double i = 0; i < length; i++) { + HandleScope inner_scope(isolate_); + Handle<Object> index = isolate_->factory()->NewNumber(i); + Handle<String> index_name = + isolate_->factory()->NumberToString(index); + if (!RecurseAndApply(object, index_name, Handle<Object>())) { + return MaybeHandle<Object>(); + } + } } } else { Handle<FixedArray> contents; @@ -165,28 +199,64 @@ MaybeHandle<Object> JsonParseInternalizer::InternalizeJsonProperty( ENUMERABLE_STRINGS, GetKeysConversion::kConvertToString), Object); - for (int i = 0; i < contents->length(); i++) { - HandleScope inner_scope(isolate_); - Handle<String> key_name(String::cast(contents->get(i)), isolate_); - if (!RecurseAndApply(object, key_name)) return MaybeHandle<Object>(); + if (v8_flags.harmony_json_parse_with_source) { + DCHECK(val_node->IsObjectHashTable()); + Handle<ObjectHashTable> val_nodes = + Handle<ObjectHashTable>::cast(val_node); + for (int i = 0; i < contents->length(); i++) { + HandleScope inner_scope(isolate_); + Handle<String> key_name(String::cast(contents->get(i)), isolate_); + Handle<Object> node = handle(val_nodes->Lookup(key_name), isolate_); + DCHECK(!node->IsTheHole()); + if (!RecurseAndApply(object, key_name, node)) { + return MaybeHandle<Object>(); + } + } + } else { + DCHECK(val_node.is_null()); + for (int i = 0; i < contents->length(); i++) { + HandleScope inner_scope(isolate_); + Handle<String> key_name(String::cast(contents->get(i)), isolate_); + if (!RecurseAndApply(object, key_name, Handle<Object>())) { + return MaybeHandle<Object>(); + } + } } } } - Handle<Object> argv[] = {name, value}; Handle<Object> result; - ASSIGN_RETURN_ON_EXCEPTION( - isolate_, result, Execution::Call(isolate_, reviver_, holder, 2, argv), - Object); + if (v8_flags.harmony_json_parse_with_source) { + DCHECK(!val_node.is_null()); + Handle<JSObject> context = + isolate_->factory()->NewJSObject(isolate_->object_function()); + if (val_node->IsString()) { + JSReceiver::CreateDataProperty(isolate_, context, + isolate_->factory()->source_string(), + val_node, Just(kThrowOnError)) + .Check(); + } + Handle<Object> argv[] = {name, value, context}; + ASSIGN_RETURN_ON_EXCEPTION( + isolate_, result, 
Execution::Call(isolate_, reviver_, holder, 3, argv), + Object); + } else { + DCHECK(val_node.is_null()); + Handle<Object> argv[] = {name, value}; + ASSIGN_RETURN_ON_EXCEPTION( + isolate_, result, Execution::Call(isolate_, reviver_, holder, 2, argv), + Object); + } return outer_scope.CloseAndEscape(result); } bool JsonParseInternalizer::RecurseAndApply(Handle<JSReceiver> holder, - Handle<String> name) { + Handle<String> name, + Handle<Object> val_node) { STACK_CHECK(isolate_, false); - + DCHECK(reviver_->IsCallable()); Handle<Object> result; ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate_, result, InternalizeJsonProperty(holder, name), false); + isolate_, result, InternalizeJsonProperty(holder, name, val_node), false); Maybe<bool> change_result = Nothing<bool>(); if (result->IsUndefined(isolate_)) { change_result = JSReceiver::DeletePropertyOrElement(holder, name, @@ -403,12 +473,28 @@ JsonParser<Char>::~JsonParser() { } template <typename Char> -MaybeHandle<Object> JsonParser<Char>::ParseJson() { - MaybeHandle<Object> result = ParseJsonValue(); - if (!Check(JsonToken::EOS)) +MaybeHandle<Object> JsonParser<Char>::ParseJson(Handle<Object> reviver) { + Handle<Object> result; + // Only record the val node when reviver is callable. + bool reviver_is_callable = reviver->IsCallable(); + bool should_track_json_source = + v8_flags.harmony_json_parse_with_source && reviver_is_callable; + if (should_track_json_source) { + ASSIGN_RETURN_ON_EXCEPTION(isolate(), result, ParseJsonValue<true>(reviver), + Object); + } else { + ASSIGN_RETURN_ON_EXCEPTION(isolate(), result, + ParseJsonValue<false>(reviver), Object); + } + + if (!Check(JsonToken::EOS)) { ReportUnexpectedToken( peek(), MessageTemplate::kJsonParseUnexpectedNonWhiteSpaceCharacter); - if (isolate_->has_pending_exception()) return MaybeHandle<Object>(); + return MaybeHandle<Object>(); + } + if (isolate_->has_pending_exception()) { + return MaybeHandle<Object>(); + } return result; } @@ -662,7 +748,7 @@ Handle<Object> JsonParser<Char>::BuildJsonObject( : reinterpret_cast<Address>( mutable_double_buffer->GetDataStartAddress()); Address filler_address = mutable_double_address; - if (kTaggedSize != kDoubleSize) { + if (!V8_COMPRESS_POINTERS_8GB_BOOL && kTaggedSize != kDoubleSize) { if (IsAligned(mutable_double_address, kDoubleAlignment)) { mutable_double_address += kTaggedSize; } else { @@ -681,7 +767,7 @@ Handle<Object> JsonParser<Char>::BuildJsonObject( if (details.representation().IsDouble()) { if (value.IsSmi()) { - if (kTaggedSize != kDoubleSize) { + if (!V8_COMPRESS_POINTERS_8GB_BOOL && kTaggedSize != kDoubleSize) { // Write alignment filler. 
HeapObject filler = HeapObject::FromAddress(filler_address); filler.set_map_after_allocation( @@ -698,7 +784,8 @@ Handle<Object> JsonParser<Char>::BuildJsonObject( hn.set_map_after_allocation(*factory()->heap_number_map()); HeapNumber::cast(hn).set_value_as_bits(bits, kRelaxedStore); value = hn; - mutable_double_address += kMutableDoubleSize; + mutable_double_address += + ALIGN_TO_ALLOCATION_ALIGNMENT(kMutableDoubleSize); } else { DCHECK(value.IsHeapNumber()); HeapObject::cast(value).set_map(*factory()->heap_number_map(), @@ -712,7 +799,7 @@ Handle<Object> JsonParser<Char>::BuildJsonObject( #ifdef DEBUG Address end = reinterpret_cast<Address>(mutable_double_buffer->GetDataEndAddress()); - if (kTaggedSize != kDoubleSize) { + if (!V8_COMPRESS_POINTERS_8GB_BOOL && kTaggedSize != kDoubleSize) { DCHECK_EQ(std::min(filler_address, mutable_double_address), end); DCHECK_GE(filler_address, end); DCHECK_GE(mutable_double_address, end); @@ -724,7 +811,8 @@ Handle<Object> JsonParser<Char>::BuildJsonObject( // must ensure that the sweeper is not running or has already swept the // object's page. Otherwise the GC can add the contents of // mutable_double_buffer to the free list. - isolate()->heap()->EnsureSweepingCompleted(*mutable_double_buffer); + isolate()->heap()->EnsureSweepingCompletedForObject( + *mutable_double_buffer); mutable_double_buffer->set_length(0); } } @@ -787,9 +875,56 @@ Handle<Object> JsonParser<Char>::BuildJsonArray( return array; } +// Parse rawJSON value. +template <typename Char> +bool JsonParser<Char>::ParseRawJson() { + if (end_ == cursor_) { + isolate_->Throw(*isolate_->factory()->NewSyntaxError( + MessageTemplate::kInvalidRawJsonValue)); + return false; + } + next_ = V8_LIKELY(*cursor_ <= unibrow::Latin1::kMaxChar) + ? one_char_json_tokens[*cursor_] + : JsonToken::ILLEGAL; + switch (peek()) { + case JsonToken::STRING: + Consume(JsonToken::STRING); + ScanJsonString(false); + break; + + case JsonToken::NUMBER: + ParseJsonNumber(); + break; + + case JsonToken::TRUE_LITERAL: + ScanLiteral("true"); + break; + + case JsonToken::FALSE_LITERAL: + ScanLiteral("false"); + break; + + case JsonToken::NULL_LITERAL: + ScanLiteral("null"); + break; + + default: + ReportUnexpectedCharacter(CurrentCharacter()); + return false; + } + if (isolate_->has_pending_exception()) return false; + if (cursor_ != end_) { + isolate_->Throw(*isolate_->factory()->NewSyntaxError( + MessageTemplate::kInvalidRawJsonValue)); + return false; + } + return true; +} + // Parse any JSON value. template <typename Char> -MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { +template <bool should_track_json_source> +MaybeHandle<Object> JsonParser<Char>::ParseJsonValue(Handle<Object> reviver) { std::vector<JsonContinuation> cont_stack; SmallVector<JsonProperty> property_stack; SmallVector<Handle<Object>> element_stack; @@ -799,6 +934,34 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { JsonContinuation cont(isolate_, JsonContinuation::kReturn, 0); Handle<Object> value; + + // We use val_node to record current json value's parse node. For primitive + // values, the val_node is the source string of the json value. For JSObject + // values, the val_node is an ObjectHashTable in which the key is the property + // name and the value is the property value's parse node. For JSArray values, + // the val_node is a FixedArray containing the parse nodes of the elements. 
+ // For JSObject values, the order in which properties are defined may + // differ from the order in which properties are enumerated when calling + // InternalizeJSONProperty for the JSObject value. E.g., for the JSON source + // string '{"a": 1, "1": 2}' the property enumeration order is ["1", + // "a"]. Moreover, properties may be defined repeatedly in the JSON string: + // for the JSON string '{"a": 1, "a": 1}' the enumeration order is ["a"]. + // So we cannot use a FixedArray to record the properties' parse nodes in + // definition order; instead we use an ObjectHashTable keyed by property + // name, and look up each property's parse node by name when calling + // InternalizeJSONProperty. + Handle<Object> val_node; + // Record the start position and end position for the primitive values. + int start_position; + int end_position; + + // element_val_node_stack is used to track all the elements' parse nodes; + // we use it to construct the JSArray's parse node. + SmallVector<Handle<Object>> element_val_node_stack; + // property_val_node_stack is used to track all the property values' parse + // nodes; we use it to construct the JSObject's parse node. + SmallVector<Handle<Object>> property_val_node_stack; while (true) { // Produce a json value. // @@ -809,14 +972,28 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { SkipWhitespace(); // The switch is immediately followed by 'break' so we can use 'break' to // break out of the loop, and 'continue' to continue the loop. + + if (should_track_json_source) { + start_position = position(); + } switch (peek()) { case JsonToken::STRING: Consume(JsonToken::STRING); value = MakeString(ScanJsonString(false)); + if (should_track_json_source) { + end_position = position(); + val_node = isolate_->factory()->NewSubString( + source_, start_position, end_position); + } break; case JsonToken::NUMBER: value = ParseJsonNumber(); + if (should_track_json_source) { + end_position = position(); + val_node = isolate_->factory()->NewSubString( + source_, start_position, end_position); + } break; case JsonToken::LBRACE: { @@ -824,6 +1001,7 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { if (Check(JsonToken::RBRACE)) { // TODO(verwaest): Directly use the map instead.
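The comment above motivates the container choice per value kind: objects need key-based lookup because definition order, enumeration order, and duplicate keys can all disagree. The same point in a standalone analogy built on std::unordered_map, where the last occurrence of a duplicated key wins, matching JSON.parse semantics:

```cpp
#include <cassert>
#include <string>
#include <unordered_map>

int main() {
  // Parse nodes for '{"a": 1, "a": 2}': keyed storage makes the later
  // definition overwrite the earlier one, exactly as JSON.parse does.
  std::unordered_map<std::string, std::string> val_nodes;
  val_nodes["a"] = "1";  // first definition of "a"
  val_nodes["a"] = "2";  // duplicate key: last write wins
  assert(val_nodes.size() == 1 && val_nodes.at("a") == "2");

  // For '{"a": 1, "1": 2}' enumeration visits "1" before "a" (index-like
  // keys come first in JS property order), so a position-indexed array of
  // nodes would pair values with the wrong keys; lookup by name stays
  // correct regardless of order.
  val_nodes = {{"a", "1"}, {"1", "2"}};
  assert(val_nodes.at("1") == "2" && val_nodes.at("a") == "1");
  return 0;
}
```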
value = factory()->NewJSObject(object_constructor_); + val_node = ObjectHashTable::New(isolate_, 0); break; } @@ -836,6 +1014,9 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { ExpectNext(JsonToken::STRING, MessageTemplate::kJsonParseExpectedPropNameOrRBrace); property_stack.emplace_back(ScanJsonPropertyKey(&cont)); + if (should_track_json_source) { + property_val_node_stack.emplace_back(Handle<Object>()); + } ExpectNext(JsonToken::COLON, MessageTemplate::kJsonParseExpectedColonAfterPropertyName); @@ -848,6 +1029,7 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { Consume(JsonToken::LBRACK); if (Check(JsonToken::RBRACK)) { value = factory()->NewJSArray(0, PACKED_SMI_ELEMENTS); + val_node = factory()->NewFixedArray(0); break; } @@ -862,16 +1044,25 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { case JsonToken::TRUE_LITERAL: ScanLiteral("true"); value = factory()->true_value(); + if (should_track_json_source) { + val_node = isolate_->factory()->true_string(); + } break; case JsonToken::FALSE_LITERAL: ScanLiteral("false"); value = factory()->false_value(); + if (should_track_json_source) { + val_node = isolate_->factory()->false_string(); + } break; case JsonToken::NULL_LITERAL: ScanLiteral("null"); value = factory()->null_value(); + if (should_track_json_source) { + val_node = isolate_->factory()->null_string(); + } break; case JsonToken::COLON: @@ -891,7 +1082,6 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { case JsonToken::WHITESPACE: UNREACHABLE(); } - // Done producing a value, consume it. break; } @@ -905,11 +1095,22 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { // break out of the loop, and 'continue' to continue the loop. switch (cont.type()) { case JsonContinuation::kReturn: - return cont.scope.CloseAndEscape(value); + if (should_track_json_source) { + DCHECK(!val_node.is_null()); + Handle<FixedArray> result = factory()->NewFixedArray(2); + result->set(0, *value); + result->set(1, *val_node); + return cont.scope.CloseAndEscape(result); + } else { + return cont.scope.CloseAndEscape(value); + } case JsonContinuation::kObjectProperty: { // Store the previous property value into its property info. property_stack.back().value = value; + if (should_track_json_source) { + property_val_node_stack.back() = val_node; + } if (V8_LIKELY(Check(JsonToken::COMMA))) { // Parse the property key. @@ -918,6 +1119,9 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { MessageTemplate::kJsonParseExpectedDoubleQuotedPropertyName); property_stack.emplace_back(ScanJsonPropertyKey(&cont)); + if (should_track_json_source) { + property_val_node_stack.emplace_back(Handle<Object>()); + } ExpectNext(JsonToken::COLON); // Break to start producing the subsequent property value. @@ -940,12 +1144,35 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { } } value = BuildJsonObject(cont, property_stack, feedback); - property_stack.resize_no_init(cont.index); Expect(JsonToken::RBRACE, MessageTemplate::kJsonParseExpectedCommaOrRBrace); - // Return the object. 
- value = cont.scope.CloseAndEscape(value); + if (should_track_json_source) { + size_t start = cont.index; + int length = static_cast<int>(property_stack.size() - start); + Handle<ObjectHashTable> table = + ObjectHashTable::New(isolate(), length); + for (int i = 0; i < length; i++) { + const JsonProperty& property = property_stack[start + i]; + if (property.string.is_index()) { + table = ObjectHashTable::Put( + table, factory()->Uint32ToString(property.string.index()), + property_val_node_stack[start + i]); + } else { + table = + ObjectHashTable::Put(table, MakeString(property.string), + property_val_node_stack[start + i]); + } + } + property_val_node_stack.resize_no_init(cont.index); + Object value_obj = *value; + val_node = cont.scope.CloseAndEscape(table); + value = cont.scope.CloseAndEscape(handle(value_obj, isolate_)); + } else { + value = cont.scope.CloseAndEscape(value); + } + property_stack.resize_no_init(cont.index); + // Pop the continuation. cont = std::move(cont_stack.back()); cont_stack.pop_back(); @@ -956,16 +1183,31 @@ MaybeHandle<Object> JsonParser<Char>::ParseJsonValue() { case JsonContinuation::kArrayElement: { // Store the previous element on the stack. element_stack.emplace_back(value); + if (should_track_json_source) { + element_val_node_stack.emplace_back(val_node); + } // Break to start producing the subsequent element value. if (V8_LIKELY(Check(JsonToken::COMMA))) break; value = BuildJsonArray(cont, element_stack); - element_stack.resize_no_init(cont.index); Expect(JsonToken::RBRACK, MessageTemplate::kJsonParseExpectedCommaOrRBrack); - // Return the array. - value = cont.scope.CloseAndEscape(value); + if (should_track_json_source) { + size_t start = cont.index; + int length = static_cast<int>(element_stack.size() - start); + Handle<FixedArray> array = factory()->NewFixedArray(length); + for (int i = 0; i < length; i++) { + array->set(i, *element_val_node_stack[start + i]); + } + element_val_node_stack.resize_no_init(cont.index); + Object value_obj = *value; + val_node = cont.scope.CloseAndEscape(array); + value = cont.scope.CloseAndEscape(handle(value_obj, isolate_)); + } else { + value = cont.scope.CloseAndEscape(value); + } + element_stack.resize_no_init(cont.index); // Pop the continuation. 
cont = std::move(cont_stack.back()); cont_stack.pop_back(); diff --git a/deps/v8/src/json/json-parser.h b/deps/v8/src/json/json-parser.h index 38d45bcf292a44..9304c163b30bef 100644 --- a/deps/v8/src/json/json-parser.h +++ b/deps/v8/src/json/json-parser.h @@ -103,20 +103,25 @@ struct JsonProperty { class JsonParseInternalizer { public: static MaybeHandle<Object> Internalize(Isolate* isolate, - Handle<Object> object, - Handle<Object> reviver); + Handle<Object> result, + Handle<Object> reviver, + Handle<String> source); private: - JsonParseInternalizer(Isolate* isolate, Handle<JSReceiver> reviver) - : isolate_(isolate), reviver_(reviver) {} + JsonParseInternalizer(Isolate* isolate, Handle<JSReceiver> reviver, + Handle<String> source) + : isolate_(isolate), reviver_(reviver), source_(source) {} MaybeHandle<Object> InternalizeJsonProperty(Handle<JSReceiver> holder, - Handle<String> key); + Handle<String> key, + Handle<Object> val_node); - bool RecurseAndApply(Handle<JSReceiver> holder, Handle<String> name); + bool RecurseAndApply(Handle<JSReceiver> holder, Handle<String> name, + Handle<Object> val_node); Isolate* isolate_; Handle<JSReceiver> reviver_; + Handle<String> source_; }; enum class JsonToken : uint8_t { @@ -143,15 +148,22 @@ class JsonParser final { using SeqString = typename CharTraits<Char>::String; using SeqExternalString = typename CharTraits<Char>::ExternalString; + V8_WARN_UNUSED_RESULT static bool CheckRawJson(Isolate* isolate, + Handle<String> source) { + return JsonParser(isolate, source).ParseRawJson(); + } + V8_WARN_UNUSED_RESULT static MaybeHandle<Object> Parse( Isolate* isolate, Handle<String> source, Handle<Object> reviver) { HighAllocationThroughputScope high_throughput_scope( V8::GetCurrentPlatform()); Handle<Object> result; ASSIGN_RETURN_ON_EXCEPTION(isolate, result, - JsonParser(isolate, source).ParseJson(), Object); + JsonParser(isolate, source).ParseJson(reviver), + Object); if (reviver->IsCallable()) { - return JsonParseInternalizer::Internalize(isolate, result, reviver); + return JsonParseInternalizer::Internalize(isolate, result, reviver, + source); } return result; } @@ -187,7 +199,9 @@ class JsonParser final { ~JsonParser(); // Parse a string containing a single JSON value. - MaybeHandle<Object> ParseJson(); + MaybeHandle<Object> ParseJson(Handle<Object> reviver); + + bool ParseRawJson(); void advance() { ++cursor_; } @@ -295,7 +309,8 @@ class JsonParser final { // Parse a single JSON value from input (grammar production JSONValue). // A JSON value is either a (double-quoted) string literal, a number literal, // one of "true", "false", or "null", or an object or array literal. 
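Making `should_track_json_source` a template parameter of `ParseJsonValue`, as declared just below, stamps out two copies of the parse loop, so the common no-reviver path carries none of the tracking branches at runtime. The pattern in miniature, with a toy payload in place of the parser state:

```cpp
#include <iostream>
#include <string>

// Two instantiations are generated; in the <false> one the tracking code
// is compiled out rather than branched over at runtime.
template <bool should_track_source>
int ParseValue(const std::string& text, std::string* source_out) {
  int value = std::stoi(text);
  if constexpr (should_track_source) {
    *source_out = text;  // only exists in the <true> instantiation
  }
  return value;
}

int main() {
  std::string node;
  std::cout << ParseValue<false>("42", nullptr) << "\n";       // fast path
  std::cout << ParseValue<true>("42", &node) << " " << node;   // tracking path
  return 0;
}
```

V8's version branches on the plain template bool (which the compiler constant-folds); `if constexpr` makes the same effect explicit in the sketch.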
- MaybeHandle<Object> ParseJsonValue(); + template <bool should_track_json_source> + MaybeHandle<Object> ParseJsonValue(Handle<Object> reviver); Handle<Object> BuildJsonObject( const JsonContinuation& cont, diff --git a/deps/v8/src/json/json-stringifier.cc b/deps/v8/src/json/json-stringifier.cc index cbc951d88a0004..98ff273a2e76d0 100644 --- a/deps/v8/src/json/json-stringifier.cc +++ b/deps/v8/src/json/json-stringifier.cc @@ -9,6 +9,7 @@ #include "src/numbers/conversions.h" #include "src/objects/heap-number-inl.h" #include "src/objects/js-array-inl.h" +#include "src/objects/js-raw-json-inl.h" #include "src/objects/lookup.h" #include "src/objects/objects-inl.h" #include "src/objects/oddball-inl.h" @@ -582,6 +583,14 @@ JsonStringifier::Result JsonStringifier::Serialize_(Handle<Object> object, Handle<JSPrimitiveWrapper>::cast(object), key); case SYMBOL_TYPE: return UNCHANGED; + case JS_RAW_JSON_TYPE: + DCHECK(v8_flags.harmony_json_parse_with_source); + if (deferred_string_key) SerializeDeferredKey(comma, key); + builder_.AppendString(Handle<String>::cast( + handle(Handle<JSRawJson>::cast(object)->InObjectPropertyAt( + JSRawJson::kRawJsonIndex), + isolate_))); + return SUCCESS; default: if (InstanceTypeChecker::IsString(instance_type)) { if (deferred_string_key) SerializeDeferredKey(comma, key); diff --git a/deps/v8/src/libplatform/tracing/recorder-win.cc b/deps/v8/src/libplatform/tracing/recorder-win.cc index 955a400928a654..ce431c94920e82 100644 --- a/deps/v8/src/libplatform/tracing/recorder-win.cc +++ b/deps/v8/src/libplatform/tracing/recorder-win.cc @@ -33,13 +33,13 @@ bool Recorder::IsEnabled(const uint8_t level) { void Recorder::AddEvent(TraceObject* trace_event) { // TODO(sartang@microsoft.com): Figure out how to write the conditional // arguments - wchar_t* wName = new wchar_t[4096]; + wchar_t wName[4096]; MultiByteToWideChar(CP_ACP, 0, trace_event->name(), -1, wName, 4096); #if defined(V8_USE_PERFETTO) const wchar_t* wCategoryGroupName = L""; #else // defined(V8_USE_PERFETTO) - wchar_t* wCategoryGroupName = new wchar_t[4096]; + wchar_t wCategoryGroupName[4096]; MultiByteToWideChar(CP_ACP, 0, TracingController::GetCategoryGroupName( trace_event->category_enabled_flag()), diff --git a/deps/v8/src/logging/counters-definitions.h b/deps/v8/src/logging/counters-definitions.h index e1e27fb4ed58c7..c487e9cf85b9a8 100644 --- a/deps/v8/src/logging/counters-definitions.h +++ b/deps/v8/src/logging/counters-definitions.h @@ -32,10 +32,6 @@ namespace internal { HR(gc_scavenger_scavenge_main, V8.GCScavenger.ScavengeMain, 0, 10000, 101) \ HR(gc_scavenger_scavenge_roots, V8.GCScavenger.ScavengeRoots, 0, 10000, 101) \ HR(gc_marking_sum, V8.GCMarkingSum, 0, 10000, 101) \ - /* Range and bucket matches BlinkGC.MainThreadMarkingThroughput. */ \ - HR(gc_main_thread_marking_throughput, V8.GCMainThreadMarkingThroughput, 0, \ - 100000, 50) \ - HR(young_generation_handling, V8.GCYoungGenerationHandling, 0, 2, 3) \ /* Asm/Wasm. */ \ HR(wasm_functions_per_asm_module, V8.WasmFunctionsPerModule.asm, 1, 1000000, \ 51) \ @@ -131,8 +127,8 @@ namespace internal { HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND) \ HT(gc_incremental_marking_start, V8.GCIncrementalMarkingStart, 10000, \ MILLISECOND) \ - HT(gc_incremental_marking_finalize, V8.GCIncrementalMarkingFinalize, 10000, \ - MILLISECOND) \ + HT(gc_minor_incremental_marking_start, V8.GCMinorIncrementalMarkingStart, \ + 10000, MILLISECOND) \ HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000, \ MILLISECOND) \ /* Compilation times. 
*/ \ diff --git a/deps/v8/src/logging/counters.h b/deps/v8/src/logging/counters.h index 1e284bf3312a2f..354bc54845ac48 100644 --- a/deps/v8/src/logging/counters.h +++ b/deps/v8/src/logging/counters.h @@ -367,7 +367,7 @@ class V8_NODISCARD AggregatedHistogramTimerScope { // AggretatedMemoryHistogram collects (time, value) sample pairs and turns // them into time-uniform samples for the backing historgram, such that the // backing histogram receives one sample every T ms, where the T is controlled -// by the FLAG_histogram_interval. +// by the v8_flags.histogram_interval. // // More formally: let F be a real-valued function that maps time to sample // values. We define F as a linear interpolation between adjacent samples. For @@ -388,7 +388,7 @@ class AggregatedMemoryHistogram { // 1) For we processed samples that came in before start_ms_ and sent the // corresponding aggregated samples to backing histogram. // 2) (last_ms_, last_value_) is the last received sample. - // 3) last_ms_ < start_ms_ + FLAG_histogram_interval. + // 3) last_ms_ < start_ms_ + v8_flags.histogram_interval. // 4) aggregate_value_ is the average of the function that is constructed by // linearly interpolating samples received between start_ms_ and last_ms_. void AddSample(double current_ms, double current_value); @@ -429,7 +429,7 @@ void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms, // Two samples have the same time, remember the last one. last_value_ = current_value; } else { - double sample_interval_ms = FLAG_histogram_interval; + double sample_interval_ms = v8_flags.histogram_interval; double end_ms = start_ms_ + sample_interval_ms; if (end_ms <= current_ms + kEpsilon) { // Linearly interpolate between the last_ms_ and the current_ms. @@ -520,10 +520,10 @@ class Counters : public std::enable_shared_from_this<Counters> { NESTED_TIMED_HISTOGRAM_LIST(HT) #undef HT -#define HT(name, caption, max, res) \ - NestedTimedHistogram* name() { \ - name##_.EnsureCreated(FLAG_slow_histograms); \ - return &name##_; \ +#define HT(name, caption, max, res) \ + NestedTimedHistogram* name() { \ + name##_.EnsureCreated(v8_flags.slow_histograms); \ + return &name##_; \ } NESTED_TIMED_HISTOGRAM_LIST_SLOW(HT) #undef HT diff --git a/deps/v8/src/logging/log-file.cc b/deps/v8/src/logging/log-file.cc index 21f2d1c968abbd..acdd24701c64b5 100644 --- a/deps/v8/src/logging/log-file.cc +++ b/deps/v8/src/logging/log-file.cc @@ -28,7 +28,7 @@ const char* const LogFile::kLogToConsole = "-"; // static FILE* LogFile::CreateOutputHandle(std::string file_name) { // If we're logging anything, we need to open the log file. - if (!FLAG_log) { + if (!v8_flags.log) { return nullptr; } else if (LogFile::IsLoggingToConsole(file_name)) { return stdout; diff --git a/deps/v8/src/logging/log.cc b/deps/v8/src/logging/log.cc index b406ab4a17b0de..e591ce8224af42 100644 --- a/deps/v8/src/logging/log.cc +++ b/deps/v8/src/logging/log.cc @@ -11,6 +11,7 @@ #include "include/v8-locker.h" #include "src/api/api-inl.h" +#include "src/base/functional.h" #include "src/base/platform/mutex.h" #include "src/base/platform/platform.h" #include "src/base/platform/wrappers.h" @@ -127,7 +128,7 @@ const char* ComputeMarker(SharedFunctionInfo shared, AbstractCode code) { CodeKind kind = code.kind(cage_base); // We record interpreter trampoline builtin copies as having the // "interpreted" marker. 
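`AggregatedMemoryHistogram::AddSample` above converts irregular (time, value) samples into one aggregated sample per `v8_flags.histogram_interval` by integrating the linearly interpolated function F over each interval. A self-contained sketch of that resampling step; V8's actual implementation differs in bookkeeping details such as the kEpsilon tolerance:

```cpp
#include <cstdio>

// Linearly interpolate F between the previous sample (t0, v0) and the new
// sample (t1, v1), and emit the mean of F over each full interval of
// length interval_ms that the new sample completes.
void AddSample(double t1, double v1, double interval_ms,
               double* start_ms, double* last_ms, double* last_value,
               double* aggregate) {
  double t0 = *last_ms, v0 = *last_value;
  double end_ms = *start_ms + interval_ms;
  while (end_ms <= t1 && t1 > t0) {
    // Value of F at the interval boundary, by linear interpolation.
    double v_end = v0 + (v1 - v0) * (end_ms - t0) / (t1 - t0);
    // Mean of F over the interval: carried partial area plus this piece.
    double mean =
        *aggregate + 0.5 * (v0 + v_end) * (end_ms - t0) / interval_ms;
    std::printf("interval ending %.0f ms -> sample %.2f\n", end_ms, mean);
    *aggregate = 0.0;
    *start_ms = end_ms;
    t0 = end_ms;
    v0 = v_end;
    end_ms += interval_ms;
  }
  // Partial interval: fold the area so far into the running aggregate.
  if (t1 > t0) *aggregate += 0.5 * (v0 + v1) * (t1 - t0) / interval_ms;
  *last_ms = t1;
  *last_value = v1;
}

int main() {
  double start = 0, last_t = 0, last_v = 0, agg = 0;
  // Ramp from 0 to 30 over 1500 ms with a 1000 ms interval: emits the
  // mean 10.00 for [0, 1000) and leaves 12.5 pending for the next call.
  AddSample(1500, 30, 1000, &start, &last_t, &last_v, &agg);
  return 0;
}
```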
- if (FLAG_interpreted_frames_native_stack && kind == CodeKind::BUILTIN && + if (v8_flags.interpreted_frames_native_stack && kind == CodeKind::BUILTIN && !code.is_off_heap_trampoline(cage_base)) { DCHECK_EQ(code.builtin_id(cage_base), Builtin::kInterpreterEntryTrampoline); kind = CodeKind::INTERPRETED_FUNCTION; @@ -416,7 +417,7 @@ void LinuxPerfBasicLogger::LogRecordedBuffer(Handle<AbstractCode> code, MaybeHandle<SharedFunctionInfo>, const char* name, int length) { PtrComprCageBase cage_base(isolate_); - if (FLAG_perf_basic_prof_only_functions && + if (v8_flags.perf_basic_prof_only_functions && CodeKindIsBuiltinOrJSFunction(code->kind(cage_base))) { return; } @@ -946,6 +947,14 @@ class SamplingThread : public base::Thread { const int interval_microseconds_; }; +#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) +class ETWJitLogger : public JitLogger { + public: + explicit ETWJitLogger(Isolate* isolate) + : JitLogger(isolate, i::ETWJITInterface::EventHandler) {} +}; +#endif + // The Profiler samples pc and sp values for the main thread. // Each sample is appended to a circular buffer. // An independent thread removes data and writes it to the log. @@ -1128,7 +1137,7 @@ V8FileLogger::~V8FileLogger() = default; const LogSeparator V8FileLogger::kNext = LogSeparator::kSeparator; int64_t V8FileLogger::Time() { - if (FLAG_verify_predictable) { + if (v8_flags.verify_predictable) { return isolate_->heap()->MonotonicallyIncreasingTimeInMs() * 1000; } return timer_.Elapsed().InMicroseconds(); @@ -1145,12 +1154,13 @@ void V8FileLogger::RemoveLogEventListener(LogEventListener* listener) { void V8FileLogger::ProfilerBeginEvent() { MSG_BUILDER(); - msg << "profiler" << kNext << "begin" << kNext << FLAG_prof_sampling_interval; + msg << "profiler" << kNext << "begin" << kNext + << v8_flags.prof_sampling_interval; msg.WriteToLogFile(); } void V8FileLogger::StringEvent(const char* name, const char* value) { - if (FLAG_log) UncheckedStringEvent(name, value); + if (v8_flags.log) UncheckedStringEvent(name, value); } void V8FileLogger::UncheckedStringEvent(const char* name, const char* value) { @@ -1160,7 +1170,7 @@ void V8FileLogger::UncheckedStringEvent(const char* name, const char* value) { } void V8FileLogger::IntPtrTEvent(const char* name, intptr_t value) { - if (!FLAG_log) return; + if (!v8_flags.log) return; MSG_BUILDER(); msg << name << kNext; msg.AppendFormatString("%" V8PRIdPTR, value); @@ -1170,7 +1180,7 @@ void V8FileLogger::IntPtrTEvent(const char* name, intptr_t value) { void V8FileLogger::SharedLibraryEvent(const std::string& library_path, uintptr_t start, uintptr_t end, intptr_t aslr_slide) { - if (!FLAG_prof_cpp) return; + if (!v8_flags.prof_cpp) return; MSG_BUILDER(); msg << "shared-library" << kNext << library_path.c_str() << kNext << reinterpret_cast<void*>(start) << kNext << reinterpret_cast<void*>(end) @@ -1179,14 +1189,14 @@ void V8FileLogger::SharedLibraryEvent(const std::string& library_path, } void V8FileLogger::SharedLibraryEnd() { - if (!FLAG_prof_cpp) return; + if (!v8_flags.prof_cpp) return; MSG_BUILDER(); msg << "shared-library-end"; msg.WriteToLogFile(); } void V8FileLogger::CurrentTimeEvent() { - DCHECK(FLAG_log_internal_timer_events); + DCHECK(v8_flags.log_internal_timer_events); MSG_BUILDER(); msg << "current-time" << kNext << Time(); msg.WriteToLogFile(); @@ -1222,7 +1232,7 @@ TIMER_EVENTS_LIST(V) #undef V void V8FileLogger::NewEvent(const char* name, void* object, size_t size) { - if (!FLAG_log) return; + if (!v8_flags.log) return; MSG_BUILDER(); msg << "new" << 
kNext << name << kNext << object << kNext << static_cast<unsigned int>(size); @@ -1230,7 +1240,7 @@ void V8FileLogger::NewEvent(const char* name, void* object, size_t size) { } void V8FileLogger::DeleteEvent(const char* name, void* object) { - if (!FLAG_log) return; + if (!v8_flags.log) return; MSG_BUILDER(); msg << "delete" << kNext << name << kNext << object; msg.WriteToLogFile(); @@ -1288,7 +1298,7 @@ void V8FileLogger::LogSourceCodeInformation(Handle<AbstractCode> code, Script script = Script::cast(script_object); EnsureLogScriptSource(script); - if (!FLAG_log_source_position) return; + if (!v8_flags.log_source_position) return; MSG_BUILDER(); msg << "code-source-info" << V8FileLogger::kNext << reinterpret_cast<void*>(code->InstructionStart(cage_base)) @@ -1349,7 +1359,7 @@ void V8FileLogger::LogSourceCodeInformation(Handle<AbstractCode> code, } void V8FileLogger::LogCodeDisassemble(Handle<AbstractCode> code) { - if (!FLAG_log_code_disassemble) return; + if (!v8_flags.log_code_disassemble) return; PtrComprCageBase cage_base(isolate_); MSG_BUILDER(); msg << "code-disassemble" << V8FileLogger::kNext @@ -1380,7 +1390,7 @@ void V8FileLogger::LogCodeDisassemble(Handle<AbstractCode> code) { void V8FileLogger::CodeCreateEvent(CodeTag tag, Handle<AbstractCode> code, const char* name) { if (!is_listening_to_code_events()) return; - if (!FLAG_log_code) return; + if (!v8_flags.log_code) return; { MSG_BUILDER(); AppendCodeCreateHeader(isolate_, msg, tag, *code, Time()); @@ -1393,7 +1403,7 @@ void V8FileLogger::CodeCreateEvent(CodeTag tag, Handle<AbstractCode> code, void V8FileLogger::CodeCreateEvent(CodeTag tag, Handle<AbstractCode> code, Handle<Name> name) { if (!is_listening_to_code_events()) return; - if (!FLAG_log_code) return; + if (!v8_flags.log_code) return; { MSG_BUILDER(); AppendCodeCreateHeader(isolate_, msg, tag, *code, Time()); @@ -1408,7 +1418,7 @@ void V8FileLogger::CodeCreateEvent(CodeTag tag, Handle<AbstractCode> code, Handle<SharedFunctionInfo> shared, Handle<Name> script_name) { if (!is_listening_to_code_events()) return; - if (!FLAG_log_code) return; + if (!v8_flags.log_code) return; if (*code == AbstractCode::cast(isolate_->builtins()->code(Builtin::kCompileLazy))) { return; @@ -1427,7 +1437,7 @@ void V8FileLogger::CodeCreateEvent(CodeTag tag, Handle<AbstractCode> code, void V8FileLogger::FeedbackVectorEvent(FeedbackVector vector, AbstractCode code) { DisallowGarbageCollection no_gc; - if (!FLAG_log_feedback_vector) return; + if (!v8_flags.log_feedback_vector) return; PtrComprCageBase cage_base(isolate_); MSG_BUILDER(); msg << "feedback-vector" << kNext << Time(); @@ -1460,7 +1470,7 @@ void V8FileLogger::CodeCreateEvent(CodeTag tag, Handle<AbstractCode> code, Handle<Name> script_name, int line, int column) { if (!is_listening_to_code_events()) return; - if (!FLAG_log_code) return; + if (!v8_flags.log_code) return; { MSG_BUILDER(); AppendCodeCreateHeader(isolate_, msg, tag, *code, Time()); @@ -1480,7 +1490,7 @@ void V8FileLogger::CodeCreateEvent(CodeTag tag, const wasm::WasmCode* code, const char* /*source_url*/, int /*code_offset*/, int /*script_id*/) { if (!is_listening_to_code_events()) return; - if (!FLAG_log_code) return; + if (!v8_flags.log_code) return; MSG_BUILDER(); AppendCodeCreateHeader(msg, tag, CodeKind::WASM_FUNCTION, code->instructions().begin(), @@ -1502,7 +1512,7 @@ void V8FileLogger::CodeCreateEvent(CodeTag tag, const wasm::WasmCode* code, void V8FileLogger::CallbackEventInternal(const char* prefix, Handle<Name> name, Address entry_point) { - if 
(!FLAG_log_code) return; + if (!v8_flags.log_code) return; MSG_BUILDER(); msg << Event::kCodeCreation << kNext << CodeTag::kCallback << kNext << -2 << kNext << Time() << kNext << reinterpret_cast<void*>(entry_point) @@ -1525,7 +1535,7 @@ void V8FileLogger::SetterCallbackEvent(Handle<Name> name, Address entry_point) { void V8FileLogger::RegExpCodeCreateEvent(Handle<AbstractCode> code, Handle<String> source) { if (!is_listening_to_code_events()) return; - if (!FLAG_log_code) return; + if (!v8_flags.log_code) return; MSG_BUILDER(); AppendCodeCreateHeader(isolate_, msg, LogEventListener::CodeTag::kRegExp, *code, Time()); @@ -1547,14 +1557,14 @@ void V8FileLogger::SharedFunctionInfoMoveEvent(Address from, Address to) { void V8FileLogger::CodeMovingGCEvent() { if (!is_listening_to_code_events()) return; - if (!FLAG_ll_prof) return; + if (!v8_flags.ll_prof) return; base::OS::SignalCodeMovingGC(); } void V8FileLogger::CodeDisableOptEvent(Handle<AbstractCode> code, Handle<SharedFunctionInfo> shared) { if (!is_listening_to_code_events()) return; - if (!FLAG_log_code) return; + if (!v8_flags.log_code) return; MSG_BUILDER(); msg << Event::kCodeDisableOpt << kNext << shared->DebugNameCStr().get() << kNext << GetBailoutReason(shared->disabled_optimization_reason()); @@ -1585,7 +1595,7 @@ void V8FileLogger::ProcessDeoptEvent(Handle<Code> code, SourcePosition position, void V8FileLogger::CodeDeoptEvent(Handle<Code> code, DeoptimizeKind kind, Address pc, int fp_to_sp_delta) { - if (!is_logging() || !FLAG_log_deopt) return; + if (!is_logging() || !v8_flags.log_deopt) return; Deoptimizer::DeoptInfo info = Deoptimizer::GetDeoptInfo(*code, pc); ProcessDeoptEvent(code, info.position, Deoptimizer::MessageFor(kind), DeoptimizeReasonToString(info.deopt_reason)); @@ -1594,7 +1604,7 @@ void V8FileLogger::CodeDeoptEvent(Handle<Code> code, DeoptimizeKind kind, void V8FileLogger::CodeDependencyChangeEvent(Handle<Code> code, Handle<SharedFunctionInfo> sfi, const char* reason) { - if (!is_logging() || !FLAG_log_deopt) return; + if (!is_logging() || !v8_flags.log_deopt) return; SourcePosition position(sfi->StartPosition(), -1); ProcessDeoptEvent(code, position, "dependency-change", reason); } @@ -1647,7 +1657,7 @@ void V8FileLogger::CodeNameEvent(Address addr, int pos, const char* code_name) { } void V8FileLogger::MoveEventInternal(Event event, Address from, Address to) { - if (!FLAG_log_code) return; + if (!v8_flags.log_code) return; MSG_BUILDER(); msg << event << kNext << reinterpret_cast<void*>(from) << kNext << reinterpret_cast<void*>(to); @@ -1661,7 +1671,7 @@ void AppendFunctionMessage(LogFile::MessageBuilder& msg, const char* reason, msg << "function" << V8FileLogger::kNext << reason << V8FileLogger::kNext << script_id << V8FileLogger::kNext << start_position << V8FileLogger::kNext << end_position << V8FileLogger::kNext; - if (V8_UNLIKELY(FLAG_predictable)) { + if (V8_UNLIKELY(v8_flags.predictable)) { msg << 0.1; } else { msg << time_delta; @@ -1757,7 +1767,7 @@ void V8FileLogger::ScriptDetails(Script script) { } bool V8FileLogger::EnsureLogScriptSource(Script script) { - if (!FLAG_log_source_code) return true; + if (!v8_flags.log_source_code) return true; // Make sure the script is written to the log file. 
int script_id = script.id(); if (logged_source_code_.find(script_id) != logged_source_code_.end()) { @@ -1800,7 +1810,7 @@ void V8FileLogger::RuntimeCallTimerEvent() { } void V8FileLogger::TickEvent(TickSample* sample, bool overflow) { - if (!FLAG_prof_cpp) return; + if (!v8_flags.prof_cpp) return; if (V8_UNLIKELY(TracingFlags::runtime_stats.load(std::memory_order_relaxed) == v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) { RuntimeCallTimerEvent(); @@ -1825,7 +1835,7 @@ void V8FileLogger::TickEvent(TickSample* sample, bool overflow) { void V8FileLogger::ICEvent(const char* type, bool keyed, Handle<Map> map, Handle<Object> key, char old_state, char new_state, const char* modifier, const char* slow_stub_reason) { - if (!FLAG_log_ic) return; + if (!v8_flags.log_ic) return; int line; int column; // GetAbstractPC must come before MSG_BUILDER(), as it can GC, which might @@ -1854,7 +1864,7 @@ void V8FileLogger::ICEvent(const char* type, bool keyed, Handle<Map> map, void V8FileLogger::MapEvent(const char* type, Handle<Map> from, Handle<Map> to, const char* reason, Handle<HeapObject> name_or_sfi) { - if (!FLAG_log_maps) return; + if (!v8_flags.log_maps) return; if (!to.is_null()) MapDetails(*to); int line = -1; int column = -1; @@ -1885,7 +1895,7 @@ void V8FileLogger::MapEvent(const char* type, Handle<Map> from, Handle<Map> to, } void V8FileLogger::MapCreate(Map map) { - if (!FLAG_log_maps) return; + if (!v8_flags.log_maps) return; DisallowGarbageCollection no_gc; MSG_BUILDER(); msg << "map-create" << kNext << Time() << kNext << AsHex::Address(map.ptr()); @@ -1893,12 +1903,12 @@ void V8FileLogger::MapCreate(Map map) { } void V8FileLogger::MapDetails(Map map) { - if (!FLAG_log_maps) return; + if (!v8_flags.log_maps) return; DisallowGarbageCollection no_gc; MSG_BUILDER(); msg << "map-details" << kNext << Time() << kNext << AsHex::Address(map.ptr()) << kNext; - if (FLAG_log_maps_details) { + if (v8_flags.log_maps_details) { std::ostringstream buffer; map.PrintMapDetails(buffer); msg << buffer.str().c_str(); @@ -1913,6 +1923,17 @@ EnumerateCompiledFunctions(Heap* heap) { std::vector<std::pair<Handle<SharedFunctionInfo>, Handle<AbstractCode>>> compiled_funcs; Isolate* isolate = heap->isolate(); + auto hash = [](const std::pair<SharedFunctionInfo, AbstractCode>& p) { + return base::hash_combine(p.first.address(), p.second.address()); + }; + std::unordered_set<std::pair<SharedFunctionInfo, AbstractCode>, + decltype(hash)> + seen(8, hash); + + auto record = [&](SharedFunctionInfo sfi, AbstractCode c) { + if (auto [iter, inserted] = seen.emplace(sfi, c); inserted) + compiled_funcs.emplace_back(handle(sfi, isolate), handle(c, isolate)); + }; // Iterate the heap to find JSFunctions and record their optimized code. for (HeapObject obj = iterator.Next(); !obj.is_null(); @@ -1920,9 +1941,7 @@ EnumerateCompiledFunctions(Heap* heap) { if (obj.IsSharedFunctionInfo()) { SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj); if (sfi.is_compiled() && !sfi.HasBytecodeArray()) { - compiled_funcs.emplace_back( - handle(sfi, isolate), - handle(AbstractCode::cast(sfi.abstract_code(isolate)), isolate)); + record(sfi, AbstractCode::cast(sfi.abstract_code(isolate))); } } else if (obj.IsJSFunction()) { // Given that we no longer iterate over all optimized JSFunctions, we need @@ -1933,9 +1952,8 @@ EnumerateCompiledFunctions(Heap* heap) { // only on a type feedback vector. We should make this more precise.
if (function.HasAttachedOptimizedCode() && Script::cast(function.shared().script()).HasValidSource()) { - compiled_funcs.emplace_back( - handle(function.shared(), isolate), - handle(AbstractCode::cast(FromCodeT(function.code())), isolate)); + record(function.shared(), + AbstractCode::cast(FromCodeT(function.code()))); } } } @@ -1949,9 +1967,7 @@ EnumerateCompiledFunctions(Heap* heap) { for (SharedFunctionInfo sfi = sfi_iterator.Next(); !sfi.is_null(); sfi = sfi_iterator.Next()) { if (sfi.is_compiled()) { - compiled_funcs.emplace_back( - handle(sfi, isolate), - handle(AbstractCode::cast(sfi.abstract_code(isolate)), isolate)); + record(sfi, AbstractCode::cast(sfi.abstract_code(isolate))); } } } @@ -2013,7 +2029,7 @@ void V8FileLogger::LogAllMaps() { } static void AddIsolateIdIfNeeded(std::ostream& os, Isolate* isolate) { - if (!FLAG_logfile_per_isolate) return; + if (!v8_flags.logfile_per_isolate) return; os << "isolate-" << isolate << "-" << base::OS::GetCurrentProcessId() << "-"; } @@ -2067,30 +2083,30 @@ bool V8FileLogger::SetUp(Isolate* isolate) { is_initialized_ = true; std::ostringstream log_file_name; - PrepareLogFileName(log_file_name, isolate, FLAG_logfile); + PrepareLogFileName(log_file_name, isolate, v8_flags.logfile); log_ = std::make_unique<LogFile>(this, log_file_name.str()); #if V8_OS_LINUX - if (FLAG_perf_basic_prof) { + if (v8_flags.perf_basic_prof) { perf_basic_logger_ = std::make_unique<LinuxPerfBasicLogger>(isolate); AddLogEventListener(perf_basic_logger_.get()); } - if (FLAG_perf_prof) { + if (v8_flags.perf_prof) { perf_jit_logger_ = std::make_unique<LinuxPerfJitLogger>(isolate); AddLogEventListener(perf_jit_logger_.get()); } #else static_assert( - !FLAG_perf_prof.value(), + !v8_flags.perf_prof.value(), "--perf-prof should be statically disabled on non-Linux platforms"); static_assert( - !FLAG_perf_basic_prof.value(), + !v8_flags.perf_basic_prof.value(), "--perf-basic-prof should be statically disabled on non-Linux platforms"); #endif #ifdef ENABLE_GDB_JIT_INTERFACE - if (i::FLAG_gdbjit) { + if (v8_flags.gdbjit) { gdb_jit_logger_ = std::make_unique<JitLogger>(isolate, i::GDBJITInterface::EventHandler); AddLogEventListener(gdb_jit_logger_.get()); @@ -2098,25 +2114,16 @@ bool V8FileLogger::SetUp(Isolate* isolate) { } #endif // ENABLE_GDB_JIT_INTERFACE -#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) - if (i::FLAG_enable_etw_stack_walking) { - etw_jit_logger_ = - std::make_unique<JitLogger>(isolate, i::ETWJITInterface::EventHandler); - AddLogEventListener(etw_jit_logger_.get()); - CHECK(isolate->logger()->is_listening_to_code_events()); - } -#endif // defined(V8_OS_WIN) - - if (FLAG_ll_prof) { + if (v8_flags.ll_prof) { ll_logger_ = std::make_unique<LowLevelLogger>(isolate, log_file_name.str().c_str()); AddLogEventListener(ll_logger_.get()); } - ticker_ = std::make_unique<Ticker>(isolate, FLAG_prof_sampling_interval); - if (FLAG_log) UpdateIsLogging(true); + ticker_ = std::make_unique<Ticker>(isolate, v8_flags.prof_sampling_interval); + if (v8_flags.log) UpdateIsLogging(true); timer_.Start(); - if (FLAG_prof_cpp) { - CHECK(FLAG_log); + if (v8_flags.prof_cpp) { + CHECK(v8_flags.log); CHECK(is_logging()); profiler_ = std::make_unique<Profiler>(isolate); profiler_->Engage(); @@ -2133,6 +2140,42 @@ void V8FileLogger::LateSetup(Isolate* isolate) { #endif } +#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) +void V8FileLogger::SetEtwCodeEventHandler(uint32_t options) { + DCHECK(v8_flags.enable_etw_stack_walking); + isolate_->UpdateLogObjectRelocation(); 
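The deduplication added to EnumerateCompiledFunctions above (a lambda hasher plus an `std::unordered_set` keyed on the pair) ensures that a function reachable from both the heap walk and the script iterator is recorded only once. Below is a minimal, self-contained C++17 sketch of the same pattern; the `uintptr_t` pairs and the `record`/`seen` names are illustrative stand-ins for V8's handle types, not V8 API:

```cpp
#include <cstdint>
#include <unordered_set>
#include <utility>
#include <vector>

int main() {
  // Pairs of object addresses stand in for (SharedFunctionInfo, AbstractCode).
  using Entry = std::pair<uintptr_t, uintptr_t>;

  // A lambda hasher lets unordered_set key on a pair type without
  // specialising std::hash; this mirrors the base::hash_combine call.
  auto hash = [](const Entry& p) {
    return std::hash<uintptr_t>{}(p.first) ^
           (std::hash<uintptr_t>{}(p.second) << 1);
  };
  std::unordered_set<Entry, decltype(hash)> seen(8, hash);
  std::vector<Entry> compiled_funcs;

  // record() appends only on first insertion, so duplicates are skipped.
  auto record = [&](uintptr_t sfi, uintptr_t code) {
    if (auto [iter, inserted] = seen.emplace(sfi, code); inserted) {
      compiled_funcs.emplace_back(sfi, code);
    }
  };

  record(0x1000, 0x2000);
  record(0x1000, 0x2000);  // duplicate: skipped
  return compiled_funcs.size() == 1 ? 0 : 1;
}
```

Passing the lambda to the constructor sidesteps the fact that lambdas are not default-constructible before C++20, which is presumably why the patch writes `seen(8, hash)` rather than relying on a default-constructed hasher.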
+#if V8_ENABLE_WEBASSEMBLY + wasm::GetWasmEngine()->EnableCodeLogging(isolate_); +#endif // V8_ENABLE_WEBASSEMBLY + + if (!etw_jit_logger_) { + etw_jit_logger_ = std::make_unique<ETWJitLogger>(isolate_); + AddLogEventListener(etw_jit_logger_.get()); + CHECK(isolate_->logger()->is_listening_to_code_events()); + } + + if (options & kJitCodeEventEnumExisting) { + // TODO(v8:11043) Here we log the existing code to all the listeners + registered to this Isolate logger, while we should only log to the newly + created ETWJitLogger. This should not generally be a problem because it + is quite unlikely to have both the file logger and ETW tracing enabled + by default. + HandleScope scope(isolate_); + LogBuiltins(); + LogCodeObjects(); + LogCompiledFunctions(); + } +} + +void V8FileLogger::ResetEtwCodeEventHandler() { + DCHECK(v8_flags.enable_etw_stack_walking); + if (etw_jit_logger_) { + RemoveLogEventListener(etw_jit_logger_.get()); + etw_jit_logger_.reset(); + } +} +#endif + void V8FileLogger::SetCodeEventHandler(uint32_t options, JitCodeEventHandler event_handler) { if (jit_logger_) { diff --git a/deps/v8/src/logging/log.h b/deps/v8/src/logging/log.h index 18560d78e2373e..339031c4ff84b2 100644 --- a/deps/v8/src/logging/log.h +++ b/deps/v8/src/logging/log.h @@ -69,10 +69,14 @@ class Profiler; class SourcePosition; class Ticker; +#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) +class ETWJitLogger; +#endif + #undef LOG -#define LOG(isolate, Call) \ - do { \ - if (v8::internal::FLAG_log) (isolate)->v8_file_logger()->Call; \ +#define LOG(isolate, Call) \ + do { \ + if (v8::internal::v8_flags.log) (isolate)->v8_file_logger()->Call; \ } while (false) #define LOG_CODE_EVENT(isolate, Call) \ @@ -134,6 +138,11 @@ class V8FileLogger : public LogEventListener { // Sets the current code event handler. void SetCodeEventHandler(uint32_t options, JitCodeEventHandler event_handler); +#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) + void SetEtwCodeEventHandler(uint32_t options); + void ResetEtwCodeEventHandler(); +#endif + sampler::Sampler* sampler(); V8_EXPORT_PRIVATE std::string file_name() const; @@ -261,7 +270,11 @@ class V8FileLogger : public LogEventListener { V8_EXPORT_PRIVATE bool is_logging(); bool is_listening_to_code_events() override { - return is_logging() || jit_logger_ != nullptr; + return +#if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) + etw_jit_logger_ != nullptr || +#endif + is_logging() || jit_logger_ != nullptr; } void LogExistingFunction(Handle<SharedFunctionInfo> shared, @@ -299,7 +312,7 @@ class V8FileLogger : public LogEventListener { void TickEvent(TickSample* sample, bool overflow); void RuntimeCallTimerEvent(); - // Logs a StringEvent regardless of whether FLAG_log is true. + // Logs a StringEvent regardless of whether v8_flags.log is true. void UncheckedStringEvent(const char* name, const char* value); // Logs a script's sources.
Keeps track of all logged scripts to ensure that @@ -345,7 +358,7 @@ class V8FileLogger : public LogEventListener { std::unique_ptr<JitLogger> gdb_jit_logger_; #endif #if defined(V8_OS_WIN) && defined(V8_ENABLE_ETW_STACK_WALKING) - std::unique_ptr<JitLogger> etw_jit_logger_; + std::unique_ptr<ETWJitLogger> etw_jit_logger_; #endif std::set<int> logged_source_code_; uint32_t next_source_info_id_ = 0; diff --git a/deps/v8/src/logging/runtime-call-stats.cc b/deps/v8/src/logging/runtime-call-stats.cc index 82f3979befa8be..e9f5e1755fb80b 100644 --- a/deps/v8/src/logging/runtime-call-stats.cc +++ b/deps/v8/src/logging/runtime-call-stats.cc @@ -157,7 +157,7 @@ RuntimeCallStats::RuntimeCallStats(ThreadType thread_type) for (int i = 0; i < kNumberOfCounters; i++) { this->counters_[i] = RuntimeCallCounter(kNames[i]); } - if (FLAG_rcs_cpu_time) { + if (v8_flags.rcs_cpu_time) { CHECK(base::ThreadTicks::IsSupported()); base::ThreadTicks::WaitUntilInitialized(); RuntimeCallTimer::Now = &RuntimeCallTimer::NowCPUTime; diff --git a/deps/v8/src/logging/runtime-call-stats.h b/deps/v8/src/logging/runtime-call-stats.h index 4c02309b7493dd..202c379f640777 100644 --- a/deps/v8/src/logging/runtime-call-stats.h +++ b/deps/v8/src/logging/runtime-call-stats.h @@ -368,6 +368,7 @@ class RuntimeCallTimer final { ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Scheduling) \ ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SelectInstructions) \ ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SimplifiedLowering) \ + ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SimplifyLoops) \ ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, StoreStoreElimination) \ ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TraceScheduleAndVerify) \ ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildTurboshaft) \ diff --git a/deps/v8/src/logging/tracing-flags.h b/deps/v8/src/logging/tracing-flags.h index b3ccb896aa6ed8..7ebd8e3e1987e0 100644 --- a/deps/v8/src/logging/tracing-flags.h +++ b/deps/v8/src/logging/tracing-flags.h @@ -13,8 +13,8 @@ namespace v8 { namespace internal { // This struct contains a set of flags that can be modified from multiple -// threads at runtime unlike the normal FLAG_-like flags which are not modified -// after V8 instance is initialized. +// threads at runtime unlike the normal v8_flags.-like flags which are not +// modified after V8 instance is initialized. struct TracingFlags { static V8_EXPORT_PRIVATE std::atomic_uint runtime_stats; diff --git a/deps/v8/src/maglev/maglev-assembler-inl.h b/deps/v8/src/maglev/maglev-assembler-inl.h index f9fefb53f99a8f..309e74a50249af 100644 --- a/deps/v8/src/maglev/maglev-assembler-inl.h +++ b/deps/v8/src/maglev/maglev-assembler-inl.h @@ -5,6 +5,10 @@ #ifndef V8_MAGLEV_MAGLEV_ASSEMBLER_INL_H_ #define V8_MAGLEV_MAGLEV_ASSEMBLER_INL_H_ +#include <tuple> +#include <type_traits> +#include <utility> + #include "src/codegen/macro-assembler-inl.h" #include "src/maglev/maglev-assembler.h" #include "src/maglev/maglev-basic-block.h" @@ -131,6 +135,9 @@ struct CopyForDeferredHelper<MaglevCompilationInfo*> template <> struct CopyForDeferredHelper<Register> : public CopyForDeferredByValue<Register> {}; +template <> +struct CopyForDeferredHelper<DoubleRegister> + : public CopyForDeferredByValue<DoubleRegister> {}; // Bytecode offsets are copied by value. template <> struct CopyForDeferredHelper<BytecodeOffset> @@ -187,10 +194,10 @@ struct FunctionArgumentsTupleHelper<R (&)(A...)> { }; template <typename T> -struct StripFirstTwoTupleArgs; +struct StripFirstTupleArg; -template <typename T1, typename T2, typename... 
T> -struct StripFirstTwoTupleArgs<std::tuple<T1, T2, T...>> { +template <typename T1, typename... T> +struct StripFirstTupleArg<std::tuple<T1, T...>> { using Stripped = std::tuple<T...>; }; @@ -199,9 +206,8 @@ class DeferredCodeInfoImpl final : public DeferredCodeInfo { public: using FunctionPointer = typename FunctionArgumentsTupleHelper<Function>::FunctionPointer; - using Tuple = typename StripFirstTwoTupleArgs< + using Tuple = typename StripFirstTupleArg< typename FunctionArgumentsTupleHelper<Function>::Tuple>::Stripped; - static constexpr size_t kSize = FunctionArgumentsTupleHelper<Function>::kSize; template <typename... InArgs> explicit DeferredCodeInfoImpl(MaglevCompilationInfo* compilation_info, @@ -213,18 +219,12 @@ class DeferredCodeInfoImpl final : public DeferredCodeInfo { DeferredCodeInfoImpl(DeferredCodeInfoImpl&&) = delete; DeferredCodeInfoImpl(const DeferredCodeInfoImpl&) = delete; - void Generate(MaglevAssembler* masm, Label* return_label) override { - DoCall(masm, return_label, std::make_index_sequence<kSize - 2>{}); + void Generate(MaglevAssembler* masm) override { + std::apply(function, + std::tuple_cat(std::make_tuple(masm), std::move(args))); } private: - template <size_t... I> - auto DoCall(MaglevAssembler* masm, Label* return_label, - std::index_sequence<I...>) { - // TODO(leszeks): This could be replaced with std::apply in C++17. - return function(masm, return_label, std::get<I>(args)...); - } - FunctionPointer function; Tuple args; }; @@ -234,6 +234,16 @@ class DeferredCodeInfoImpl final : public DeferredCodeInfo { template <typename Function, typename... Args> inline DeferredCodeInfo* MaglevAssembler::PushDeferredCode( Function&& deferred_code_gen, Args&&... args) { + using FunctionPointer = + typename detail::FunctionArgumentsTupleHelper<Function>::FunctionPointer; + static_assert( + std::is_invocable_v<FunctionPointer, MaglevAssembler*, + decltype(detail::CopyForDeferred( + std::declval<MaglevCompilationInfo*>(), + std::declval<Args>()))...>, + "Parameters of deferred_code_gen function should match arguments into " + "PushDeferredCode"); + using DeferredCodeInfoT = detail::DeferredCodeInfoImpl<Function>; DeferredCodeInfoT* deferred_code = compilation_info()->zone()->New<DeferredCodeInfoT>( @@ -252,11 +262,10 @@ inline void MaglevAssembler::JumpToDeferredIf(Condition cond, Args&&... args) { DeferredCodeInfo* deferred_code = PushDeferredCode<Function, Args...>( std::forward<Function>(deferred_code_gen), std::forward<Args>(args)...); - if (FLAG_code_comments) { + if (v8_flags.code_comments) { RecordComment("-- Jump to deferred code"); } j(cond, &deferred_code->deferred_code_label); - bind(&deferred_code->return_label); } // --- diff --git a/deps/v8/src/maglev/maglev-assembler.h b/deps/v8/src/maglev/maglev-assembler.h index ec85919ef07b33..0bfac0bbdf512d 100644 --- a/deps/v8/src/maglev/maglev-assembler.h +++ b/deps/v8/src/maglev/maglev-assembler.h @@ -12,25 +12,6 @@ namespace v8 { namespace internal { namespace maglev { -// Label allowed to be passed to deferred code. -class ZoneLabelRef { - public: - explicit ZoneLabelRef(Zone* zone) : label_(zone->New<Label>()) {} - - static ZoneLabelRef UnsafeFromLabelPointer(Label* label) { - // This is an unsafe operation, {label} must be zone allocated. - return ZoneLabelRef(label); - } - - Label* operator*() { return label_; } - - private: - Label* label_; - - // Unsafe constructor. {label} must be zone allocated. 
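The rewritten `DeferredCodeInfoImpl::Generate` above replaces the hand-rolled `std::index_sequence` unpacking with `std::apply` over `std::tuple_cat`, prepending the assembler pointer to the stored argument tuple at call time; the new `static_assert` in `PushDeferredCode` then reports mismatched arguments at the call site instead of deep inside template instantiations. A minimal sketch of that dispatch mechanism, assuming a hypothetical `Assembler` type in place of `MaglevAssembler`:

```cpp
#include <cstdio>
#include <tuple>
#include <utility>

struct Assembler {};  // hypothetical stand-in for MaglevAssembler

template <typename... Args>
struct DeferredCall {
  void (*function)(Assembler*, Args...);
  std::tuple<Args...> args;  // captured when the deferred code is pushed

  void Generate(Assembler* masm) {
    // Prepend masm to the stored arguments, then invoke; equivalent to
    // calling function(masm, args...).
    std::apply(function,
               std::tuple_cat(std::make_tuple(masm), std::move(args)));
  }
};

void EmitSlowPath(Assembler*, int slot, double imm) {
  std::printf("slow path: slot=%d imm=%.1f\n", slot, imm);
}

int main() {
  DeferredCall<int, double> call{&EmitSlowPath, {3, 1.5}};
  Assembler masm;
  call.Generate(&masm);  // prints "slow path: slot=3 imm=1.5"
}
```

Because the arguments live in a tuple member, the call can be recorded during the main code-generation pass and replayed later with whichever assembler is current, which is exactly what deferred code emission needs.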
- explicit ZoneLabelRef(Label* label) : label_(label) {} -}; - class MaglevAssembler : public MacroAssembler { public: explicit MaglevAssembler(MaglevCodeGenState* code_gen_state) @@ -103,6 +84,27 @@ class MaglevAssembler : public MacroAssembler { MaglevCodeGenState* const code_gen_state_; }; +// Label allowed to be passed to deferred code. +class ZoneLabelRef { + public: + explicit ZoneLabelRef(Zone* zone) : label_(zone->New<Label>()) {} + explicit inline ZoneLabelRef(MaglevAssembler* masm) + : ZoneLabelRef(masm->compilation_info()->zone()) {} + + static ZoneLabelRef UnsafeFromLabelPointer(Label* label) { + // This is an unsafe operation, {label} must be zone allocated. + return ZoneLabelRef(label); + } + + Label* operator*() { return label_; } + + private: + Label* label_; + + // Unsafe constructor. {label} must be zone allocated. + explicit ZoneLabelRef(Label* label) : label_(label) {} +}; + } // namespace maglev } // namespace internal } // namespace v8 diff --git a/deps/v8/src/maglev/maglev-basic-block.h b/deps/v8/src/maglev/maglev-basic-block.h index 11ca4c1c69e892..12c652942a213e 100644 --- a/deps/v8/src/maglev/maglev-basic-block.h +++ b/deps/v8/src/maglev/maglev-basic-block.h @@ -52,11 +52,6 @@ class BasicBlock { bool is_empty_block() const { return is_empty_block_; } - BasicBlock* empty_block_predecessor() const { - DCHECK(is_empty_block()); - return empty_block_predecessor_; - } - MergePointRegisterState& empty_block_register_state() { DCHECK(is_empty_block()); return *empty_block_register_state_; @@ -67,13 +62,12 @@ class BasicBlock { empty_block_register_state_ = register_state; } - void set_empty_block_predecessor(BasicBlock* predecessor) { + void set_empty_block() { DCHECK(nodes_.is_empty()); DCHECK(control_node()->Is<Jump>()); DCHECK_NULL(state_); is_empty_block_ = true; empty_block_register_state_ = nullptr; - empty_block_predecessor_ = predecessor; } Phi::List* phis() const { @@ -112,7 +106,6 @@ class BasicBlock { MergePointInterpreterFrameState* state_; MergePointRegisterState* empty_block_register_state_; }; - BasicBlock* empty_block_predecessor_; Label label_; }; diff --git a/deps/v8/src/maglev/maglev-code-gen-state.h b/deps/v8/src/maglev/maglev-code-gen-state.h index 01fdb8216b3624..72c4c42d067ba8 100644 --- a/deps/v8/src/maglev/maglev-code-gen-state.h +++ b/deps/v8/src/maglev/maglev-code-gen-state.h @@ -24,16 +24,16 @@ class MaglevAssembler; class DeferredCodeInfo { public: - virtual void Generate(MaglevAssembler* masm, Label* return_label) = 0; + virtual void Generate(MaglevAssembler* masm) = 0; Label deferred_code_label; - Label return_label; }; class MaglevCodeGenState { public: - MaglevCodeGenState(MaglevCompilationInfo* compilation_info, + MaglevCodeGenState(Isolate* isolate, MaglevCompilationInfo* compilation_info, MaglevSafepointTableBuilder* safepoint_table_builder) - : compilation_info_(compilation_info), + : isolate_(isolate), + compilation_info_(compilation_info), safepoint_table_builder_(safepoint_table_builder) {} void set_tagged_slots(int slots) { tagged_slots_ = slots; } @@ -45,6 +45,9 @@ class MaglevCodeGenState { const std::vector<DeferredCodeInfo*>& deferred_code() const { return deferred_code_; } + std::vector<DeferredCodeInfo*> TakeDeferredCode() { + return std::exchange(deferred_code_, std::vector<DeferredCodeInfo*>()); + } void PushEagerDeopt(EagerDeoptInfo* info) { eager_deopts_.push_back(info); } void PushLazyDeopt(LazyDeoptInfo* info) { lazy_deopts_.push_back(info); } const std::vector<EagerDeoptInfo*>& eager_deopts() const { @@ -60,7 +63,7 @@ 
class MaglevCodeGenState { compiler::NativeContextRef native_context() const { return broker()->target_native_context(); } - Isolate* isolate() const { return compilation_info_->isolate(); } + Isolate* isolate() const { return isolate_; } compiler::JSHeapBroker* broker() const { return compilation_info_->broker(); } MaglevGraphLabeller* graph_labeller() const { return compilation_info_->graph_labeller(); @@ -73,6 +76,7 @@ class MaglevCodeGenState { MaglevCompilationInfo* compilation_info() const { return compilation_info_; } private: + Isolate* const isolate_; MaglevCompilationInfo* const compilation_info_; MaglevSafepointTableBuilder* const safepoint_table_builder_; diff --git a/deps/v8/src/maglev/maglev-code-generator.cc b/deps/v8/src/maglev/maglev-code-generator.cc index e1578c118bd5cf..7c72a9e040b6aa 100644 --- a/deps/v8/src/maglev/maglev-code-generator.cc +++ b/deps/v8/src/maglev/maglev-code-generator.cc @@ -286,6 +286,7 @@ class ParallelMoveResolver { if (has_cycle) { if (!scratch_has_cycle_start_) { Pop(kScratchRegT); + scratch_has_cycle_start_ = true; } EmitMovesFromSource(kScratchRegT, targets); scratch_has_cycle_start_ = false; @@ -366,6 +367,7 @@ class ParallelMoveResolver { } if (scratch_has_cycle_start_ && !targets.stack_slots.empty()) { Push(kScratchRegT); + scratch_has_cycle_start_ = false; } for (uint32_t target_slot : targets.stack_slots) { DCHECK_EQ(moves_from_stack_slot_.find(target_slot), @@ -432,203 +434,210 @@ class ParallelMoveResolver { class ExceptionHandlerTrampolineBuilder { public: + static void Build(MaglevAssembler* masm, NodeBase* node) { + ExceptionHandlerTrampolineBuilder builder(masm); + builder.EmitTrampolineFor(node); + } + + private: explicit ExceptionHandlerTrampolineBuilder(MaglevAssembler* masm) : masm_(masm) {} + struct Move { + explicit Move(const ValueLocation& target, ValueNode* source) + : target(target), source(source) {} + const ValueLocation& target; + ValueNode* const source; + }; + using MoveVector = base::SmallVector<Move, 16>; + void EmitTrampolineFor(NodeBase* node) { DCHECK(node->properties().can_throw()); - ExceptionHandlerInfo* handler_info = node->exception_handler_info(); + ExceptionHandlerInfo* const handler_info = node->exception_handler_info(); DCHECK(handler_info->HasExceptionHandler()); + BasicBlock* const catch_block = handler_info->catch_block.block_ptr(); + LazyDeoptInfo* const deopt_info = node->lazy_deopt_info(); + + // The exception handler trampoline resolves moves for exception phis and + // then jumps to the actual catch block. There are a few points worth + // noting: + // + // - All source locations are assumed to be stack slots, except the + // accumulator which is stored in kReturnRegister0. We don't emit an + // explicit move for it, instead it is pushed and popped at the boundaries + // of the entire move sequence (necessary due to materialisation). + // + // - Some values may require materialisation, i.e. heap number construction + // through calls to the NewHeapNumber builtin. To avoid potential conflicts + // with other moves (which may happen due to stack slot reuse, i.e. a + // target location of move A may equal source location of move B), we + // materialise and push results to new temporary stack slots before the + // main move sequence, and then pop results into their final target + // locations afterwards. Note this is only safe because a) materialised + // values are tagged and b) the stack walk treats unknown stack slots as + // tagged. + + // TODO(v8:7700): Handle inlining. 
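The comment block above pins down the ordering that makes the trampoline safe: materialise and push results first (while `NewHeapNumber` calls may still clobber registers), then resolve the conflict-free direct moves, and finally pop into the target locations in reverse move order. A small model of that push/move/pop discipline, with an `int` array standing in for stack slots and a stand-in `Materialise()` in place of the builtin call; all names here are illustrative:

```cpp
#include <cassert>
#include <vector>

int main() {
  std::vector<int> frame(4, 0);  // stand-in for stack slots / registers
  struct Move { int target_slot; int raw_value; };
  std::vector<Move> materialising = {{0, 7}, {2, 9}};

  auto Materialise = [](int raw) { return raw * 2; };  // builtin stand-in

  // Phase 1: materialise each value and push the result to a temporary,
  // so later Materialise() "calls" cannot clobber earlier results.
  std::vector<int> pushed;
  for (const Move& m : materialising) {
    pushed.push_back(Materialise(m.raw_value));
  }

  // Phase 2: the direct (non-materialising) parallel moves would run here.

  // Phase 3: pop in reverse move order into the final target locations.
  for (auto it = materialising.rbegin(); it != materialising.rend(); ++it) {
    frame[it->target_slot] = pushed.back();
    pushed.pop_back();
  }

  assert(frame[0] == 14 && frame[2] == 18);
}
```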
- BasicBlock* block = handler_info->catch_block.block_ptr(); - LazyDeoptInfo* deopt_info = node->lazy_deopt_info(); + ParallelMoveResolver<Register> direct_moves(masm_); + MoveVector materialising_moves; + bool save_accumulator = false; + RecordMoves(deopt_info->unit, catch_block, deopt_info->state.register_frame, + &direct_moves, &materialising_moves, &save_accumulator); __ bind(&handler_info->trampoline_entry); - ClearState(); - // TODO(v8:7700): Handle inlining. - RecordMoves(deopt_info->unit, block, deopt_info->state.register_frame); - // We do moves that need to materialise values first, since we might need to - // call a builtin to create a HeapNumber, and therefore we would need to - // spill all registers. - DoMaterialiseMoves(); - // Move the rest, we will not call HeapNumber anymore. - DoDirectMoves(); - // Jump to the catch block. - __ jmp(block->label()); + __ RecordComment("-- Exception handler trampoline START"); + EmitMaterialisationsAndPushResults(materialising_moves, save_accumulator); + __ RecordComment("EmitMoves"); + direct_moves.EmitMoves(); + EmitPopMaterialisedResults(materialising_moves, save_accumulator); + __ jmp(catch_block->label()); + __ RecordComment("-- Exception handler trampoline END"); } - private: - MaglevAssembler* const masm_; - using Move = std::pair<const ValueLocation&, ValueNode*>; - base::SmallVector<Move, 16> direct_moves_; - base::SmallVector<Move, 16> materialisation_moves_; - bool save_accumulator_ = false; - MacroAssembler* masm() const { return masm_; } - void ClearState() { - direct_moves_.clear(); - materialisation_moves_.clear(); - save_accumulator_ = false; - } - - void RecordMoves(const MaglevCompilationUnit& unit, BasicBlock* block, - const CompactInterpreterFrameState* register_frame) { - for (Phi* phi : *block->phis()) { - DCHECK_EQ(phi->input_count(), 0); + void RecordMoves(const MaglevCompilationUnit& unit, BasicBlock* catch_block, + const CompactInterpreterFrameState* register_frame, + ParallelMoveResolver<Register>* direct_moves, + MoveVector* materialising_moves, bool* save_accumulator) { + for (Phi* phi : *catch_block->phis()) { + DCHECK(phi->is_exception_phi()); if (!phi->has_valid_live_range()) continue; + + const ValueLocation& target = phi->result(); if (phi->owner() == interpreter::Register::virtual_accumulator()) { // If the accumulator is live, then it is the exception object located - // at kReturnRegister0. This is also the first phi in the list. - DCHECK_EQ(phi->result().AssignedGeneralRegister(), kReturnRegister0); - save_accumulator_ = true; + // at kReturnRegister0. We don't emit a move for it since the value is + // already in the right spot, but we do have to ensure it isn't + // clobbered by calls to the NewHeapNumber builtin during + // materialisation. + DCHECK_EQ(target.AssignedGeneralRegister(), kReturnRegister0); + *save_accumulator = true; continue; } - ValueNode* value = register_frame->GetValueOf(phi->owner(), unit); - DCHECK_NOT_NULL(value); - switch (value->properties().value_representation()) { + + ValueNode* const source = register_frame->GetValueOf(phi->owner(), unit); + DCHECK_NOT_NULL(source); + // All registers must have been spilled due to the call. + // TODO(jgruber): Which call? Because any throw requires at least a call + // to Runtime::kThrowFoo? + DCHECK(!source->allocation().IsRegister()); + + switch (source->properties().value_representation()) { case ValueRepresentation::kTagged: - // All registers should have been spilled due to the call. 
- DCHECK(!value->allocation().IsRegister()); - direct_moves_.emplace_back(phi->result(), value); + direct_moves->RecordMove( + source, source->allocation(), + compiler::AllocatedOperand::cast(target.operand())); break; case ValueRepresentation::kInt32: - if (value->allocation().IsConstant()) { - direct_moves_.emplace_back(phi->result(), value); + if (source->allocation().IsConstant()) { + // TODO(jgruber): Why is it okay for Int32 constants to remain + // untagged while non-constants are unconditionally smi-tagged or + // converted to a HeapNumber during materialisation? + direct_moves->RecordMove( + source, source->allocation(), + compiler::AllocatedOperand::cast(target.operand())); } else { - materialisation_moves_.emplace_back(phi->result(), value); + materialising_moves->emplace_back(target, source); } break; case ValueRepresentation::kFloat64: - materialisation_moves_.emplace_back(phi->result(), value); + materialising_moves->emplace_back(target, source); break; } } } - void DoMaterialiseMoves() { - if (materialisation_moves_.size() == 0) return; - if (save_accumulator_) { + void EmitMaterialisationsAndPushResults(const MoveVector& moves, + bool save_accumulator) const { + if (moves.size() == 0) return; + + // It's possible to optimize this further, at the cost of additional + // complexity: + // + // - If the target location is a register, we could theoretically move the + // materialised result there immediately, with the additional complication + // that following calls to NewHeapNumber may clobber the register. + // + // - If the target location is a stack slot which is neither a source nor + // target slot for any other moves (direct or materialising), we could move + // the result there directly instead of pushing and later popping it. This + // doesn't seem worth the extra code complexity though, given we are + // talking about a presumably infrequent case for exception handlers. + + __ RecordComment("EmitMaterialisationsAndPushResults"); + if (save_accumulator) __ Push(kReturnRegister0); + for (const Move& move : moves) { + MaterialiseTo(move.source, kReturnRegister0); __ Push(kReturnRegister0); } - for (auto it = materialisation_moves_.begin(); - it < materialisation_moves_.end(); it++) { - switch (it->second->properties().value_representation()) { - case ValueRepresentation::kInt32: { - EmitMoveInt32ToReturnValue0(it->second); - break; - } - case ValueRepresentation::kFloat64: - EmitMoveFloat64ToReturnValue0(it->second); - break; - case ValueRepresentation::kTagged: - UNREACHABLE(); - } - if (it->first.operand().IsStackSlot()) { - // If the target is in a stack slot, we can immediately move - the result to it. - __ movq(ToMemOperand(it->first), kReturnRegister0); - } else { - // We spill the result to the stack, in order to be able to call the - // NewHeapNumber builtin again, however we don't need to push the result - of the last one. - if (it != materialisation_moves_.end() - 1) { - __ Push(kReturnRegister0); - } - } - } - // If the last move target is a register, the result should be in - kReturnValue0, so we emit a simple move. Otherwise it has already been - moved. - const ValueLocation& last_move_target = - materialisation_moves_.rbegin()->first; - if (last_move_target.operand().IsRegister()) { - __ Move(last_move_target.AssignedGeneralRegister(), kReturnRegister0); - } - // And then pop the rest.
- for (auto it = materialisation_moves_.rbegin() + 1; - it < materialisation_moves_.rend(); it++) { - if (it->first.operand().IsRegister()) { - __ Pop(it->first.AssignedGeneralRegister()); - } - } - if (save_accumulator_) { - __ Pop(kReturnRegister0); - } } - void DoDirectMoves() { - for (auto& [target, value] : direct_moves_) { - if (value->allocation().IsConstant()) { - if (Int32Constant* constant = value->TryCast<Int32Constant>()) { - EmitMove(target, Smi::FromInt(constant->value())); - } else { - // Int32 and Float64 constants should have already been dealt with. - DCHECK_EQ(value->properties().value_representation(), - ValueRepresentation::kTagged); - EmitConstantLoad(target, value); - } + void EmitPopMaterialisedResults(const MoveVector& moves, + bool save_accumulator) const { + if (moves.size() == 0) return; + __ RecordComment("EmitPopMaterialisedResults"); + for (auto it = moves.rbegin(); it < moves.rend(); it++) { + const ValueLocation& target = it->target; + if (target.operand().IsRegister()) { + __ Pop(target.AssignedGeneralRegister()); } else { - EmitMove(target, ToMemOperand(value)); + DCHECK(target.operand().IsStackSlot()); + __ Pop(kScratchRegister); + __ movq(masm_->ToMemOperand(target.operand()), kScratchRegister); } } - } - void EmitMoveInt32ToReturnValue0(ValueNode* value) { - // We consider Int32Constants together with tagged values. - DCHECK(!value->allocation().IsConstant()); - using D = NewHeapNumberDescriptor; - Label done; - __ movq(kReturnRegister0, ToMemOperand(value)); - __ addl(kReturnRegister0, kReturnRegister0); - __ j(no_overflow, &done); - // If we overflow, instead of bailing out (deopting), we change - // representation to a HeapNumber. - __ Cvtlsi2sd(D::GetDoubleRegisterParameter(D::kValue), ToMemOperand(value)); - __ CallBuiltin(Builtin::kNewHeapNumber); - __ bind(&done); + if (save_accumulator) __ Pop(kReturnRegister0); } - void EmitMoveFloat64ToReturnValue0(ValueNode* value) { + void MaterialiseTo(ValueNode* value, Register dst) const { using D = NewHeapNumberDescriptor; - if (Float64Constant* constant = value->TryCast<Float64Constant>()) { - __ Move(D::GetDoubleRegisterParameter(D::kValue), constant->value()); - } else { - __ Movsd(D::GetDoubleRegisterParameter(D::kValue), ToMemOperand(value)); + switch (value->properties().value_representation()) { + case ValueRepresentation::kInt32: { + // We consider Int32Constants together with tagged values. + DCHECK(!value->allocation().IsConstant()); + Label done; + __ movq(dst, ToMemOperand(value)); + __ addl(dst, dst); + __ j(no_overflow, &done); + // If we overflow, instead of bailing out (deopting), we change + // representation to a HeapNumber. 
+ __ Cvtlsi2sd(D::GetDoubleRegisterParameter(D::kValue), + ToMemOperand(value)); + __ CallBuiltin(Builtin::kNewHeapNumber); + __ Move(dst, kReturnRegister0); + __ bind(&done); + break; + } + case ValueRepresentation::kFloat64: + if (Float64Constant* constant = value->TryCast<Float64Constant>()) { + __ Move(D::GetDoubleRegisterParameter(D::kValue), constant->value()); + } else { + __ Movsd(D::GetDoubleRegisterParameter(D::kValue), + ToMemOperand(value)); + } + __ CallBuiltin(Builtin::kNewHeapNumber); + __ Move(dst, kReturnRegister0); + break; + case ValueRepresentation::kTagged: + UNREACHABLE(); } - __ CallBuiltin(Builtin::kNewHeapNumber); } - MemOperand ToMemOperand(ValueNode* node) { + MemOperand ToMemOperand(ValueNode* node) const { DCHECK(node->allocation().IsAnyStackSlot()); return masm_->ToMemOperand(node->allocation()); } - MemOperand ToMemOperand(const ValueLocation& location) { + MemOperand ToMemOperand(const ValueLocation& location) const { DCHECK(location.operand().IsStackSlot()); return masm_->ToMemOperand(location.operand()); } - template <typename Operand> - void EmitMove(const ValueLocation& dst, Operand src) { - if (dst.operand().IsRegister()) { - __ Move(dst.AssignedGeneralRegister(), src); - } else { - __ Move(kScratchRegister, src); - __ movq(ToMemOperand(dst), kScratchRegister); - } - } - - void EmitConstantLoad(const ValueLocation& dst, ValueNode* value) { - DCHECK(value->allocation().IsConstant()); - if (dst.operand().IsRegister()) { - value->LoadToRegister(masm_, dst.AssignedGeneralRegister()); - } else { - value->LoadToRegister(masm_, kScratchRegister); - __ movq(ToMemOperand(dst), kScratchRegister); - } - } + MaglevAssembler* const masm_; }; class MaglevCodeGeneratingNodeProcessor { @@ -637,134 +646,145 @@ class MaglevCodeGeneratingNodeProcessor { : masm_(masm) {} void PreProcessGraph(Graph* graph) { - if (FLAG_maglev_break_on_entry) { + code_gen_state()->set_untagged_slots(graph->untagged_stack_slots()); + code_gen_state()->set_tagged_slots(graph->tagged_stack_slots()); + + if (v8_flags.maglev_break_on_entry) { __ int3(); } - __ BailoutIfDeoptimized(rbx); + if (v8_flags.maglev_ool_prologue) { + // Call the out-of-line prologue (with parameters passed on the stack). + __ Push(Immediate(code_gen_state()->stack_slots() * kSystemPointerSize)); + __ Push(Immediate(code_gen_state()->tagged_slots() * kSystemPointerSize)); + __ CallBuiltin(Builtin::kMaglevOutOfLinePrologue); + } else { + __ BailoutIfDeoptimized(rbx); - // Tiering support. - // TODO(jgruber): Extract to a builtin (the tiering prologue is ~230 bytes - // per Maglev code object on x64). - { - // Scratch registers. Don't clobber regs related to the calling - // convention (e.g. kJavaScriptCallArgCountRegister). - Register flags = rcx; - Register feedback_vector = r9; - - // Load the feedback vector. - __ LoadTaggedPointerField( - feedback_vector, - FieldOperand(kJSFunctionRegister, JSFunction::kFeedbackCellOffset)); - __ LoadTaggedPointerField( - feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset)); - __ AssertFeedbackVector(feedback_vector); - - Label flags_need_processing, next; - __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( - flags, feedback_vector, CodeKind::MAGLEV, &flags_need_processing); - __ jmp(&next); - - __ bind(&flags_need_processing); + // Tiering support. + // TODO(jgruber): Extract to a builtin (the tiering prologue is ~230 bytes + // per Maglev code object on x64). 
{ - ASM_CODE_COMMENT_STRING(masm(), "Optimized marker check"); - __ MaybeOptimizeCodeOrTailCallOptimizedCodeSlot( - flags, feedback_vector, kJSFunctionRegister, JumpMode::kJump); - __ Trap(); - } - - __ bind(&next); - } - - __ EnterFrame(StackFrame::MAGLEV); + // Scratch registers. Don't clobber regs related to the calling + // convention (e.g. kJavaScriptCallArgCountRegister). + Register flags = rcx; + Register feedback_vector = r9; + + // Load the feedback vector. + __ LoadTaggedPointerField( + feedback_vector, + FieldOperand(kJSFunctionRegister, JSFunction::kFeedbackCellOffset)); + __ LoadTaggedPointerField( + feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset)); + __ AssertFeedbackVector(feedback_vector); + + Label flags_need_processing, next; + __ LoadFeedbackVectorFlagsAndJumpIfNeedsProcessing( + flags, feedback_vector, CodeKind::MAGLEV, &flags_need_processing); + __ jmp(&next); + + __ bind(&flags_need_processing); + { + ASM_CODE_COMMENT_STRING(masm(), "Optimized marker check"); + __ OptimizeCodeOrTailCallOptimizedCodeSlot( + flags, feedback_vector, kJSFunctionRegister, JumpMode::kJump); + __ Trap(); + } - // Save arguments in frame. - // TODO(leszeks): Consider eliding this frame if we don't make any calls - // that could clobber these registers. - __ Push(kContextRegister); - __ Push(kJSFunctionRegister); // Callee's JS function. - __ Push(kJavaScriptCallArgCountRegister); // Actual argument count. + __ bind(&next); + } - code_gen_state()->set_untagged_slots(graph->untagged_stack_slots()); - code_gen_state()->set_tagged_slots(graph->tagged_stack_slots()); + __ EnterFrame(StackFrame::MAGLEV); - { - ASM_CODE_COMMENT_STRING(masm(), " Stack/interrupt check"); - // Stack check. This folds the checks for both the interrupt stack limit - // check and the real stack limit into one by just checking for the - // interrupt limit. The interrupt limit is either equal to the real stack - // limit or tighter. By ensuring we have space until that limit after - // building the frame we can quickly precheck both at once. - __ Move(kScratchRegister, rsp); - // TODO(leszeks): Include a max call argument size here. - __ subq(kScratchRegister, - Immediate(code_gen_state()->stack_slots() * kSystemPointerSize)); - __ cmpq(kScratchRegister, - __ StackLimitAsOperand(StackLimitKind::kInterruptStackLimit)); + // Save arguments in frame. + // TODO(leszeks): Consider eliding this frame if we don't make any calls + // that could clobber these registers. + __ Push(kContextRegister); + __ Push(kJSFunctionRegister); // Callee's JS function. + __ Push(kJavaScriptCallArgCountRegister); // Actual argument count. - __ j(below, &deferred_call_stack_guard_); - __ bind(&deferred_call_stack_guard_return_); - } + { + ASM_CODE_COMMENT_STRING(masm(), " Stack/interrupt check"); + // Stack check. This folds the checks for both the interrupt stack limit + // check and the real stack limit into one by just checking for the + // interrupt limit. The interrupt limit is either equal to the real + // stack limit or tighter. By ensuring we have space until that limit + // after building the frame we can quickly precheck both at once. + __ Move(kScratchRegister, rsp); + // TODO(leszeks): Include a max call argument size here. 
+ __ subq(kScratchRegister, Immediate(code_gen_state()->stack_slots() * + kSystemPointerSize)); + __ cmpq(kScratchRegister, + __ StackLimitAsOperand(StackLimitKind::kInterruptStackLimit)); + + __ j(below, &deferred_call_stack_guard_); + __ bind(&deferred_call_stack_guard_return_); + } - // Initialize stack slots. - if (graph->tagged_stack_slots() > 0) { - ASM_CODE_COMMENT_STRING(masm(), "Initializing stack slots"); - // TODO(leszeks): Consider filling with xmm + movdqa instead. - __ Move(rax, Immediate(0)); - - // Magic value. Experimentally, an unroll size of 8 doesn't seem any worse - // than fully unrolled pushes. - const int kLoopUnrollSize = 8; - int tagged_slots = graph->tagged_stack_slots(); - if (tagged_slots < 2 * kLoopUnrollSize) { - // If the frame is small enough, just unroll the frame fill completely. - for (int i = 0; i < tagged_slots; ++i) { - __ pushq(rax); - } - } else { - // Extract the first few slots to round to the unroll size. - int first_slots = tagged_slots % kLoopUnrollSize; - for (int i = 0; i < first_slots; ++i) { - __ pushq(rax); - } - __ Move(rbx, Immediate(tagged_slots / kLoopUnrollSize)); - // We enter the loop unconditionally, so make sure we need to loop at - // least once. - DCHECK_GT(tagged_slots / kLoopUnrollSize, 0); - Label loop; - __ bind(&loop); - for (int i = 0; i < kLoopUnrollSize; ++i) { - __ pushq(rax); + // Initialize stack slots. + if (graph->tagged_stack_slots() > 0) { + ASM_CODE_COMMENT_STRING(masm(), "Initializing stack slots"); + // TODO(leszeks): Consider filling with xmm + movdqa instead. + __ Move(rax, Immediate(0)); + + // Magic value. Experimentally, an unroll size of 8 doesn't seem any + // worse than fully unrolled pushes. + const int kLoopUnrollSize = 8; + int tagged_slots = graph->tagged_stack_slots(); + if (tagged_slots < 2 * kLoopUnrollSize) { + // If the frame is small enough, just unroll the frame fill + // completely. + for (int i = 0; i < tagged_slots; ++i) { + __ pushq(rax); + } + } else { + // Extract the first few slots to round to the unroll size. + int first_slots = tagged_slots % kLoopUnrollSize; + for (int i = 0; i < first_slots; ++i) { + __ pushq(rax); + } + __ Move(rbx, Immediate(tagged_slots / kLoopUnrollSize)); + // We enter the loop unconditionally, so make sure we need to loop at + // least once. + DCHECK_GT(tagged_slots / kLoopUnrollSize, 0); + Label loop; + __ bind(&loop); + for (int i = 0; i < kLoopUnrollSize; ++i) { + __ pushq(rax); + } + __ decl(rbx); + __ j(greater, &loop); } - __ decl(rbx); - __ j(greater, &loop); } - } - if (graph->untagged_stack_slots() > 0) { - // Extend rsp by the size of the remaining untagged part of the frame, no - // need to initialise these. - __ subq(rsp, - Immediate(graph->untagged_stack_slots() * kSystemPointerSize)); + if (graph->untagged_stack_slots() > 0) { + // Extend rsp by the size of the remaining untagged part of the frame, + // no need to initialise these. + __ subq(rsp, + Immediate(graph->untagged_stack_slots() * kSystemPointerSize)); + } } } void PostProcessGraph(Graph*) { __ int3(); - __ bind(&deferred_call_stack_guard_); - ASM_CODE_COMMENT_STRING(masm(), "Stack/interrupt call"); - // Save any registers that can be referenced by RegisterInput. - // TODO(leszeks): Only push those that are used by the graph. 
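The frame-fill sequence in the hunk above peels `tagged_slots % kLoopUnrollSize` pushes first, so the remainder is an exact multiple of the unroll factor, and fully unrolls small frames. The same arithmetic in plain C++, with vector appends standing in for `pushq`; `FillTaggedSlots` is a hypothetical name for illustration only:

```cpp
#include <cassert>
#include <vector>

std::vector<int> FillTaggedSlots(int n) {
  constexpr int kUnroll = 8;  // mirrors kLoopUnrollSize in the patch
  std::vector<int> frame;
  if (n < 2 * kUnroll) {
    // Small frame: unroll the fill completely.
    for (int i = 0; i < n; ++i) frame.push_back(0);
    return frame;
  }
  // Peel n % kUnroll pushes so the loop count divides evenly.
  int first = n % kUnroll;
  for (int i = 0; i < first; ++i) frame.push_back(0);
  // The 8-wide loop body runs n / kUnroll times (at least twice here).
  for (int iters = n / kUnroll; iters > 0; --iters) {
    for (int i = 0; i < kUnroll; ++i) frame.push_back(0);
  }
  return frame;
}

int main() {
  assert(FillTaggedSlots(5).size() == 5);
  assert(FillTaggedSlots(27).size() == 27);  // 3 peeled + 3 * 8 unrolled
}
```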
- __ PushAll(RegisterInput::kAllowedRegisters); - // Push the frame size - __ Push(Immediate( - Smi::FromInt(code_gen_state()->stack_slots() * kSystemPointerSize))); - __ CallRuntime(Runtime::kStackGuardWithGap, 1); - __ PopAll(RegisterInput::kAllowedRegisters); - __ jmp(&deferred_call_stack_guard_return_); + + if (!v8_flags.maglev_ool_prologue) { + __ bind(&deferred_call_stack_guard_); + ASM_CODE_COMMENT_STRING(masm(), "Stack/interrupt call"); + // Save any registers that can be referenced by RegisterInput. + // TODO(leszeks): Only push those that are used by the graph. + __ PushAll(RegisterInput::kAllowedRegisters); + // Push the frame size + __ Push(Immediate( + Smi::FromInt(code_gen_state()->stack_slots() * kSystemPointerSize))); + __ CallRuntime(Runtime::kStackGuardWithGap, 1); + __ PopAll(RegisterInput::kAllowedRegisters); + __ jmp(&deferred_call_stack_guard_return_); + } } void PreProcessBasicBlock(BasicBlock* block) { - if (FLAG_code_comments) { + if (v8_flags.code_comments) { std::stringstream ss; ss << "-- Block b" << graph_labeller()->BlockId(block); __ RecordComment(ss.str()); @@ -775,14 +795,14 @@ class MaglevCodeGeneratingNodeProcessor { template <typename NodeT> void Process(NodeT* node, const ProcessingState& state) { - if (FLAG_code_comments) { + if (v8_flags.code_comments) { std::stringstream ss; ss << "-- " << graph_labeller()->NodeId(node) << ": " << PrintNode(graph_labeller(), node); __ RecordComment(ss.str()); } - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ movq(kScratchRegister, rbp); __ subq(kScratchRegister, rsp); __ cmpq(kScratchRegister, @@ -806,7 +826,7 @@ class MaglevCodeGeneratingNodeProcessor { compiler::AllocatedOperand::cast(value_node->result().operand()); // We shouldn't spill nodes which already output to the stack. if (!source.IsAnyStackSlot()) { - if (FLAG_code_comments) __ RecordComment("-- Spill:"); + if (v8_flags.code_comments) __ RecordComment("-- Spill:"); if (source.IsRegister()) { __ movq(masm()->GetStackSlot(value_node->spill_slot()), ToRegister(source)); @@ -851,7 +871,7 @@ class MaglevCodeGeneratingNodeProcessor { // TODO(leszeks): We should remove dead phis entirely and turn this into // a DCHECK. 
if (!phi->has_valid_live_range()) { - if (FLAG_code_comments) { + if (v8_flags.code_comments) { std::stringstream ss; ss << "-- * " << phi->input(state.block()->predecessor_id()).operand() << " → " @@ -866,7 +886,7 @@ class MaglevCodeGeneratingNodeProcessor { compiler::InstructionOperand source = input.operand(); compiler::AllocatedOperand target = compiler::AllocatedOperand::cast(phi->result().operand()); - if (FLAG_code_comments) { + if (v8_flags.code_comments) { std::stringstream ss; ss << "-- * " << source << " → " << target << " (n" << graph_labeller()->NodeId(phi) << ")"; @@ -889,7 +909,7 @@ class MaglevCodeGeneratingNodeProcessor { if (LoadMergeState(state, &node, &merge)) { compiler::InstructionOperand source = merge->operand(predecessor_id); - if (FLAG_code_comments) { + if (v8_flags.code_comments) { std::stringstream ss; ss << "-- * " << source << " → " << reg; __ RecordComment(ss.str()); @@ -909,7 +929,7 @@ class MaglevCodeGeneratingNodeProcessor { if (LoadMergeState(state, &node, &merge)) { compiler::InstructionOperand source = merge->operand(predecessor_id); - if (FLAG_code_comments) { + if (v8_flags.code_comments) { std::stringstream ss; ss << "-- * " << source << " → " << reg; __ RecordComment(ss.str()); @@ -943,17 +963,19 @@ class MaglevCodeGeneratingNodeProcessor { class MaglevCodeGeneratorImpl final { public: - static MaybeHandle<Code> Generate(MaglevCompilationInfo* compilation_info, + static MaybeHandle<Code> Generate(Isolate* isolate, + MaglevCompilationInfo* compilation_info, Graph* graph) { - return MaglevCodeGeneratorImpl(compilation_info, graph).Generate(); + return MaglevCodeGeneratorImpl(isolate, compilation_info, graph).Generate(); } private: - MaglevCodeGeneratorImpl(MaglevCompilationInfo* compilation_info, Graph* graph) + MaglevCodeGeneratorImpl(Isolate* isolate, + MaglevCompilationInfo* compilation_info, Graph* graph) : safepoint_table_builder_(compilation_info->zone(), graph->tagged_stack_slots(), graph->untagged_stack_slots()), - code_gen_state_(compilation_info, safepoint_table_builder()), + code_gen_state_(isolate, compilation_info, safepoint_table_builder()), masm_(&code_gen_state_), processor_(&masm_), graph_(graph) {} @@ -968,15 +990,20 @@ class MaglevCodeGeneratorImpl final { processor_.ProcessGraph(graph_); EmitDeferredCode(); EmitDeopts(); - EmitExceptionHandlersTrampolines(); + EmitExceptionHandlerTrampolines(); } void EmitDeferredCode() { - for (DeferredCodeInfo* deferred_code : code_gen_state_.deferred_code()) { - __ RecordComment("-- Deferred block"); - __ bind(&deferred_code->deferred_code_label); - deferred_code->Generate(masm(), &deferred_code->return_label); - __ Trap(); + // Loop over deferred_code() multiple times, clearing the vector on each + // outer loop, so that deferred code can itself emit deferred code. 
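`TakeDeferredCode` above uses `std::exchange` to hand the current batch of deferred blocks to the caller and leave an empty vector behind, and the drain loop that follows keeps taking fresh batches until nothing new is emitted, which is what lets deferred code itself emit more deferred code. A minimal sketch of this worklist pattern, with `std::function` standing in for `DeferredCodeInfo`:

```cpp
#include <cstdio>
#include <functional>
#include <utility>
#include <vector>

std::vector<std::function<void()>> deferred;

void Emit(std::function<void()> fn) { deferred.push_back(std::move(fn)); }

int main() {
  Emit([] {
    std::puts("outer deferred block");
    // Emitted while draining: lands in the fresh vector and is handled
    // on the next pass of the outer loop.
    Emit([] { std::puts("nested deferred block"); });
  });
  while (!deferred.empty()) {
    // std::exchange takes ownership of the current batch and resets the
    // shared vector, so iteration is never invalidated by new emissions.
    for (auto& fn : std::exchange(deferred, {})) fn();
  }
}
```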
+ while (!code_gen_state_.deferred_code().empty()) { + for (DeferredCodeInfo* deferred_code : + code_gen_state_.TakeDeferredCode()) { + __ RecordComment("-- Deferred block"); + __ bind(&deferred_code->deferred_code_label); + deferred_code->Generate(masm()); + __ Trap(); + } } } @@ -1014,12 +1041,11 @@ class MaglevCodeGeneratorImpl final { } } - void EmitExceptionHandlersTrampolines() { + void EmitExceptionHandlerTrampolines() { if (code_gen_state_.handlers().size() == 0) return; - ExceptionHandlerTrampolineBuilder builder(masm()); - __ RecordComment("-- Exception handlers trampolines"); + __ RecordComment("-- Exception handler trampolines"); for (NodeBase* node : code_gen_state_.handlers()) { - builder.EmitTrampolineFor(node); + ExceptionHandlerTrampolineBuilder::Build(masm(), node); } } @@ -1151,9 +1177,7 @@ class MaglevCodeGeneratorImpl final { return stack_slot_count() + StandardFrameConstants::kFixedSlotCount; } - Isolate* isolate() const { - return code_gen_state_.compilation_info()->isolate(); - } + Isolate* isolate() const { return code_gen_state_.isolate(); } MaglevAssembler* masm() { return &masm_; } MaglevSafepointTableBuilder* safepoint_table_builder() { return &safepoint_table_builder_; @@ -1171,8 +1195,8 @@ class MaglevCodeGeneratorImpl final { // static MaybeHandle<Code> MaglevCodeGenerator::Generate( - MaglevCompilationInfo* compilation_info, Graph* graph) { - return MaglevCodeGeneratorImpl::Generate(compilation_info, graph); + Isolate* isolate, MaglevCompilationInfo* compilation_info, Graph* graph) { + return MaglevCodeGeneratorImpl::Generate(isolate, compilation_info, graph); } } // namespace maglev diff --git a/deps/v8/src/maglev/maglev-code-generator.h b/deps/v8/src/maglev/maglev-code-generator.h index 8dbd8921aba423..64ac1df0bc0737 100644 --- a/deps/v8/src/maglev/maglev-code-generator.h +++ b/deps/v8/src/maglev/maglev-code-generator.h @@ -16,7 +16,8 @@ class MaglevCompilationInfo; class MaglevCodeGenerator : public AllStatic { public: - static MaybeHandle<Code> Generate(MaglevCompilationInfo* compilation_info, + static MaybeHandle<Code> Generate(Isolate* isolate, + MaglevCompilationInfo* compilation_info, Graph* graph); }; diff --git a/deps/v8/src/maglev/maglev-compilation-info.cc b/deps/v8/src/maglev/maglev-compilation-info.cc index e15a30cac74f26..300d7e66e12227 100644 --- a/deps/v8/src/maglev/maglev-compilation-info.cc +++ b/deps/v8/src/maglev/maglev-compilation-info.cc @@ -52,14 +52,17 @@ class V8_NODISCARD MaglevCompilationHandleScope final { MaglevCompilationInfo::MaglevCompilationInfo(Isolate* isolate, Handle<JSFunction> function) : zone_(isolate->allocator(), kMaglevZoneName), - isolate_(isolate), broker_(new compiler::JSHeapBroker( - isolate, zone(), FLAG_trace_heap_broker, CodeKind::MAGLEV)) -#define V(Name) , Name##_(FLAG_##Name) + isolate, zone(), v8_flags.trace_heap_broker, CodeKind::MAGLEV)) +#define V(Name) , Name##_(v8_flags.Name) MAGLEV_COMPILATION_FLAG_LIST(V) #undef V -{ - DCHECK(FLAG_maglev); + , + specialize_to_function_context_( + v8_flags.maglev_function_context_specialization && + function->raw_feedback_cell().map() == + ReadOnlyRoots(isolate).one_closure_cell_map()) { + DCHECK(v8_flags.maglev); MaglevCompilationHandleScope compilation(isolate, this); diff --git a/deps/v8/src/maglev/maglev-compilation-info.h b/deps/v8/src/maglev/maglev-compilation-info.h index 049990f222a10a..ad65623bbde191 100644 --- a/deps/v8/src/maglev/maglev-compilation-info.h +++ b/deps/v8/src/maglev/maglev-compilation-info.h @@ -34,6 +34,8 @@ class Graph; class 
MaglevCompilationUnit; class MaglevGraphLabeller; +// A list of v8_flag values copied into the MaglevCompilationInfo for +// guaranteed {immutable,threadsafe} access. #define MAGLEV_COMPILATION_FLAG_LIST(V) \ V(code_comments) \ V(maglev) \ @@ -51,7 +53,6 @@ class MaglevCompilationInfo final { } ~MaglevCompilationInfo(); - Isolate* isolate() const { return isolate_; } Zone* zone() { return &zone_; } compiler::JSHeapBroker* broker() const { return broker_.get(); } MaglevCompilationUnit* toplevel_compilation_unit() const { @@ -88,6 +89,10 @@ class MaglevCompilationInfo final { MAGLEV_COMPILATION_FLAG_LIST(V) #undef V + bool specialize_to_function_context() const { + return specialize_to_function_context_; + } + // Must be called from within a MaglevCompilationHandleScope. Transfers owned // handles (e.g. shared_, function_) to the new scope. void ReopenHandlesInNewHandleScope(Isolate* isolate); @@ -105,7 +110,6 @@ class MaglevCompilationInfo final { MaglevCompilationInfo(Isolate* isolate, Handle<JSFunction> function); Zone zone_; - Isolate* const isolate_; const std::unique_ptr<compiler::JSHeapBroker> broker_; // Must be initialized late since it requires an initialized heap broker. MaglevCompilationUnit* toplevel_compilation_unit_ = nullptr; @@ -123,6 +127,12 @@ class MaglevCompilationInfo final { MAGLEV_COMPILATION_FLAG_LIST(V) #undef V + // If enabled, the generated code can rely on the function context to be a + // constant (known at compile-time). This opens new optimization + // opportunities, but prevents code sharing between different function + // contexts. + const bool specialize_to_function_context_; + // 1) PersistentHandles created via PersistentHandlesScope inside of // CompilationHandleScope. // 2) Owned by MaglevCompilationInfo. diff --git a/deps/v8/src/maglev/maglev-compilation-unit.cc b/deps/v8/src/maglev/maglev-compilation-unit.cc index 16b8ae08ea49a6..590b2e3f785f92 100644 --- a/deps/v8/src/maglev/maglev-compilation-unit.cc +++ b/deps/v8/src/maglev/maglev-compilation-unit.cc @@ -40,8 +40,6 @@ compiler::JSHeapBroker* MaglevCompilationUnit::broker() const { return info_->broker(); } -Isolate* MaglevCompilationUnit::isolate() const { return info_->isolate(); } - Zone* MaglevCompilationUnit::zone() const { return info_->zone(); } bool MaglevCompilationUnit::has_graph_labeller() const { diff --git a/deps/v8/src/maglev/maglev-compilation-unit.h b/deps/v8/src/maglev/maglev-compilation-unit.h index 5281fa16fc7500..000e93696551f9 100644 --- a/deps/v8/src/maglev/maglev-compilation-unit.h +++ b/deps/v8/src/maglev/maglev-compilation-unit.h @@ -42,7 +42,6 @@ class MaglevCompilationUnit : public ZoneObject { MaglevCompilationInfo* info() const { return info_; } const MaglevCompilationUnit* caller() const { return caller_; } compiler::JSHeapBroker* broker() const; - Isolate* isolate() const; LocalIsolate* local_isolate() const; Zone* zone() const; int register_count() const { return register_count_; } diff --git a/deps/v8/src/maglev/maglev-compiler.cc b/deps/v8/src/maglev/maglev-compiler.cc index 78decb2857af6f..2855bdbf5a1e19 100644 --- a/deps/v8/src/maglev/maglev-compiler.cc +++ b/deps/v8/src/maglev/maglev-compiler.cc @@ -35,6 +35,7 @@ #include "src/maglev/maglev-graph-verifier.h" #include "src/maglev/maglev-graph.h" #include "src/maglev/maglev-interpreter-frame-state.h" +#include "src/maglev/maglev-ir-inl.h" #include "src/maglev/maglev-ir.h" #include "src/maglev/maglev-regalloc.h" #include "src/maglev/maglev-vreg-allocator.h" @@ -175,56 +176,24 @@ class UseMarkingProcessor { } } - 
void MarkCheckpointNodes(NodeBase* node, const MaglevCompilationUnit& unit, - const CheckpointedInterpreterState* checkpoint_state, - InputLocation* input_locations, - LoopUsedNodes* loop_used_nodes, - const ProcessingState& state, int& index) { - if (checkpoint_state->parent) { - MarkCheckpointNodes(node, *unit.caller(), checkpoint_state->parent, - input_locations, loop_used_nodes, state, index); - } - - const CompactInterpreterFrameState* register_frame = - checkpoint_state->register_frame; - int use_id = node->id(); - - register_frame->ForEachValue( - unit, [&](ValueNode* node, interpreter::Register reg) { - MarkUse(node, use_id, &input_locations[index++], loop_used_nodes); - }); - } void MarkCheckpointNodes(NodeBase* node, const EagerDeoptInfo* deopt_info, LoopUsedNodes* loop_used_nodes, const ProcessingState& state) { - int index = 0; - MarkCheckpointNodes(node, deopt_info->unit, &deopt_info->state, - deopt_info->input_locations, loop_used_nodes, state, - index); + int use_id = node->id(); + detail::DeepForEachInput( + deopt_info, + [&](ValueNode* node, interpreter::Register reg, InputLocation* input) { + MarkUse(node, use_id, input, loop_used_nodes); + }); } void MarkCheckpointNodes(NodeBase* node, const LazyDeoptInfo* deopt_info, LoopUsedNodes* loop_used_nodes, const ProcessingState& state) { - int index = 0; - - if (deopt_info->state.parent) { - MarkCheckpointNodes(node, *deopt_info->unit.caller(), - deopt_info->state.parent, deopt_info->input_locations, - loop_used_nodes, state, index); - } - - // Handle the top-of-frame info manually, since we have to handle the result - // location. - const CompactInterpreterFrameState* register_frame = - deopt_info->state.register_frame; int use_id = node->id(); - - register_frame->ForEachValue( - deopt_info->unit, [&](ValueNode* node, interpreter::Register reg) { - // Skip over the result location. - if (deopt_info->IsResultRegister(reg)) return; - MarkUse(node, use_id, &deopt_info->input_locations[index++], - loop_used_nodes); + detail::DeepForEachInput( + deopt_info, + [&](ValueNode* node, interpreter::Register reg, InputLocation* input) { + MarkUse(node, use_id, input, loop_used_nodes); }); } @@ -264,14 +233,13 @@ class TranslationArrayProcessor { } private: - const InputLocation* EmitDeoptFrame(const MaglevCompilationUnit& unit, - const CheckpointedInterpreterState& state, - const InputLocation* input_locations) { + void EmitDeoptFrame(const MaglevCompilationUnit& unit, + const CheckpointedInterpreterState& state, + const InputLocation* input_locations) { if (state.parent) { // Deopt input locations are in the order of deopt frame emission, so // update the pointer after emitting the parent frame. 
- input_locations = - EmitDeoptFrame(*unit.caller(), *state.parent, input_locations); + EmitDeoptFrame(*unit.caller(), *state.parent, input_locations); } // Returns are used for updating an accumulator or register after a lazy @@ -283,9 +251,8 @@ class TranslationArrayProcessor { GetDeoptLiteral(*unit.shared_function_info().object()), unit.register_count(), return_offset, return_count); - return EmitDeoptFrameValues(unit, state.register_frame, input_locations, - interpreter::Register::invalid_value(), - return_count); + EmitDeoptFrameValues(unit, state.register_frame, input_locations, + interpreter::Register::invalid_value(), return_count); } void EmitEagerDeopt(EagerDeoptInfo* deopt_info) { @@ -314,8 +281,8 @@ class TranslationArrayProcessor { if (deopt_info->state.parent) { // Deopt input locations are in the order of deopt frame emission, so // update the pointer after emitting the parent frame. - input_locations = EmitDeoptFrame( - *unit.caller(), *deopt_info->state.parent, input_locations); + EmitDeoptFrame(*unit.caller(), *deopt_info->state.parent, + input_locations); } // Return offsets are counted from the end of the translation frame, which @@ -431,10 +398,10 @@ class TranslationArrayProcessor { result_location.index() + result_size - 1); } - const InputLocation* EmitDeoptFrameValues( + void EmitDeoptFrameValues( const MaglevCompilationUnit& compilation_unit, const CompactInterpreterFrameState* checkpoint_state, - const InputLocation* input_locations, + const InputLocation*& input_location, interpreter::Register result_location, int result_size) { // Closure if (compilation_unit.inlining_depth() == 0) { @@ -449,7 +416,6 @@ class TranslationArrayProcessor { // TODO(leszeks): The input locations array happens to be in the same order // as parameters+context+locals+accumulator are accessed here. We should // make this clearer and guard against this invariant failing. - const InputLocation* input_location = input_locations; // Parameters { @@ -461,9 +427,9 @@ class TranslationArrayProcessor { translation_array_builder().StoreOptimizedOut(); } else { EmitDeoptFrameSingleValue(value, *input_location); + input_location++; } i++; - input_location++; }); } @@ -478,18 +444,15 @@ class TranslationArrayProcessor { checkpoint_state->ForEachLocal( compilation_unit, [&](ValueNode* value, interpreter::Register reg) { DCHECK_LE(i, reg.index()); - if (InReturnValues(reg, result_location, result_size)) { - input_location++; - return; - } + if (InReturnValues(reg, result_location, result_size)) return; while (i < reg.index()) { translation_array_builder().StoreOptimizedOut(); i++; } DCHECK_EQ(i, reg.index()); EmitDeoptFrameSingleValue(value, *input_location); - i++; input_location++; + i++; }); while (i < compilation_unit.register_count()) { translation_array_builder().StoreOptimizedOut(); @@ -504,12 +467,11 @@ class TranslationArrayProcessor { result_location, result_size)) { ValueNode* value = checkpoint_state->accumulator(compilation_unit); EmitDeoptFrameSingleValue(value, *input_location); + input_location++; } else { translation_array_builder().StoreOptimizedOut(); } } - - return input_location; } int GetDeoptLiteral(Object obj) { @@ -539,13 +501,14 @@ void MaglevCompiler::Compile(LocalIsolate* local_isolate, compiler::UnparkedScopeIfNeeded unparked_scope(compilation_info->broker()); // Build graph. 
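// The FLAG_foo -> v8_flags.foo rewrites in the hunks below are part of a
// tree-wide migration from free global flag variables to members of a single
// flag-values struct. A minimal sketch of the new shape (hypothetical names):
#include <cstdio>

struct ExampleFlagValues {
  bool print_maglev_graph = false;
  bool print_maglev_code = false;
};
ExampleFlagValues example_v8_flags;  // stand-in for the real v8_flags object

int main() {
  example_v8_flags.print_maglev_graph = true;
  // Call sites name a struct member instead of a FLAG_* global:
  if (example_v8_flags.print_maglev_graph) std::printf("print graph\n");
}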
-  if (FLAG_print_maglev_code || FLAG_code_comments || FLAG_print_maglev_graph ||
-      FLAG_trace_maglev_graph_building || FLAG_trace_maglev_regalloc) {
+  if (v8_flags.print_maglev_code || v8_flags.code_comments ||
+      v8_flags.print_maglev_graph || v8_flags.trace_maglev_graph_building ||
+      v8_flags.trace_maglev_regalloc) {
     compilation_info->set_graph_labeller(new MaglevGraphLabeller());
   }
 
-  if (FLAG_print_maglev_code || FLAG_print_maglev_graph ||
-      FLAG_trace_maglev_graph_building || FLAG_trace_maglev_regalloc) {
+  if (v8_flags.print_maglev_code || v8_flags.print_maglev_graph ||
+      v8_flags.trace_maglev_graph_building || v8_flags.trace_maglev_regalloc) {
     MaglevCompilationUnit* top_level_unit =
         compilation_info->toplevel_compilation_unit();
     std::cout << "Compiling " << Brief(*top_level_unit->function().object())
@@ -561,7 +524,7 @@ void MaglevCompiler::Compile(LocalIsolate* local_isolate,
 
   graph_builder.Build();
 
-  if (FLAG_print_maglev_graph) {
+  if (v8_flags.print_maglev_graph) {
     std::cout << "\nAfter graph building" << std::endl;
     PrintGraph(std::cout, compilation_info, graph_builder.graph());
   }
@@ -579,7 +542,7 @@ void MaglevCompiler::Compile(LocalIsolate* local_isolate,
     processor.ProcessGraph(graph_builder.graph());
   }
 
-  if (FLAG_print_maglev_graph) {
+  if (v8_flags.print_maglev_graph) {
     std::cout << "After node processor" << std::endl;
     PrintGraph(std::cout, compilation_info, graph_builder.graph());
   }
@@ -587,7 +550,7 @@ void MaglevCompiler::Compile(LocalIsolate* local_isolate,
   StraightForwardRegisterAllocator allocator(compilation_info,
                                              graph_builder.graph());
 
-  if (FLAG_print_maglev_graph) {
+  if (v8_flags.print_maglev_graph) {
     std::cout << "After register allocation" << std::endl;
     PrintGraph(std::cout, compilation_info, graph_builder.graph());
   }
@@ -602,7 +565,7 @@ void MaglevCompiler::Compile(LocalIsolate* local_isolate,
 
 // static
 MaybeHandle<CodeT> MaglevCompiler::GenerateCode(
-    MaglevCompilationInfo* compilation_info) {
+    Isolate* isolate, MaglevCompilationInfo* compilation_info) {
   Graph* const graph = compilation_info->graph();
   if (graph == nullptr) {
     // Compilation failed.
@@ -614,7 +577,8 @@ MaybeHandle<CodeT> MaglevCompiler::GenerateCode(
   }
 
   Handle<Code> code;
-  if (!MaglevCodeGenerator::Generate(compilation_info, graph).ToHandle(&code)) {
+  if (!MaglevCodeGenerator::Generate(isolate, compilation_info, graph)
+           .ToHandle(&code)) {
     compilation_info->toplevel_compilation_unit()
         ->shared_function_info()
         .object()
@@ -629,11 +593,10 @@ MaybeHandle<CodeT> MaglevCompiler::GenerateCode(
     return {};
   }
 
-  if (FLAG_print_maglev_code) {
+  if (v8_flags.print_maglev_code) {
     code->Print();
   }
 
-  Isolate* const isolate = compilation_info->isolate();
   isolate->native_context()->AddOptimizedCode(ToCodeT(*code));
   return ToCodeT(code, isolate);
 }
diff --git a/deps/v8/src/maglev/maglev-compiler.h b/deps/v8/src/maglev/maglev-compiler.h
index 1bb7ad6d376a59..febe387d87542c 100644
--- a/deps/v8/src/maglev/maglev-compiler.h
+++ b/deps/v8/src/maglev/maglev-compiler.h
@@ -30,7 +30,7 @@ class MaglevCompiler : public AllStatic {
 
   // Called on the main thread after Compile has completed.
   // TODO(v8:7700): Move this to a different class?
static MaybeHandle<CodeT> GenerateCode( - MaglevCompilationInfo* compilation_info); + Isolate* isolate, MaglevCompilationInfo* compilation_info); }; } // namespace maglev diff --git a/deps/v8/src/maglev/maglev-concurrent-dispatcher.cc b/deps/v8/src/maglev/maglev-concurrent-dispatcher.cc index 87756c21a0f881..cb8e1044ee5aa9 100644 --- a/deps/v8/src/maglev/maglev-concurrent-dispatcher.cc +++ b/deps/v8/src/maglev/maglev-concurrent-dispatcher.cc @@ -88,7 +88,7 @@ MaglevCompilationJob::MaglevCompilationJob( std::unique_ptr<MaglevCompilationInfo>&& info) : OptimizedCompilationJob(kMaglevCompilerName, State::kReadyToPrepare), info_(std::move(info)) { - DCHECK(FLAG_maglev); + DCHECK(v8_flags.maglev); } MaglevCompilationJob::~MaglevCompilationJob() = default; @@ -108,7 +108,7 @@ CompilationJob::Status MaglevCompilationJob::ExecuteJobImpl( CompilationJob::Status MaglevCompilationJob::FinalizeJobImpl(Isolate* isolate) { Handle<CodeT> codet; - if (!maglev::MaglevCompiler::GenerateCode(info()).ToHandle(&codet)) { + if (!maglev::MaglevCompiler::GenerateCode(isolate, info()).ToHandle(&codet)) { return CompilationJob::FAILED; } info()->toplevel_compilation_unit()->function().object()->set_code(*codet); @@ -119,6 +119,10 @@ Handle<JSFunction> MaglevCompilationJob::function() const { return info_->toplevel_compilation_unit()->function().object(); } +bool MaglevCompilationJob::specialize_to_function_context() const { + return info_->specialize_to_function_context(); +} + // The JobTask is posted to V8::GetCurrentPlatform(). It's responsible for // processing the incoming queue on a worker thread. class MaglevConcurrentDispatcher::JobTask final : public v8::JobTask { @@ -134,6 +138,9 @@ class MaglevConcurrentDispatcher::JobTask final : public v8::JobTask { std::unique_ptr<MaglevCompilationJob> job; if (!incoming_queue()->Dequeue(&job)) break; DCHECK_NOT_NULL(job); + TRACE_EVENT_WITH_FLOW0( + TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.MaglevBackground", + job.get(), TRACE_EVENT_FLAG_FLOW_IN | TRACE_EVENT_FLAG_FLOW_OUT); RuntimeCallStats* rcs = nullptr; // TODO(v8:7700): Implement. 
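// The TRACE_EVENT_WITH_FLOW0 calls added in this file use the job pointer as
// a flow id, emitting FLOW_OUT on the worker thread and FLOW_IN at
// finalization, so a trace viewer can connect both halves of one compilation.
// A toy model of that pairing (hypothetical event type, not the real tracing
// macros):
#include <cstdint>
#include <cstdio>

struct ExampleFlowEvent {
  const char* name;
  std::uint64_t flow_id;  // equal ids link events across threads
};

void ExampleEmit(const ExampleFlowEvent& e) {
  std::printf("%s flow=%llu\n", e.name,
              static_cast<unsigned long long>(e.flow_id));
}

int main() {
  int job = 0;  // stand-in for a MaglevCompilationJob
  auto id = static_cast<std::uint64_t>(reinterpret_cast<std::uintptr_t>(&job));
  ExampleEmit({"V8.MaglevBackground", id});          // worker: FLOW_IN|OUT
  ExampleEmit({"V8.MaglevConcurrentFinalize", id});  // main thread: FLOW_IN
}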
CompilationJob::Status status = job->ExecuteJob(rcs, &local_isolate); CHECK_EQ(status, CompilationJob::SUCCEEDED); @@ -157,7 +164,7 @@ class MaglevConcurrentDispatcher::JobTask final : public v8::JobTask { MaglevConcurrentDispatcher::MaglevConcurrentDispatcher(Isolate* isolate) : isolate_(isolate) { - if (FLAG_concurrent_recompilation && FLAG_maglev) { + if (v8_flags.concurrent_recompilation && v8_flags.maglev) { job_handle_ = V8::GetCurrentPlatform()->PostJob( TaskPriority::kUserVisible, std::make_unique<JobTask>(this)); DCHECK(is_enabled()); @@ -188,6 +195,9 @@ void MaglevConcurrentDispatcher::FinalizeFinishedJobs() { while (!outgoing_queue_.IsEmpty()) { std::unique_ptr<MaglevCompilationJob> job; outgoing_queue_.Dequeue(&job); + TRACE_EVENT_WITH_FLOW0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), + "V8.MaglevConcurrentFinalize", job.get(), + TRACE_EVENT_FLAG_FLOW_IN); Compiler::FinalizeMaglevCompilationJob(job.get(), isolate_); } } diff --git a/deps/v8/src/maglev/maglev-concurrent-dispatcher.h b/deps/v8/src/maglev/maglev-concurrent-dispatcher.h index 5510fa7c9efc14..09f046eee2a74b 100644 --- a/deps/v8/src/maglev/maglev-concurrent-dispatcher.h +++ b/deps/v8/src/maglev/maglev-concurrent-dispatcher.h @@ -56,6 +56,8 @@ class MaglevCompilationJob final : public OptimizedCompilationJob { Handle<JSFunction> function() const; + bool specialize_to_function_context() const; + base::TimeDelta time_taken_to_prepare() { return time_taken_to_prepare_; } base::TimeDelta time_taken_to_execute() { return time_taken_to_execute_; } base::TimeDelta time_taken_to_finalize() { return time_taken_to_finalize_; } diff --git a/deps/v8/src/maglev/maglev-graph-builder.cc b/deps/v8/src/maglev/maglev-graph-builder.cc index 27d0b7d75d1e84..d47b82713b4b96 100644 --- a/deps/v8/src/maglev/maglev-graph-builder.cc +++ b/deps/v8/src/maglev/maglev-graph-builder.cc @@ -9,6 +9,7 @@ #include "src/builtins/builtins-constructor.h" #include "src/codegen/interface-descriptors-inl.h" #include "src/common/globals.h" +#include "src/compiler/access-info.h" #include "src/compiler/compilation-dependencies.h" #include "src/compiler/feedback-source.h" #include "src/compiler/heap-refs.h" @@ -30,10 +31,59 @@ #include "src/objects/property-details.h" #include "src/objects/slots-inl.h" -namespace v8 { -namespace internal { +namespace v8::internal::maglev { -namespace maglev { +namespace { + +ValueNode* TryGetParentContext(ValueNode* node) { + if (CreateFunctionContext* n = node->TryCast<CreateFunctionContext>()) { + return n->context().node(); + } + + if (CallRuntime* n = node->TryCast<CallRuntime>()) { + switch (n->function_id()) { + case Runtime::kPushBlockContext: + case Runtime::kPushCatchContext: + case Runtime::kNewFunctionContext: + return n->context().node(); + default: + break; + } + } + + return nullptr; +} + +// Attempts to walk up the context chain through the graph in order to reduce +// depth and thus the number of runtime loads. 
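// A standalone sketch of the walk the comment above describes (hypothetical
// node type, not the Maglev IR; the real implementation follows): when the
// node producing a context is itself a context-creating operation, its
// parent-context input is known statically, so one level of the chain is
// resolved at compile time instead of emitting a LoadTaggedField of
// Context::PREVIOUS_INDEX.
#include <cstddef>

struct ExampleNode {
  ExampleNode* parent_context = nullptr;  // non-null for context creators
};

void ExampleMinimizeDepth(ExampleNode** context, std::size_t* depth) {
  while (*depth > 0 && (*context)->parent_context != nullptr) {
    *context = (*context)->parent_context;
    --*depth;
  }
}

int main() {
  ExampleNode outer;
  ExampleNode inner{&outer};
  ExampleNode* ctx = &inner;
  std::size_t depth = 1;
  ExampleMinimizeDepth(&ctx, &depth);  // now ctx == &outer and depth == 0
}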
+void MinimizeContextChainDepth(ValueNode** context, size_t* depth) { + while (*depth > 0) { + ValueNode* parent_context = TryGetParentContext(*context); + if (parent_context == nullptr) return; + *context = parent_context; + (*depth)--; + } +} + +class FunctionContextSpecialization final : public AllStatic { + public: + static base::Optional<compiler::ContextRef> TryToRef( + const MaglevCompilationUnit* unit, ValueNode* context, size_t* depth) { + DCHECK(unit->info()->specialize_to_function_context()); + base::Optional<compiler::ContextRef> ref; + if (InitialValue* n = context->TryCast<InitialValue>()) { + if (n->source().is_current_context()) { + ref = unit->function().context(); + } + } else if (Constant* n = context->TryCast<Constant>()) { + ref = n->ref().AsContext(); + } + if (!ref.has_value()) return {}; + return ref->previous(depth); + } +}; + +} // namespace MaglevGraphBuilder::MaglevGraphBuilder(LocalIsolate* local_isolate, MaglevCompilationUnit* compilation_unit, @@ -76,11 +126,10 @@ MaglevGraphBuilder::MaglevGraphBuilder(LocalIsolate* local_isolate, void MaglevGraphBuilder::StartPrologue() { current_block_ = zone()->New<BasicBlock>(nullptr); - block_offset_ = -1; } BasicBlock* MaglevGraphBuilder::EndPrologue() { - BasicBlock* first_block = CreateBlock<Jump>({}, &jump_targets_[0]); + BasicBlock* first_block = FinishBlock<Jump>({}, &jump_targets_[0]); MergeIntoFrameState(first_block, 0); return first_block; } @@ -133,12 +182,12 @@ void MaglevGraphBuilder::BuildMergeStates() { const compiler::LoopInfo& loop_info = offset_and_info.second; const compiler::BytecodeLivenessState* liveness = GetInLivenessFor(offset); DCHECK_NULL(merge_states_[offset]); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << "- Creating loop merge state at @" << offset << std::endl; } merge_states_[offset] = MergePointInterpreterFrameState::NewForLoop( - *compilation_unit_, offset, NumPredecessors(offset), liveness, - &loop_info); + current_interpreter_frame_, *compilation_unit_, offset, + NumPredecessors(offset), liveness, &loop_info); } if (bytecode().handler_table_size() > 0) { @@ -150,7 +199,7 @@ void MaglevGraphBuilder::BuildMergeStates() { GetInLivenessFor(offset); DCHECK_EQ(NumPredecessors(offset), 0); DCHECK_NULL(merge_states_[offset]); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << "- Creating exception merge state at @" << offset << ", context register r" << context_reg.index() << std::endl; } @@ -326,13 +375,9 @@ void MaglevGraphBuilder::BuildGenericBinarySmiOperationNode() { template <Operation kOperation> void MaglevGraphBuilder::BuildInt32BinaryOperationNode() { // TODO(v8:7700): Do constant folding. - ValueNode *left, *right; - if (IsRegisterEqualToAccumulator(0)) { - left = right = LoadRegisterInt32(0); - } else { - left = LoadRegisterInt32(0); - right = GetAccumulatorInt32(); - } + ValueNode* left = LoadRegisterInt32(0); + ValueNode* right = GetAccumulatorInt32(); + SetAccumulator(AddNewInt32BinaryOperationNode<kOperation>({left, right})); } @@ -362,13 +407,9 @@ void MaglevGraphBuilder::BuildFloat64BinarySmiOperationNode() { template <Operation kOperation> void MaglevGraphBuilder::BuildFloat64BinaryOperationNode() { // TODO(v8:7700): Do constant folding. 
- ValueNode *left, *right; - if (IsRegisterEqualToAccumulator(0)) { - left = right = LoadRegisterFloat64(0); - } else { - left = LoadRegisterFloat64(0); - right = GetAccumulatorFloat64(); - } + ValueNode* left = LoadRegisterFloat64(0); + ValueNode* right = GetAccumulatorFloat64(); + SetAccumulator(AddNewFloat64BinaryOperationNode<kOperation>({left, right})); } @@ -488,9 +529,21 @@ bool MaglevGraphBuilder::TryBuildCompareOperation(Operation operation, } BasicBlock* block = FinishBlock<CompareControlNode>( - next_offset(), {left, right}, operation, &jump_targets_[true_offset], + {left, right}, operation, &jump_targets_[true_offset], &jump_targets_[false_offset]); + if (true_offset == iterator_.GetJumpTargetOffset()) { + block->control_node() + ->Cast<BranchControlNode>() + ->set_true_interrupt_correction( + iterator_.GetRelativeJumpTargetOffset()); + } else { + block->control_node() + ->Cast<BranchControlNode>() + ->set_false_interrupt_correction( + iterator_.GetRelativeJumpTargetOffset()); + } MergeIntoFrameState(block, iterator_.GetJumpTargetOffset()); + StartFallthroughBlock(next_offset(), block); return true; } @@ -504,13 +557,9 @@ void MaglevGraphBuilder::VisitCompareOperation() { return; case CompareOperationHint::kSignedSmall: if (BinaryOperationHasInt32FastPath<kOperation>()) { - ValueNode *left, *right; - if (IsRegisterEqualToAccumulator(0)) { - left = right = LoadRegisterInt32(0); - } else { - left = LoadRegisterInt32(0); - right = GetAccumulatorInt32(); - } + ValueNode* left = LoadRegisterInt32(0); + ValueNode* right = GetAccumulatorInt32(); + if (TryBuildCompareOperation<BranchIfInt32Compare>(kOperation, left, right)) { return; @@ -522,13 +571,9 @@ void MaglevGraphBuilder::VisitCompareOperation() { break; case CompareOperationHint::kNumber: if (BinaryOperationHasFloat64FastPath<kOperation>()) { - ValueNode *left, *right; - if (IsRegisterEqualToAccumulator(0)) { - left = right = LoadRegisterFloat64(0); - } else { - left = LoadRegisterFloat64(0); - right = GetAccumulatorFloat64(); - } + ValueNode* left = LoadRegisterFloat64(0); + ValueNode* right = GetAccumulatorFloat64(); + if (TryBuildCompareOperation<BranchIfFloat64Compare>(kOperation, left, right)) { return; @@ -549,10 +594,28 @@ void MaglevGraphBuilder::VisitCompareOperation() { kOperation == Operation::kStrictEqual); ValueNode *left, *right; if (IsRegisterEqualToAccumulator(0)) { - left = right = LoadRegister<CheckedInternalizedString>(0); + interpreter::Register reg = iterator_.GetRegisterOperand(0); + ValueNode* value = GetTaggedValue(reg); + if (!value->Is<CheckedInternalizedString>()) { + value = AddNewNode<CheckedInternalizedString>({value}); + current_interpreter_frame_.set(reg, value); + current_interpreter_frame_.set( + interpreter::Register::virtual_accumulator(), value); + } + left = right = value; } else { - left = LoadRegister<CheckedInternalizedString>(0); - right = GetAccumulator<CheckedInternalizedString>(); + interpreter::Register reg = iterator_.GetRegisterOperand(0); + left = GetTaggedValue(reg); + if (!left->Is<CheckedInternalizedString>()) { + left = AddNewNode<CheckedInternalizedString>({left}); + current_interpreter_frame_.set(reg, left); + } + right = GetAccumulatorTagged(); + if (!right->Is<CheckedInternalizedString>()) { + right = AddNewNode<CheckedInternalizedString>({right}); + current_interpreter_frame_.set( + interpreter::Register::virtual_accumulator(), right); + } } if (TryBuildCompareOperation<BranchIfReferenceCompare>(kOperation, left, right)) { @@ -564,16 +627,10 @@ void 
MaglevGraphBuilder::VisitCompareOperation() { case CompareOperationHint::kSymbol: { DCHECK(kOperation == Operation::kEqual || kOperation == Operation::kStrictEqual); - ValueNode *left, *right; - if (IsRegisterEqualToAccumulator(0)) { - left = right = LoadRegisterTagged(0); - BuildCheckSymbol(left); - } else { - left = LoadRegisterTagged(0); - right = GetAccumulatorTagged(); - BuildCheckSymbol(left); - BuildCheckSymbol(right); - } + ValueNode* left = LoadRegisterTagged(0); + ValueNode* right = GetAccumulatorTagged(); + BuildCheckSymbol(left); + BuildCheckSymbol(right); if (TryBuildCompareOperation<BranchIfReferenceCompare>(kOperation, left, right)) { return; @@ -617,12 +674,69 @@ void MaglevGraphBuilder::VisitLdaConstant() { SetAccumulator(GetConstant(GetRefOperand<HeapObject>(0))); } -void MaglevGraphBuilder::VisitLdaContextSlot() { - ValueNode* context = LoadRegisterTagged(0); - int slot_index = iterator_.GetIndexOperand(1); - int depth = iterator_.GetUnsignedImmediateOperand(2); +bool MaglevGraphBuilder::TrySpecializeLoadContextSlotToFunctionContext( + ValueNode** context, size_t* depth, int slot_index, + ContextSlotMutability slot_mutability) { + DCHECK(compilation_unit_->info()->specialize_to_function_context()); + + size_t new_depth = *depth; + base::Optional<compiler::ContextRef> maybe_context_ref = + FunctionContextSpecialization::TryToRef(compilation_unit_, *context, + &new_depth); + if (!maybe_context_ref.has_value()) return false; + + compiler::ContextRef context_ref = maybe_context_ref.value(); + if (slot_mutability == kMutable || new_depth != 0) { + *depth = new_depth; + *context = GetConstant(context_ref); + return false; + } + + base::Optional<compiler::ObjectRef> maybe_slot_value = + context_ref.get(slot_index); + if (!maybe_slot_value.has_value()) { + *depth = new_depth; + *context = GetConstant(context_ref); + return false; + } + + compiler::ObjectRef slot_value = maybe_slot_value.value(); + if (slot_value.IsHeapObject()) { + // Even though the context slot is immutable, the context might have escaped + // before the function to which it belongs has initialized the slot. We + // must be conservative and check if the value in the slot is currently the + // hole or undefined. Only if it is neither of these, can we be sure that it + // won't change anymore. + // + // See also: JSContextSpecialization::ReduceJSLoadContext. + compiler::OddballType oddball_type = + slot_value.AsHeapObject().map().oddball_type(); + if (oddball_type == compiler::OddballType::kUndefined || + oddball_type == compiler::OddballType::kHole) { + *depth = new_depth; + *context = GetConstant(context_ref); + return false; + } + } - for (int i = 0; i < depth; ++i) { + // Fold the load of the immutable slot. + + SetAccumulator(GetConstant(slot_value)); + return true; +} + +void MaglevGraphBuilder::BuildLoadContextSlot( + ValueNode* context, size_t depth, int slot_index, + ContextSlotMutability slot_mutability) { + MinimizeContextChainDepth(&context, &depth); + + if (compilation_unit_->info()->specialize_to_function_context() && + TrySpecializeLoadContextSlotToFunctionContext( + &context, &depth, slot_index, slot_mutability)) { + return; // Our work here is done. 
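// The hole/undefined guard in TrySpecializeLoadContextSlotToFunctionContext
// above, as a standalone sketch (hypothetical types): an immutable context
// slot may only be constant-folded once it holds an initialized value,
// because the context can escape before the owning function runs the slot's
// initializer.
#include <optional>

enum class ExampleValue { kTheHole, kUndefined, kInitialized };

std::optional<ExampleValue> ExampleTryFoldImmutableSlot(ExampleValue slot) {
  if (slot == ExampleValue::kTheHole || slot == ExampleValue::kUndefined) {
    return std::nullopt;  // not initialized yet: keep the runtime load
  }
  return slot;  // an initialized immutable slot can be embedded as a constant
}

int main() {
  return ExampleTryFoldImmutableSlot(ExampleValue::kInitialized) ? 0 : 1;
}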
+ } + + for (size_t i = 0; i < depth; ++i) { context = AddNewNode<LoadTaggedField>( {context}, Context::OffsetOfElementAt(Context::PREVIOUS_INDEX)); } @@ -630,28 +744,47 @@ void MaglevGraphBuilder::VisitLdaContextSlot() { SetAccumulator(AddNewNode<LoadTaggedField>( {context}, Context::OffsetOfElementAt(slot_index))); } + +void MaglevGraphBuilder::VisitLdaContextSlot() { + ValueNode* context = LoadRegisterTagged(0); + int slot_index = iterator_.GetIndexOperand(1); + size_t depth = iterator_.GetUnsignedImmediateOperand(2); + BuildLoadContextSlot(context, depth, slot_index, kMutable); +} void MaglevGraphBuilder::VisitLdaImmutableContextSlot() { - // TODO(leszeks): Consider context specialising. - VisitLdaContextSlot(); + ValueNode* context = LoadRegisterTagged(0); + int slot_index = iterator_.GetIndexOperand(1); + size_t depth = iterator_.GetUnsignedImmediateOperand(2); + BuildLoadContextSlot(context, depth, slot_index, kImmutable); } void MaglevGraphBuilder::VisitLdaCurrentContextSlot() { ValueNode* context = GetContext(); int slot_index = iterator_.GetIndexOperand(0); - - SetAccumulator(AddNewNode<LoadTaggedField>( - {context}, Context::OffsetOfElementAt(slot_index))); + BuildLoadContextSlot(context, 0, slot_index, kMutable); } void MaglevGraphBuilder::VisitLdaImmutableCurrentContextSlot() { - // TODO(leszeks): Consider context specialising. - VisitLdaCurrentContextSlot(); + ValueNode* context = GetContext(); + int slot_index = iterator_.GetIndexOperand(0); + BuildLoadContextSlot(context, 0, slot_index, kImmutable); } void MaglevGraphBuilder::VisitStaContextSlot() { ValueNode* context = LoadRegisterTagged(0); int slot_index = iterator_.GetIndexOperand(1); - int depth = iterator_.GetUnsignedImmediateOperand(2); + size_t depth = iterator_.GetUnsignedImmediateOperand(2); + + MinimizeContextChainDepth(&context, &depth); + + if (compilation_unit_->info()->specialize_to_function_context()) { + base::Optional<compiler::ContextRef> maybe_ref = + FunctionContextSpecialization::TryToRef(compilation_unit_, context, + &depth); + if (maybe_ref.has_value()) { + context = GetConstant(maybe_ref.value()); + } + } - for (int i = 0; i < depth; ++i) { + for (size_t i = 0; i < depth; ++i) { context = AddNewNode<LoadTaggedField>( {context}, Context::OffsetOfElementAt(Context::PREVIOUS_INDEX)); } @@ -918,28 +1051,54 @@ void MaglevGraphBuilder::VisitStaLookupSlot() { SetAccumulator(BuildCallRuntime(StaLookupSlotFunction(flags), {name, value})); } +namespace { +NodeType StaticTypeForNode(ValueNode* node) { + DCHECK(node->is_tagged()); + switch (node->opcode()) { + case Opcode::kCheckedSmiTag: + case Opcode::kSmiConstant: + return NodeType::kSmi; + case Opcode::kConstant: { + compiler::HeapObjectRef ref = node->Cast<Constant>()->object(); + if (ref.IsString()) { + return NodeType::kString; + } else if (ref.IsSymbol()) { + return NodeType::kSymbol; + } else if (ref.IsHeapNumber()) { + return NodeType::kHeapNumber; + } + return NodeType::kHeapObjectWithKnownMap; + } + default: + return NodeType::kUnknown; + } +} +} // namespace + void MaglevGraphBuilder::BuildCheckSmi(ValueNode* object) { - NodeInfo* known_info = known_node_aspects().GetInfoFor(object); - if (NodeInfo::IsSmi(known_info)) return; + NodeInfo* known_info = known_node_aspects().GetOrCreateInfoFor(object); + if (known_info->is_smi()) return; + known_info->type = StaticTypeForNode(object); + if (known_info->is_smi()) return; // TODO(leszeks): Figure out a way to also handle CheckedSmiUntag. 
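// Sketch of the refinement order the BuildCheck* helpers in this hunk now
// follow (hypothetical types): a runtime check node is emitted only when
// neither the previously recorded type nor the node's statically known type
// already proves the property.
enum class ExampleType { kUnknown, kSmi };

struct ExampleNodeInfo { ExampleType type = ExampleType::kUnknown; };

void ExampleBuildCheckSmi(ExampleNodeInfo& info, ExampleType static_type) {
  if (info.type == ExampleType::kSmi) return;  // already proven earlier
  info.type = static_type;                     // try the static shortcut
  if (info.type == ExampleType::kSmi) return;
  // Otherwise emit a check (AddNewNode<CheckSmi> in the real code)...
  info.type = ExampleType::kSmi;               // ...and record the result.
}

int main() {
  ExampleNodeInfo info;
  ExampleBuildCheckSmi(info, ExampleType::kSmi);  // no check node needed
}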
AddNewNode<CheckSmi>({object}); - known_node_aspects().InsertOrUpdateNodeType(object, known_info, - NodeType::kSmi); + known_info->type = NodeType::kSmi; } void MaglevGraphBuilder::BuildCheckHeapObject(ValueNode* object) { - NodeInfo* known_info = known_node_aspects().GetInfoFor(object); - if (NodeInfo::IsAnyHeapObject(known_info)) return; + NodeInfo* known_info = known_node_aspects().GetOrCreateInfoFor(object); + if (known_info->is_any_heap_object()) return; + known_info->type = StaticTypeForNode(object); + if (known_info->is_any_heap_object()) return; AddNewNode<CheckHeapObject>({object}); - known_node_aspects().InsertOrUpdateNodeType(object, known_info, - NodeType::kAnyHeapObject); + known_info->type = NodeType::kAnyHeapObject; } namespace { CheckType GetCheckType(NodeInfo* known_info) { - if (NodeInfo::IsAnyHeapObject(known_info)) { + if (known_info->is_any_heap_object()) { return CheckType::kOmitHeapObjectCheck; } return CheckType::kCheckHeapObject; @@ -947,21 +1106,23 @@ CheckType GetCheckType(NodeInfo* known_info) { } // namespace void MaglevGraphBuilder::BuildCheckString(ValueNode* object) { - NodeInfo* known_info = known_node_aspects().GetInfoFor(object); - if (NodeInfo::IsString(known_info)) return; + NodeInfo* known_info = known_node_aspects().GetOrCreateInfoFor(object); + if (known_info->is_string()) return; + known_info->type = StaticTypeForNode(object); + if (known_info->is_string()) return; AddNewNode<CheckString>({object}, GetCheckType(known_info)); - known_node_aspects().InsertOrUpdateNodeType(object, known_info, - NodeType::kString); + known_info->type = NodeType::kString; } void MaglevGraphBuilder::BuildCheckSymbol(ValueNode* object) { - NodeInfo* known_info = known_node_aspects().GetInfoFor(object); - if (NodeInfo::IsSymbol(known_info)) return; + NodeInfo* known_info = known_node_aspects().GetOrCreateInfoFor(object); + if (known_info->is_symbol()) return; + known_info->type = StaticTypeForNode(object); + if (known_info->is_symbol()) return; AddNewNode<CheckSymbol>({object}, GetCheckType(known_info)); - known_node_aspects().InsertOrUpdateNodeType(object, known_info, - NodeType::kSymbol); + known_info->type = NodeType::kSymbol; } void MaglevGraphBuilder::BuildMapCheck(ValueNode* object, @@ -975,10 +1136,32 @@ void MaglevGraphBuilder::BuildMapCheck(ValueNode* object, // Map is already checked. return; } - // TODO(leszeks): Insert an unconditional deopt if the known type doesn't - // match the required type. + // TODO(leszeks): Insert an unconditional deopt if the known map doesn't + // match the required map. + } + NodeInfo* known_info = known_node_aspects().GetOrCreateInfoFor(object); + if (known_info->type == NodeType::kUnknown) { + known_info->type = StaticTypeForNode(object); + if (known_info->type == NodeType::kHeapObjectWithKnownMap) { + // The only case where the type becomes a heap-object with a known map is + // when the object is a constant. + DCHECK(object->Is<Constant>()); + // For constants with stable maps that match the desired map, we don't + // need to emit a map check, and can use the dependency -- we can't do + // this for unstable maps because the constant could migrate during + // compilation. + // TODO(leszeks): Insert an unconditional deopt if the constant map + // doesn't match the required map. 
+ compiler::MapRef constant_map = object->Cast<Constant>()->object().map(); + if (constant_map.equals(map) && map.is_stable()) { + DCHECK_EQ(&map_of_maps, &known_node_aspects().stable_maps); + map_of_maps.emplace(object, map); + broker()->dependencies()->DependOnStableMap(map); + return; + } + } } - NodeInfo* known_info = known_node_aspects().GetInfoFor(object); + if (map.is_migration_target()) { AddNewNode<CheckMapsWithMigration>({object}, map, GetCheckType(known_info)); } else { @@ -986,210 +1169,384 @@ void MaglevGraphBuilder::BuildMapCheck(ValueNode* object, } map_of_maps.emplace(object, map); if (map.is_stable()) { - compilation_unit_->broker()->dependencies()->DependOnStableMap(map); + broker()->dependencies()->DependOnStableMap(map); + } + known_info->type = NodeType::kHeapObjectWithKnownMap; +} + +bool MaglevGraphBuilder::TryFoldLoadDictPrototypeConstant( + compiler::PropertyAccessInfo access_info) { + DCHECK(V8_DICT_PROPERTY_CONST_TRACKING_BOOL); + DCHECK(access_info.IsDictionaryProtoDataConstant()); + DCHECK(access_info.holder().has_value()); + + base::Optional<compiler::ObjectRef> constant = + access_info.holder()->GetOwnDictionaryProperty( + access_info.dictionary_index(), broker()->dependencies()); + if (!constant.has_value()) return false; + + for (compiler::MapRef map : access_info.lookup_start_object_maps()) { + Handle<Map> map_handle = map.object(); + // Non-JSReceivers that passed AccessInfoFactory::ComputePropertyAccessInfo + // must have different lookup start map. + if (!map_handle->IsJSReceiverMap()) { + // Perform the implicit ToObject for primitives here. + // Implemented according to ES6 section 7.3.2 GetV (V, P). + JSFunction constructor = + Map::GetConstructorFunction( + *map_handle, *broker()->target_native_context().object()) + .value(); + // {constructor.initial_map()} is loaded/stored with acquire-release + // semantics for constructors. + map = MakeRefAssumeMemoryFence(broker(), constructor.initial_map()); + DCHECK(map.object()->IsJSObjectMap()); + } + broker()->dependencies()->DependOnConstantInDictionaryPrototypeChain( + map, access_info.name(), constant.value(), PropertyKind::kData); + } + + SetAccumulator(GetConstant(constant.value())); + return true; +} + +bool MaglevGraphBuilder::TryFoldLoadConstantDataField( + compiler::PropertyAccessInfo access_info) { + if (access_info.holder().has_value()) { + base::Optional<compiler::ObjectRef> constant = + access_info.holder()->GetOwnFastDataProperty( + access_info.field_representation(), access_info.field_index(), + broker()->dependencies()); + if (constant.has_value()) { + SetAccumulator(GetConstant(constant.value())); + return true; + } } - known_node_aspects().InsertOrUpdateNodeType( - object, known_info, NodeType::kHeapObjectWithKnownMap); + // TODO(victorgomes): Check if lookup_start_object is a constant object and + // unfold the load. + return false; } -bool MaglevGraphBuilder::TryBuildMonomorphicLoad(ValueNode* receiver, - ValueNode* lookup_start_object, - const compiler::MapRef& map, - MaybeObjectHandle handler) { - if (handler.is_null()) return false; +bool MaglevGraphBuilder::TryBuildPropertyGetterCall( + compiler::PropertyAccessInfo access_info, ValueNode* receiver) { + compiler::ObjectRef constant = access_info.constant().value(); - if (handler->IsSmi()) { - return TryBuildMonomorphicLoadFromSmiHandler(receiver, lookup_start_object, - map, handler->ToSmi().value()); + if (access_info.IsDictionaryProtoAccessorConstant()) { + // For fast mode holders we recorded dependencies in BuildPropertyLoad. 
+ for (const compiler::MapRef map : access_info.lookup_start_object_maps()) { + broker()->dependencies()->DependOnConstantInDictionaryPrototypeChain( + map, access_info.name(), constant, PropertyKind::kAccessor); + } } - HeapObject ho_handler; - if (!handler->GetHeapObject(&ho_handler)) return false; - if (ho_handler.IsCodeT()) { - // TODO(leszeks): Call the code object directly. - return false; - } else if (ho_handler.IsAccessorPair()) { - // TODO(leszeks): Call the getter directly. - return false; + // Introduce the call to the getter function. + if (constant.IsJSFunction()) { + Call* call = CreateNewNode<Call>(Call::kFixedInputCount + 1, + ConvertReceiverMode::kNotNullOrUndefined, + GetConstant(constant), GetContext()); + call->set_arg(0, receiver); + SetAccumulator(AddNode(call)); + return true; } else { - return TryBuildMonomorphicLoadFromLoadHandler( - receiver, lookup_start_object, map, LoadHandler::cast(ho_handler)); + // TODO(victorgomes): API calls. + return false; } } -bool MaglevGraphBuilder::TryBuildMonomorphicLoadFromSmiHandler( - ValueNode* receiver, ValueNode* lookup_start_object, - const compiler::MapRef& map, int32_t handler) { - // Smi handler, emit a map check and LoadField. - LoadHandler::Kind kind = LoadHandler::KindBits::decode(handler); - if (kind != LoadHandler::Kind::kField) return false; - if (LoadHandler::IsWasmStructBits::decode(handler)) return false; +bool MaglevGraphBuilder::TryBuildPropertySetterCall( + compiler::PropertyAccessInfo access_info, ValueNode* receiver, + ValueNode* value) { + compiler::ObjectRef constant = access_info.constant().value(); + if (constant.IsJSFunction()) { + Call* call = CreateNewNode<Call>(Call::kFixedInputCount + 2, + ConvertReceiverMode::kNotNullOrUndefined, + GetConstant(constant), GetContext()); + call->set_arg(0, receiver); + call->set_arg(1, value); + SetAccumulator(AddNode(call)); + return true; + } else { + // TODO(victorgomes): API calls. + return false; + } +} - BuildMapCheck(lookup_start_object, map); +void MaglevGraphBuilder::BuildLoadField( + compiler::PropertyAccessInfo access_info, ValueNode* lookup_start_object) { + if (TryFoldLoadConstantDataField(access_info)) return; + // Resolve property holder. ValueNode* load_source; - if (LoadHandler::IsInobjectBits::decode(handler)) { - load_source = lookup_start_object; + if (access_info.holder().has_value()) { + load_source = GetConstant(access_info.holder().value()); } else { + load_source = lookup_start_object; + } + + FieldIndex field_index = access_info.field_index(); + if (!field_index.is_inobject()) { // The field is in the property array, first load it from there. load_source = AddNewNode<LoadTaggedField>( - {lookup_start_object}, JSReceiver::kPropertiesOrHashOffset); - } - int field_index = LoadHandler::FieldIndexBits::decode(handler); - if (LoadHandler::IsDoubleBits::decode(handler)) { - FieldIndex field = FieldIndex::ForSmiLoadHandler(*map.object(), handler); - DescriptorArray descriptors = *map.instance_descriptors().object(); - InternalIndex index = - descriptors.Search(field.property_index(), *map.object()); - DCHECK(index.is_found()); - DCHECK(Representation::Double().CanBeInPlaceChangedTo( - descriptors.GetDetails(index).representation())); - const compiler::CompilationDependency* dep = - broker()->dependencies()->FieldRepresentationDependencyOffTheRecord( - map, index, Representation::Double()); - broker()->dependencies()->RecordDependency(dep); + {load_source}, JSReceiver::kPropertiesOrHashOffset); + } + // Do the load. 
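// The in-object / out-of-object split in BuildLoadField above, as a
// standalone sketch (hypothetical layout): out-of-object fields cost one
// extra indirection through the properties backing store before the
// offset-based load.
#include <cstdint>
#include <cstdio>

struct ExampleProps { std::intptr_t slots[4]; };

struct ExampleObject {
  ExampleProps* properties;   // backing store for out-of-object fields
  std::intptr_t inobject[2];  // fields allocated inside the object itself
};

std::intptr_t ExampleLoadField(ExampleObject* o, bool is_inobject, int i) {
  return is_inobject ? o->inobject[i]            // single load
                     : o->properties->slots[i];  // load array, then field
}

int main() {
  ExampleProps p{{1, 2, 3, 4}};
  ExampleObject o{&p, {5, 6}};
  std::printf("%ld\n", static_cast<long>(ExampleLoadField(&o, false, 0)));
}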
+ if (field_index.is_double()) { SetAccumulator( - AddNewNode<LoadDoubleField>({load_source}, field_index * kTaggedSize)); + AddNewNode<LoadDoubleField>({load_source}, field_index.offset())); } else { SetAccumulator( - AddNewNode<LoadTaggedField>({load_source}, field_index * kTaggedSize)); + AddNewNode<LoadTaggedField>({load_source}, field_index.offset())); } - return true; } -bool MaglevGraphBuilder::TryBuildMonomorphicLoadFromLoadHandler( - ValueNode* receiver, ValueNode* lookup_start_object, - const compiler::MapRef& map, LoadHandler handler) { - Object maybe_smi_handler = handler.smi_handler(local_isolate_); - if (!maybe_smi_handler.IsSmi()) return false; - int smi_handler = Smi::cast(maybe_smi_handler).value(); - LoadHandler::Kind kind = LoadHandler::KindBits::decode(smi_handler); - bool do_access_check_on_lookup_start_object = - LoadHandler::DoAccessCheckOnLookupStartObjectBits::decode(smi_handler); - bool lookup_on_lookup_start_object = - LoadHandler::LookupOnLookupStartObjectBits::decode(smi_handler); - if (lookup_on_lookup_start_object) return false; - if (kind != LoadHandler::Kind::kConstantFromPrototype && - kind != LoadHandler::Kind::kAccessorFromPrototype) - return false; +bool MaglevGraphBuilder::TryBuildStoreField( + compiler::PropertyAccessInfo access_info, ValueNode* receiver) { + FieldIndex field_index = access_info.field_index(); + Representation field_representation = access_info.field_representation(); - if (map.IsStringMap()) { - // Check for string maps before checking if we need to do an access check. - // Primitive strings always get the prototype from the native context - // they're operated on, so they don't need the access check. - BuildCheckString(lookup_start_object); - } else if (do_access_check_on_lookup_start_object) { - return false; + // TODO(victorgomes): Support double stores. + if (field_representation.IsDouble()) return false; + + // TODO(victorgomes): Support transition maps. + if (access_info.HasTransitionMap()) return false; + + ValueNode* store_target; + if (field_index.is_inobject()) { + store_target = receiver; } else { - BuildMapCheck(lookup_start_object, map); + // The field is in the property array, first load it from there. + store_target = AddNewNode<LoadTaggedField>( + {receiver}, JSReceiver::kPropertiesOrHashOffset); } - Object validity_cell = handler.validity_cell(local_isolate_); - if (validity_cell.IsCell(local_isolate_)) { - compiler::MapRef receiver_map = map; - if (receiver_map.IsPrimitiveMap()) { - // Perform the implicit ToObject for primitives here. - // Implemented according to ES6 section 7.3.2 GetV (V, P). - // Note: Keep sync'd with AccessInfoFactory::ComputePropertyAccessInfo. - base::Optional<compiler::JSFunctionRef> constructor = - broker()->target_native_context().GetConstructorFunction( - receiver_map); - receiver_map = constructor.value().initial_map(broker()->dependencies()); + if (field_representation.IsSmi()) { + ValueNode* value = GetAccumulatorTagged(); + BuildCheckSmi(value); + AddNewNode<StoreTaggedFieldNoWriteBarrier>({store_target, value}, + field_index.offset()); + } else if (field_representation.IsDouble()) { + // TODO(victorgomes): Implement store double. + UNREACHABLE(); + } else { + ValueNode* value = GetAccumulatorTagged(); + if (field_representation.IsHeapObject()) { + // Emit a map check for the field type, if needed, otherwise just a + // HeapObject check. 
+ if (access_info.field_map().has_value()) { + BuildMapCheck(value, access_info.field_map().value()); + } else { + BuildCheckHeapObject(value); + } } + AddNewNode<StoreTaggedFieldWithWriteBarrier>({store_target, value}, + field_index.offset()); + } + return true; +} - compiler::MapRef proto_map = receiver_map.prototype().map(); - while (proto_map.object()->prototype_validity_cell( - local_isolate_, kRelaxedLoad) == validity_cell) { - broker()->dependencies()->DependOnStableMap(proto_map); - proto_map = proto_map.prototype().map(); +bool MaglevGraphBuilder::TryBuildPropertyLoad( + ValueNode* receiver, ValueNode* lookup_start_object, + compiler::PropertyAccessInfo const& access_info) { + if (access_info.holder().has_value() && !access_info.HasDictionaryHolder()) { + broker()->dependencies()->DependOnStablePrototypeChains( + access_info.lookup_start_object_maps(), kStartAtPrototype, + access_info.holder().value()); + } + + switch (access_info.kind()) { + case compiler::PropertyAccessInfo::kInvalid: + UNREACHABLE(); + case compiler::PropertyAccessInfo::kNotFound: + SetAccumulator(GetRootConstant(RootIndex::kUndefinedValue)); + return true; + case compiler::PropertyAccessInfo::kDataField: + case compiler::PropertyAccessInfo::kFastDataConstant: + BuildLoadField(access_info, lookup_start_object); + return true; + case compiler::PropertyAccessInfo::kDictionaryProtoDataConstant: + return TryFoldLoadDictPrototypeConstant(access_info); + case compiler::PropertyAccessInfo::kFastAccessorConstant: + case compiler::PropertyAccessInfo::kDictionaryProtoAccessorConstant: + return TryBuildPropertyGetterCall(access_info, receiver); + case compiler::PropertyAccessInfo::kModuleExport: { + ValueNode* cell = GetConstant(access_info.constant().value().AsCell()); + SetAccumulator(AddNewNode<LoadTaggedField>({cell}, Cell::kValueOffset)); + return true; } + case compiler::PropertyAccessInfo::kStringLength: + DCHECK_EQ(receiver, lookup_start_object); + SetAccumulator(AddNewNode<StringLength>({receiver})); + return true; + } +} + +bool MaglevGraphBuilder::TryBuildPropertyStore( + ValueNode* receiver, compiler::PropertyAccessInfo const& access_info) { + if (access_info.holder().has_value()) { + broker()->dependencies()->DependOnStablePrototypeChains( + access_info.lookup_start_object_maps(), kStartAtPrototype, + access_info.holder().value()); + } + + if (access_info.IsFastAccessorConstant()) { + return TryBuildPropertySetterCall(access_info, receiver, + GetAccumulatorTagged()); } else { - DCHECK_EQ(Smi::ToInt(validity_cell), Map::kPrototypeChainValid); + DCHECK(access_info.IsDataField() || access_info.IsFastDataConstant()); + return TryBuildStoreField(access_info, receiver); + } +} + +bool MaglevGraphBuilder::TryBuildPropertyAccess( + ValueNode* receiver, ValueNode* lookup_start_object, + compiler::PropertyAccessInfo const& access_info, + compiler::AccessMode access_mode) { + switch (access_mode) { + case compiler::AccessMode::kLoad: + return TryBuildPropertyLoad(receiver, lookup_start_object, access_info); + case compiler::AccessMode::kStore: + case compiler::AccessMode::kStoreInLiteral: + case compiler::AccessMode::kDefine: + DCHECK_EQ(receiver, lookup_start_object); + return TryBuildPropertyStore(receiver, access_info); + case compiler::AccessMode::kHas: + // TODO(victorgomes): BuildPropertyTest. 
+ return false; } +} - switch (kind) { - case LoadHandler::Kind::kConstantFromPrototype: { - MaybeObject value = handler.data1(local_isolate_); - if (value.IsSmi()) { - SetAccumulator(GetSmiConstant(value.ToSmi().value())); - } else { - SetAccumulator(GetConstant(MakeRefAssumeMemoryFence( - broker(), - broker()->CanonicalPersistentHandle(value.GetHeapObject())))); - } - break; +bool MaglevGraphBuilder::TryBuildNamedAccess( + ValueNode* receiver, ValueNode* lookup_start_object, + compiler::NamedAccessFeedback const& feedback, + compiler::AccessMode access_mode) { + ZoneVector<compiler::PropertyAccessInfo> access_infos(zone()); + { + ZoneVector<compiler::PropertyAccessInfo> access_infos_for_feedback(zone()); + for (const compiler::MapRef& map : feedback.maps()) { + if (map.is_deprecated()) continue; + compiler::PropertyAccessInfo access_info = + broker()->GetPropertyAccessInfo(map, feedback.name(), access_mode, + broker()->dependencies()); + access_infos_for_feedback.push_back(access_info); } - case LoadHandler::Kind::kAccessorFromPrototype: { - MaybeObject getter = handler.data1(local_isolate_); - compiler::ObjectRef getter_ref = MakeRefAssumeMemoryFence( - broker(), - broker()->CanonicalPersistentHandle(getter.GetHeapObject())); - - Call* call = CreateNewNode<Call>(Call::kFixedInputCount + 1, - ConvertReceiverMode::kNotNullOrUndefined, - GetConstant(getter_ref), GetContext()); - call->set_arg(0, receiver); - SetAccumulator(AddNode(call)); - break; + + compiler::AccessInfoFactory access_info_factory( + broker(), broker()->dependencies(), zone()); + if (!access_info_factory.FinalizePropertyAccessInfos( + access_infos_for_feedback, access_mode, &access_infos)) { + return false; } - default: - UNREACHABLE(); } - return true; -} -bool MaglevGraphBuilder::TryBuildMonomorphicElementLoad( - ValueNode* object, ValueNode* index, const compiler::MapRef& map, - MaybeObjectHandle handler) { - if (handler.is_null()) return false; + // Check for monomorphic case. + if (access_infos.size() == 1) { + compiler::PropertyAccessInfo access_info = access_infos.front(); + const compiler::MapRef& map = + access_info.lookup_start_object_maps().front(); + if (map.IsStringMap()) { + // Check for string maps before checking if we need to do an access + // check. Primitive strings always get the prototype from the native + // context they're operated on, so they don't need the access check. + BuildCheckString(lookup_start_object); + } else { + BuildMapCheck(lookup_start_object, map); + } - if (handler->IsSmi()) { - return TryBuildMonomorphicElementLoadFromSmiHandler( - object, index, map, handler->ToSmi().value()); + // Generate the actual property access. + return TryBuildPropertyAccess(receiver, lookup_start_object, access_info, + access_mode); + } else { + // TODO(victorgomes): polymorphic case. + return false; } - return false; } -bool MaglevGraphBuilder::TryBuildMonomorphicElementLoadFromSmiHandler( - ValueNode* object, ValueNode* index, const compiler::MapRef& map, - int32_t handler) { - LoadHandler::Kind kind = LoadHandler::KindBits::decode(handler); +bool MaglevGraphBuilder::TryBuildElementAccess( + ValueNode* object, ValueNode* index, + compiler::ElementAccessFeedback const& feedback) { + // TODO(victorgomes): Implement other access modes. 
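// The shape of the feedback-driven dispatch in TryBuildNamedAccess above, as
// a standalone sketch (hypothetical types): one access info is computed per
// feedback map, the infos are finalized, and only the monomorphic result is
// specialized; everything else falls back to the generic path for now.
#include <vector>

struct ExampleAccessInfo { int map_id; };

bool ExampleTryBuildNamedAccess(const std::vector<int>& feedback_maps) {
  std::vector<ExampleAccessInfo> infos;
  for (int map_id : feedback_maps) infos.push_back({map_id});
  if (infos.size() != 1) return false;  // polymorphic: bail to the generic IC
  // BuildMapCheck(...) and the specialized load/store would be emitted here.
  return true;
}

int main() {
  ExampleTryBuildNamedAccess({42});      // monomorphic: specialized access
  ExampleTryBuildNamedAccess({42, 43});  // polymorphic: generic fallback
}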
+ if (feedback.keyed_mode().access_mode() != compiler::AccessMode::kLoad) { + return false; + } - switch (kind) { - case LoadHandler::Kind::kElement: { - if (LoadHandler::AllowOutOfBoundsBits::decode(handler)) { - return false; - } - ElementsKind elements_kind = - LoadHandler::ElementsKindBits::decode(handler); - if (!IsFastElementsKind(elements_kind)) return false; + // TODO(victorgomes): Add fast path for loading from HeapConstant. + // TODO(victorgomes): Add fast path for loading from String. - // TODO(leszeks): Handle holey elements. - if (IsHoleyElementsKind(elements_kind)) return false; - DCHECK(!LoadHandler::ConvertHoleBits::decode(handler)); + compiler::AccessInfoFactory access_info_factory( + broker(), broker()->dependencies(), zone()); + ZoneVector<compiler::ElementAccessInfo> access_infos(zone()); + if (!access_info_factory.ComputeElementAccessInfos(feedback, &access_infos) || + access_infos.empty()) { + return false; + } - BuildMapCheck(object, map); - BuildCheckSmi(index); + // Check for monomorphic case. + if (access_infos.size() == 1) { + compiler::ElementAccessInfo access_info = access_infos.front(); - if (LoadHandler::IsJsArrayBits::decode(handler)) { - DCHECK(map.IsJSArrayMap()); - AddNewNode<CheckJSArrayBounds>({object, index}); - } else { - DCHECK(!map.IsJSArrayMap()); - DCHECK(map.IsJSObjectMap()); - AddNewNode<CheckJSObjectElementsBounds>({object, index}); + // TODO(victorgomes): Support elment kind transitions. + if (access_info.transition_sources().size() != 0) return false; + + // TODO(victorgomes): Support more elements kind. + ElementsKind elements_kind = access_info.elements_kind(); + if (!IsFastElementsKind(elements_kind)) return false; + if (IsHoleyElementsKind(elements_kind)) return false; + + const compiler::MapRef& map = + access_info.lookup_start_object_maps().front(); + BuildMapCheck(object, map); + + switch (index->properties().value_representation()) { + case ValueRepresentation::kTagged: { + if (SmiConstant* constant = index->TryCast<SmiConstant>()) { + index = GetInt32Constant(constant->value().value()); + } else { + NodeInfo* node_info = known_node_aspects().GetOrCreateInfoFor(index); + if (node_info->is_smi()) { + if (!node_info->int32_alternative) { + // TODO(leszeks): This could be unchecked. + node_info->int32_alternative = + AddNewNode<CheckedSmiUntag>({index}); + } + index = node_info->int32_alternative; + } else { + // TODO(leszeks): Cache this knowledge/converted value somehow on + // the node info. + index = AddNewNode<CheckedObjectToIndex>({index}); + } + } + break; } - if (elements_kind == ElementsKind::PACKED_DOUBLE_ELEMENTS) { - SetAccumulator(AddNewNode<LoadDoubleElement>({object, index})); - } else { - DCHECK(!IsDoubleElementsKind(elements_kind)); - SetAccumulator(AddNewNode<LoadTaggedElement>({object, index})); + case ValueRepresentation::kInt32: { + // Already good. + break; + } + case ValueRepresentation::kFloat64: { + // TODO(leszeks): Pass in the index register (probably the + // accumulator), so that we can save this truncation on there as a + // conversion node. 
+ index = AddNewNode<CheckedTruncateFloat64ToInt32>({index}); + break; } - return true; } - default: - return false; + + if (map.IsJSArrayMap()) { + AddNewNode<CheckJSArrayBounds>({object, index}); + } else { + DCHECK(map.IsJSObjectMap()); + AddNewNode<CheckJSObjectElementsBounds>({object, index}); + } + if (elements_kind == ElementsKind::PACKED_DOUBLE_ELEMENTS) { + SetAccumulator(AddNewNode<LoadDoubleElement>({object, index})); + } else { + DCHECK(!IsDoubleElementsKind(elements_kind)); + SetAccumulator(AddNewNode<LoadTaggedElement>({object, index})); + } + return true; + + } else { + // TODO(victorgomes): polymorphic case. + return false; } } @@ -1210,20 +1567,13 @@ void MaglevGraphBuilder::VisitGetNamedProperty() { DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess); return; - case compiler::ProcessedFeedback::kNamedAccess: { - const compiler::NamedAccessFeedback& named_feedback = - processed_feedback.AsNamedAccess(); - if (named_feedback.maps().size() != 1) break; - compiler::MapRef map = named_feedback.maps()[0]; - - // Monomorphic load, check the handler. - // TODO(leszeks): Make GetFeedbackForPropertyAccess read the handler. - MaybeObjectHandle handler = - FeedbackNexusForSlot(slot).FindHandlerForMap(map.object()); - - if (TryBuildMonomorphicLoad(object, object, map, handler)) return; - } break; - + case compiler::ProcessedFeedback::kNamedAccess: + if (TryBuildNamedAccess(object, object, + processed_feedback.AsNamedAccess(), + compiler::AccessMode::kLoad)) { + return; + } + break; default: break; } @@ -1258,20 +1608,13 @@ void MaglevGraphBuilder::VisitGetNamedPropertyFromSuper() { DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess); return; - case compiler::ProcessedFeedback::kNamedAccess: { - const compiler::NamedAccessFeedback& named_feedback = - processed_feedback.AsNamedAccess(); - if (named_feedback.maps().size() != 1) break; - compiler::MapRef map = named_feedback.maps()[0]; - - // Monomorphic load, check the handler. - // TODO(leszeks): Make GetFeedbackForPropertyAccess read the handler. - MaybeObjectHandle handler = - FeedbackNexusForSlot(slot).FindHandlerForMap(map.object()); - - if (TryBuildMonomorphicLoad(receiver, lookup_start_object, map, handler)) + case compiler::ProcessedFeedback::kNamedAccess: + if (TryBuildNamedAccess(receiver, lookup_start_object, + processed_feedback.AsNamedAccess(), + compiler::AccessMode::kLoad)) { return; - } break; + } + break; default: break; @@ -1288,7 +1631,6 @@ void MaglevGraphBuilder::VisitGetKeyedProperty() { ValueNode* object = LoadRegisterTagged(0); // TODO(leszeks): We don't need to tag the key if it's an Int32 and a simple // monomorphic element load. - ValueNode* key = GetAccumulatorTagged(); FeedbackSlot slot = GetSlotOperand(1); compiler::FeedbackSource feedback_source{feedback(), slot}; @@ -1303,19 +1645,13 @@ void MaglevGraphBuilder::VisitGetKeyedProperty() { return; case compiler::ProcessedFeedback::kElementAccess: { - const compiler::ElementAccessFeedback& element_feedback = - processed_feedback.AsElementAccess(); - if (element_feedback.transition_groups().size() != 1) break; - if (element_feedback.transition_groups()[0].size() != 1) break; - compiler::MapRef map = MakeRefAssumeMemoryFence( - broker(), element_feedback.transition_groups()[0].front()); - - // Monomorphic load, check the handler. - // TODO(leszeks): Make GetFeedbackForPropertyAccess read the handler. 
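// The int32_alternative caching in TryBuildElementAccess above, as a
// standalone sketch (hypothetical types): once a tagged index has been
// untagged, later element accesses reuse the converted node instead of
// emitting a second conversion.
#include <optional>

struct ExampleIndexInfo {
  std::optional<int> int32_alternative;  // id of the untagged node, if any
};

int ExampleGetInt32Index(ExampleIndexInfo& info, int& next_node_id) {
  if (!info.int32_alternative) {
    info.int32_alternative = next_node_id++;  // emit the untag node once
  }
  return *info.int32_alternative;  // every later access reuses the same node
}

int main() {
  ExampleIndexInfo info;
  int next_id = 100;
  int first = ExampleGetInt32Index(info, next_id);
  int second = ExampleGetInt32Index(info, next_id);  // same node, no new untag
  return first == second ? 0 : 1;
}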
- MaybeObjectHandle handler = - FeedbackNexusForSlot(slot).FindHandlerForMap(map.object()); - - if (TryBuildMonomorphicElementLoad(object, key, map, handler)) return; + // Get the accumulator without conversion. TryBuildElementAccess + // will try to pick the best representation. + ValueNode* index = current_interpreter_frame_.accumulator(); + if (TryBuildElementAccess(object, index, + processed_feedback.AsElementAccess())) { + return; + } break; } @@ -1325,6 +1661,7 @@ void MaglevGraphBuilder::VisitGetKeyedProperty() { // Create a generic store in the fallthrough. ValueNode* context = GetContext(); + ValueNode* key = GetAccumulatorTagged(); SetAccumulator( AddNewNode<GetKeyedGeneric>({context, object, key}, feedback_source)); } @@ -1332,10 +1669,21 @@ void MaglevGraphBuilder::VisitGetKeyedProperty() { void MaglevGraphBuilder::VisitLdaModuleVariable() { // LdaModuleVariable <cell_index> <depth> int cell_index = iterator_.GetImmediateOperand(0); - int depth = iterator_.GetUnsignedImmediateOperand(1); + size_t depth = iterator_.GetUnsignedImmediateOperand(1); ValueNode* context = GetContext(); - for (int i = 0; i < depth; i++) { + MinimizeContextChainDepth(&context, &depth); + + if (compilation_unit_->info()->specialize_to_function_context()) { + base::Optional<compiler::ContextRef> maybe_ref = + FunctionContextSpecialization::TryToRef(compilation_unit_, context, + &depth); + if (maybe_ref.has_value()) { + context = GetConstant(maybe_ref.value()); + } + } + + for (size_t i = 0; i < depth; i++) { context = AddNewNode<LoadTaggedField>( {context}, Context::OffsetOfElementAt(Context::PREVIOUS_INDEX)); } @@ -1366,9 +1714,21 @@ void MaglevGraphBuilder::VisitStaModuleVariable() { AbortReason::kUnsupportedModuleOperation))}); return; } + ValueNode* context = GetContext(); - int depth = iterator_.GetUnsignedImmediateOperand(1); - for (int i = 0; i < depth; i++) { + size_t depth = iterator_.GetUnsignedImmediateOperand(1); + MinimizeContextChainDepth(&context, &depth); + + if (compilation_unit_->info()->specialize_to_function_context()) { + base::Optional<compiler::ContextRef> maybe_ref = + FunctionContextSpecialization::TryToRef(compilation_unit_, context, + &depth); + if (maybe_ref.has_value()) { + context = GetConstant(maybe_ref.value()); + } + } + + for (size_t i = 0; i < depth; i++) { context = AddNewNode<LoadTaggedField>( {context}, Context::OffsetOfElementAt(Context::PREVIOUS_INDEX)); } @@ -1383,86 +1743,6 @@ void MaglevGraphBuilder::VisitStaModuleVariable() { Cell::kValueOffset); } -bool MaglevGraphBuilder::TryBuildMonomorphicStoreFromSmiHandler( - ValueNode* object, const compiler::MapRef& map, int32_t handler) { - StoreHandler::Kind kind = StoreHandler::KindBits::decode(handler); - if (kind != StoreHandler::Kind::kField) return false; - - Representation::Kind representation = - StoreHandler::RepresentationBits::decode(handler); - if (representation == Representation::kDouble) return false; - - InternalIndex descriptor_idx(StoreHandler::DescriptorBits::decode(handler)); - PropertyDetails property_details = - map.instance_descriptors().GetPropertyDetails(descriptor_idx); - - // TODO(leszeks): Allow a fast path which checks for equality with the current - // value. - if (property_details.constness() == PropertyConstness::kConst) return false; - - BuildMapCheck(object, map); - - ValueNode* store_target; - if (StoreHandler::IsInobjectBits::decode(handler)) { - store_target = object; - } else { - // The field is in the property array, first Store it from there. 
- store_target = AddNewNode<LoadTaggedField>( - {object}, JSReceiver::kPropertiesOrHashOffset); - } - - int field_index = StoreHandler::FieldIndexBits::decode(handler); - int offset = field_index * kTaggedSize; - - ValueNode* value = GetAccumulatorTagged(); - if (representation == Representation::kSmi) { - BuildCheckSmi(value); - AddNewNode<StoreTaggedFieldNoWriteBarrier>({store_target, value}, offset); - return true; - } - - if (representation == Representation::kHeapObject) { - FieldType descriptors_field_type = - map.instance_descriptors().object()->GetFieldType(descriptor_idx); - if (descriptors_field_type.IsNone()) { - // Store is not safe if the field type was cleared. Since we check this - // late, we'll emit a useless map check and maybe property store load, but - // that's fine, this case should be rare. - return false; - } - - // Emit a map check for the field type, if needed, otherwise just a - // HeapObject check. - if (descriptors_field_type.IsClass()) { - // Check that the value matches the expected field type. - base::Optional<compiler::MapRef> maybe_field_map = - TryMakeRef(broker(), descriptors_field_type.AsClass()); - if (!maybe_field_map.has_value()) return false; - - BuildMapCheck(value, *maybe_field_map); - } else { - BuildCheckHeapObject(value); - } - } - AddNewNode<StoreTaggedFieldWithWriteBarrier>({store_target, value}, offset); - return true; -} - -bool MaglevGraphBuilder::TryBuildMonomorphicStore(ValueNode* object, - const compiler::MapRef& map, - MaybeObjectHandle handler) { - if (handler.is_null()) return false; - - if (handler->IsSmi()) { - return TryBuildMonomorphicStoreFromSmiHandler(object, map, - handler->ToSmi().value()); - } - // TODO(leszeks): If we add non-Smi paths here, make sure to differentiate - // between Define and Set. - - return false; -} - void MaglevGraphBuilder::BuildLoadGlobal( compiler::NameRef name, compiler::FeedbackSource& feedback_source, TypeofMode typeof_mode) { @@ -1504,20 +1784,13 @@ void MaglevGraphBuilder::VisitSetNamedProperty() { DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess); return; - case compiler::ProcessedFeedback::kNamedAccess: { - const compiler::NamedAccessFeedback& named_feedback = - processed_feedback.AsNamedAccess(); - if (named_feedback.maps().size() != 1) break; - compiler::MapRef map = named_feedback.maps()[0]; - - // Monomorphic store, check the handler. - // TODO(leszeks): Make GetFeedbackForPropertyAccess read the handler. - MaybeObjectHandle handler = - FeedbackNexusForSlot(slot).FindHandlerForMap(map.object()); - - if (TryBuildMonomorphicStore(object, map, handler)) return; - } break; - + case compiler::ProcessedFeedback::kNamedAccess: + if (TryBuildNamedAccess(object, object, + processed_feedback.AsNamedAccess(), + compiler::AccessMode::kStore)) { + return; + } + break; default: break; } @@ -1546,19 +1819,13 @@ void MaglevGraphBuilder::VisitDefineNamedOwnProperty() { DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess); return; - case compiler::ProcessedFeedback::kNamedAccess: { - const compiler::NamedAccessFeedback& named_feedback = - processed_feedback.AsNamedAccess(); - if (named_feedback.maps().size() != 1) break; - compiler::MapRef map = named_feedback.maps()[0]; - - // Monomorphic store, check the handler. - // TODO(leszeks): Make GetFeedbackForPropertyAccess read the handler. 
- MaybeObjectHandle handler = - FeedbackNexusForSlot(slot).FindHandlerForMap(map.object()); - - if (TryBuildMonomorphicStore(object, map, handler)) return; - } break; + case compiler::ProcessedFeedback::kNamedAccess: + if (TryBuildNamedAccess(object, object, + processed_feedback.AsNamedAccess(), + compiler::AccessMode::kDefine)) { + return; + } + break; default: break; @@ -1644,14 +1911,6 @@ void MaglevGraphBuilder::VisitDefineKeyedOwnPropertyInLiteral() { {object, name, value, flags, feedback_vector, slot})); } -void MaglevGraphBuilder::VisitCollectTypeProfile() { - ValueNode* position = GetSmiConstant(GetFlag8Operand(0)); - ValueNode* value = GetAccumulatorTagged(); - ValueNode* feedback_vector = GetConstant(feedback()); - SetAccumulator(BuildCallRuntime(Runtime::kCollectTypeProfile, - {position, value, feedback_vector})); -} - void MaglevGraphBuilder::VisitAdd() { VisitBinaryOperation<Operation::kAdd>(); } void MaglevGraphBuilder::VisitSub() { VisitBinaryOperation<Operation::kSubtract>(); @@ -1802,9 +2061,15 @@ void MaglevGraphBuilder::VisitGetSuperConstructor() { StoreRegister(iterator_.GetRegisterOperand(0), map_proto); } -void MaglevGraphBuilder::VisitFindNonDefaultConstructor() { - // TODO(v8:13091): Implement. - CHECK(false); +void MaglevGraphBuilder::VisitFindNonDefaultConstructorOrConstruct() { + ValueNode* this_function = LoadRegisterTagged(0); + ValueNode* new_target = LoadRegisterTagged(1); + + CallBuiltin* call_builtin = + BuildCallBuiltin<Builtin::kFindNonDefaultConstructorOrConstruct>( + {this_function, new_target}); + auto result = iterator_.GetRegisterPairOperand(2); + StoreRegisterPair(result, call_builtin); } void MaglevGraphBuilder::InlineCallFromRegisters( @@ -1826,8 +2091,7 @@ void MaglevGraphBuilder::InlineCallFromRegisters( // Finish the current block with a jump to the inlined function. BasicBlockRef start_ref, end_ref; - BasicBlock* block = CreateBlock<JumpToInlined>({}, &start_ref, inner_unit); - ResolveJumpsToBlockAtOffset(block, block_offset_); + BasicBlock* block = FinishBlock<JumpToInlined>({}, &start_ref, inner_unit); // Manually create the prologue of the inner function graph, so that we // can manually set up the arguments. @@ -1877,10 +2141,7 @@ void MaglevGraphBuilder::InlineCallFromRegisters( inner_graph_builder.ProcessMergePoint( inner_graph_builder.inline_exit_offset()); inner_graph_builder.StartNewBlock(inner_graph_builder.inline_exit_offset()); - BasicBlock* end_block = - inner_graph_builder.CreateBlock<JumpFromInlined>({}, &end_ref); - inner_graph_builder.ResolveJumpsToBlockAtOffset( - end_block, inner_graph_builder.inline_exit_offset()); + inner_graph_builder.FinishBlock<JumpFromInlined>({}, &end_ref); // Pull the returned accumulator value out of the inlined function's final // merged return state. @@ -1893,7 +2154,6 @@ void MaglevGraphBuilder::InlineCallFromRegisters( current_block_ = zone()->New<BasicBlock>(MergePointInterpreterFrameState::New( *compilation_unit_, current_interpreter_frame_, iterator_.current_offset(), 1, block, GetInLiveness())); - block_offset_ = iterator_.current_offset(); // Set the exit JumpFromInlined to jump to this resume block. // TODO(leszeks): Passing start_ref to JumpFromInlined creates a two-element // linked list of refs. 
Consider adding a helper to explicitly set the target @@ -1952,7 +2212,7 @@ void MaglevGraphBuilder::BuildCallFromRegisters( return; case compiler::ProcessedFeedback::kCall: { - if (!FLAG_maglev_inlining) break; + if (!v8_flags.maglev_inlining) break; const compiler::CallFeedback& call_feedback = processed_feedback.AsCall(); CallFeedbackContent content = call_feedback.call_feedback_content(); @@ -2245,10 +2505,10 @@ void MaglevGraphBuilder::VisitIntrinsicAsyncGeneratorResolve( GetTaggedValue(args[2])})); } -void MaglevGraphBuilder::VisitIntrinsicAsyncGeneratorYield( +void MaglevGraphBuilder::VisitIntrinsicAsyncGeneratorYieldWithAwait( interpreter::RegisterList args) { DCHECK_EQ(args.register_count(), 3); - SetAccumulator(BuildCallBuiltin<Builtin::kAsyncGeneratorYield>( + SetAccumulator(BuildCallBuiltin<Builtin::kAsyncGeneratorYieldWithAwait>( {GetTaggedValue(args[0]), GetTaggedValue(args[1]), GetTaggedValue(args[2])})); } @@ -2258,10 +2518,12 @@ void MaglevGraphBuilder::VisitConstruct() { ValueNode* constructor = LoadRegisterTagged(0); interpreter::RegisterList args = iterator_.GetRegisterListOperand(1); ValueNode* context = GetContext(); + FeedbackSlot slot = GetSlotOperand(3); + compiler::FeedbackSource feedback_source{feedback(), slot}; size_t input_count = args.register_count() + 1 + Construct::kFixedInputCount; - Construct* construct = - CreateNewNode<Construct>(input_count, constructor, new_target, context); + Construct* construct = CreateNewNode<Construct>( + input_count, feedback_source, constructor, new_target, context); int arg_index = 0; // Add undefined receiver. construct->set_arg(arg_index++, GetRootConstant(RootIndex::kUndefinedValue)); @@ -2319,10 +2581,9 @@ void MaglevGraphBuilder::VisitTestInstanceOf() { // TODO(victorgomes): Check feedback slot and a do static lookup for // @@hasInstance. - USE(feedback_source); - ValueNode* context = GetContext(); - SetAccumulator(AddNewNode<TestInstanceOf>({context, object, callable})); + SetAccumulator( + AddNewNode<TestInstanceOf>({context, object, callable}, feedback_source)); } void MaglevGraphBuilder::VisitTestIn() { @@ -2358,6 +2619,7 @@ void MaglevGraphBuilder::BuildToNumberOrToNumeric(Object::Conversion mode) { UNREACHABLE(); case BinaryOperationHint::kNumber: case BinaryOperationHint::kBigInt: + case BinaryOperationHint::kBigInt64: AddNewNode<CheckNumber>({value}, mode); break; default: @@ -2514,6 +2776,8 @@ void MaglevGraphBuilder::VisitCreateClosure() { void MaglevGraphBuilder::VisitCreateBlockContext() { // TODO(v8:7700): Inline allocation when context is small. + // TODO(v8:7700): Update TryGetParentContext if this ever emits its own Node + // type. // CreateBlockContext <scope_info_idx> ValueNode* scope_info = GetConstant(GetRefOperand<ScopeInfo>(0)); SetAccumulator(BuildCallRuntime(Runtime::kPushBlockContext, {scope_info})); @@ -2521,6 +2785,8 @@ void MaglevGraphBuilder::VisitCreateBlockContext() { void MaglevGraphBuilder::VisitCreateCatchContext() { // TODO(v8:7700): Inline allocation when context is small. + // TODO(v8:7700): Update TryGetParentContext if this ever emits its own Node + // type. // CreateCatchContext <exception> <scope_info_idx> ValueNode* exception = LoadRegisterTagged(0); ValueNode* scope_info = GetConstant(GetRefOperand<ScopeInfo>(1)); @@ -2536,6 +2802,8 @@ void MaglevGraphBuilder::VisitCreateFunctionContext() { } void MaglevGraphBuilder::VisitCreateEvalContext() { + // TODO(v8:7700): Update TryGetParentContext if this ever emits its own Node + // type. 
compiler::ScopeInfoRef info = GetRefOperand<ScopeInfo>(0); uint32_t slot_count = iterator_.GetUnsignedImmediateOperand(1); if (slot_count <= static_cast<uint32_t>( @@ -2593,10 +2861,7 @@ void MaglevGraphBuilder::VisitJumpLoop() { BytecodeOffset(iterator_.current_offset()), compilation_unit_); BasicBlock* block = - target == block_offset_ - ? FinishBlock<JumpLoop>(next_offset(), {}, &jump_targets_[target]) - : FinishBlock<JumpLoop>(next_offset(), {}, - jump_targets_[target].block_ptr()); + FinishBlock<JumpLoop>({}, jump_targets_[target].block_ptr()); merge_states_[target]->MergeLoop(*compilation_unit_, current_interpreter_frame_, block, target); @@ -2608,8 +2873,8 @@ void MaglevGraphBuilder::VisitJump() { if (relative_jump_bytecode_offset > 0) { AddNewNode<IncreaseInterruptBudget>({}, relative_jump_bytecode_offset); } - BasicBlock* block = FinishBlock<Jump>( - next_offset(), {}, &jump_targets_[iterator_.GetJumpTargetOffset()]); + BasicBlock* block = + FinishBlock<Jump>({}, &jump_targets_[iterator_.GetJumpTargetOffset()]); MergeIntoFrameState(block, iterator_.GetJumpTargetOffset()); DCHECK_LT(next_offset(), bytecode().length()); } @@ -2663,7 +2928,7 @@ void MaglevGraphBuilder::MergeDeadIntoFrameState(int target) { // If this merge is the last one which kills a loop merge, remove that // merge state. if (merge_states_[target]->is_unreachable_loop()) { - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << "! Killing loop merge state at @" << target << std::endl; } merge_states_[target] = nullptr; @@ -2705,83 +2970,107 @@ void MaglevGraphBuilder::MergeIntoInlinedReturnFrameState( } void MaglevGraphBuilder::BuildBranchIfRootConstant(ValueNode* node, - int true_target, - int false_target, + JumpType jump_type, RootIndex root_index) { + int fallthrough_offset = next_offset(); + int jump_offset = iterator_.GetJumpTargetOffset(); + BasicBlockRef* true_target = jump_type == kJumpIfTrue + ? &jump_targets_[jump_offset] + : &jump_targets_[fallthrough_offset]; + BasicBlockRef* false_target = jump_type == kJumpIfFalse + ? 
&jump_targets_[jump_offset] + : &jump_targets_[fallthrough_offset]; BasicBlock* block = FinishBlock<BranchIfRootConstant>( - next_offset(), {node}, &jump_targets_[true_target], - &jump_targets_[false_target], root_index); - MergeIntoFrameState(block, iterator_.GetJumpTargetOffset()); + {node}, true_target, false_target, root_index); + if (jump_type == kJumpIfTrue) { + block->control_node() + ->Cast<BranchControlNode>() + ->set_true_interrupt_correction( + iterator_.GetRelativeJumpTargetOffset()); + } else { + block->control_node() + ->Cast<BranchControlNode>() + ->set_false_interrupt_correction( + iterator_.GetRelativeJumpTargetOffset()); + } + MergeIntoFrameState(block, jump_offset); + StartFallthroughBlock(fallthrough_offset, block); } -void MaglevGraphBuilder::BuildBranchIfTrue(ValueNode* node, int true_target, - int false_target) { - BuildBranchIfRootConstant(node, true_target, false_target, - RootIndex::kTrueValue); +void MaglevGraphBuilder::BuildBranchIfTrue(ValueNode* node, + JumpType jump_type) { + BuildBranchIfRootConstant(node, jump_type, RootIndex::kTrueValue); } -void MaglevGraphBuilder::BuildBranchIfNull(ValueNode* node, int true_target, - int false_target) { - BuildBranchIfRootConstant(node, true_target, false_target, - RootIndex::kNullValue); +void MaglevGraphBuilder::BuildBranchIfNull(ValueNode* node, + JumpType jump_type) { + BuildBranchIfRootConstant(node, jump_type, RootIndex::kNullValue); } void MaglevGraphBuilder::BuildBranchIfUndefined(ValueNode* node, - int true_target, - int false_target) { - BuildBranchIfRootConstant(node, true_target, false_target, - RootIndex::kUndefinedValue); + JumpType jump_type) { + BuildBranchIfRootConstant(node, jump_type, RootIndex::kUndefinedValue); } void MaglevGraphBuilder::BuildBranchIfToBooleanTrue(ValueNode* node, - int true_target, - int false_target) { - BasicBlock* block = FinishBlock<BranchIfToBooleanTrue>( - next_offset(), {node}, &jump_targets_[true_target], - &jump_targets_[false_target]); - MergeIntoFrameState(block, iterator_.GetJumpTargetOffset()); + JumpType jump_type) { + int fallthrough_offset = next_offset(); + int jump_offset = iterator_.GetJumpTargetOffset(); + BasicBlockRef* true_target = jump_type == kJumpIfTrue + ? &jump_targets_[jump_offset] + : &jump_targets_[fallthrough_offset]; + BasicBlockRef* false_target = jump_type == kJumpIfFalse + ? 
&jump_targets_[jump_offset] + : &jump_targets_[fallthrough_offset]; + BasicBlock* block = + FinishBlock<BranchIfToBooleanTrue>({node}, true_target, false_target); + if (jump_type == kJumpIfTrue) { + block->control_node() + ->Cast<BranchControlNode>() + ->set_true_interrupt_correction( + iterator_.GetRelativeJumpTargetOffset()); + } else { + block->control_node() + ->Cast<BranchControlNode>() + ->set_false_interrupt_correction( + iterator_.GetRelativeJumpTargetOffset()); + } + MergeIntoFrameState(block, jump_offset); + StartFallthroughBlock(fallthrough_offset, block); } void MaglevGraphBuilder::VisitJumpIfToBooleanTrue() { - BuildBranchIfToBooleanTrue(GetAccumulatorTagged(), - iterator_.GetJumpTargetOffset(), next_offset()); + BuildBranchIfToBooleanTrue(GetAccumulatorTagged(), kJumpIfTrue); } void MaglevGraphBuilder::VisitJumpIfToBooleanFalse() { - BuildBranchIfToBooleanTrue(GetAccumulatorTagged(), next_offset(), - iterator_.GetJumpTargetOffset()); + BuildBranchIfToBooleanTrue(GetAccumulatorTagged(), kJumpIfFalse); } void MaglevGraphBuilder::VisitJumpIfTrue() { - BuildBranchIfTrue(GetAccumulatorTagged(), iterator_.GetJumpTargetOffset(), - next_offset()); + BuildBranchIfTrue(GetAccumulatorTagged(), kJumpIfTrue); } void MaglevGraphBuilder::VisitJumpIfFalse() { - BuildBranchIfTrue(GetAccumulatorTagged(), next_offset(), - iterator_.GetJumpTargetOffset()); + BuildBranchIfTrue(GetAccumulatorTagged(), kJumpIfFalse); } void MaglevGraphBuilder::VisitJumpIfNull() { - BuildBranchIfNull(GetAccumulatorTagged(), iterator_.GetJumpTargetOffset(), - next_offset()); + BuildBranchIfNull(GetAccumulatorTagged(), kJumpIfTrue); } void MaglevGraphBuilder::VisitJumpIfNotNull() { - BuildBranchIfNull(GetAccumulatorTagged(), next_offset(), - iterator_.GetJumpTargetOffset()); + BuildBranchIfNull(GetAccumulatorTagged(), kJumpIfFalse); } void MaglevGraphBuilder::VisitJumpIfUndefined() { - BuildBranchIfUndefined(GetAccumulatorTagged(), - iterator_.GetJumpTargetOffset(), next_offset()); + BuildBranchIfUndefined(GetAccumulatorTagged(), kJumpIfTrue); } void MaglevGraphBuilder::VisitJumpIfNotUndefined() { - BuildBranchIfUndefined(GetAccumulatorTagged(), next_offset(), - iterator_.GetJumpTargetOffset()); + BuildBranchIfUndefined(GetAccumulatorTagged(), kJumpIfFalse); } void MaglevGraphBuilder::VisitJumpIfUndefinedOrNull() { BasicBlock* block = FinishBlock<BranchIfUndefinedOrNull>( - next_offset(), {GetAccumulatorTagged()}, - &jump_targets_[iterator_.GetJumpTargetOffset()], + {GetAccumulatorTagged()}, &jump_targets_[iterator_.GetJumpTargetOffset()], &jump_targets_[next_offset()]); MergeIntoFrameState(block, iterator_.GetJumpTargetOffset()); + StartFallthroughBlock(next_offset(), block); } void MaglevGraphBuilder::VisitJumpIfJSReceiver() { BasicBlock* block = FinishBlock<BranchIfJSReceiver>( - next_offset(), {GetAccumulatorTagged()}, - &jump_targets_[iterator_.GetJumpTargetOffset()], + {GetAccumulatorTagged()}, &jump_targets_[iterator_.GetJumpTargetOffset()], &jump_targets_[next_offset()]); MergeIntoFrameState(block, iterator_.GetJumpTargetOffset()); + StartFallthroughBlock(next_offset(), block); } void MaglevGraphBuilder::VisitSwitchOnSmiNoFeedback() { @@ -2800,11 +3089,12 @@ void MaglevGraphBuilder::VisitSwitchOnSmiNoFeedback() { ValueNode* case_value = GetAccumulatorInt32(); BasicBlock* block = - FinishBlock<Switch>(next_offset(), {case_value}, case_value_base, targets, + FinishBlock<Switch>({case_value}, case_value_base, targets, offsets.size(), &jump_targets_[next_offset()]); for (interpreter::JumpTableTargetOffset offset : 
offsets) { MergeIntoFrameState(block, offset.target_offset); } + StartFallthroughBlock(next_offset(), block); } void MaglevGraphBuilder::VisitForInEnumerate() { @@ -2889,7 +3179,7 @@ void MaglevGraphBuilder::VisitReturn() { } if (!is_inline()) { - FinishBlock<Return>(next_offset(), {GetAccumulatorTagged()}); + FinishBlock<Return>({GetAccumulatorTagged()}); return; } @@ -2898,8 +3188,8 @@ void MaglevGraphBuilder::VisitReturn() { // execution of the caller. // TODO(leszeks): Consider shortcutting this Jump for cases where there is // only one return and no need to merge return states. - BasicBlock* block = FinishBlock<Jump>(next_offset(), {}, - &jump_targets_[inline_exit_offset()]); + BasicBlock* block = + FinishBlock<Jump>({}, &jump_targets_[inline_exit_offset()]); MergeIntoInlinedReturnFrameState(block); } @@ -2955,22 +3245,26 @@ void MaglevGraphBuilder::VisitThrowIfNotSuperConstructor() { void MaglevGraphBuilder::VisitSwitchOnGeneratorState() { // SwitchOnGeneratorState <generator> <table_start> <table_length> // It should be the first bytecode in the bytecode array. - DCHECK_EQ(block_offset_, 0); - int generator_prologue_block_offset = block_offset_ + 1; + DCHECK_EQ(iterator_.current_offset(), 0); + int generator_prologue_block_offset = 1; DCHECK_LT(generator_prologue_block_offset, next_offset()); + interpreter::JumpTableTargetOffsets offsets = + iterator_.GetJumpTableTargetOffsets(); + // If there are no jump offsets, then this generator is not resumable, which + // means we can skip checking for it and switching on its state. + if (offsets.size() == 0) return; + // We create an initial block that checks if the generator is undefined. ValueNode* maybe_generator = LoadRegisterTagged(0); - BasicBlock* block_is_generator_undefined = CreateBlock<BranchIfRootConstant>( + BasicBlock* block_is_generator_undefined = FinishBlock<BranchIfRootConstant>( {maybe_generator}, &jump_targets_[next_offset()], &jump_targets_[generator_prologue_block_offset], RootIndex::kUndefinedValue); MergeIntoFrameState(block_is_generator_undefined, next_offset()); - ResolveJumpsToBlockAtOffset(block_is_generator_undefined, block_offset_); // We create the generator prologue block. StartNewBlock(generator_prologue_block_offset); - DCHECK_EQ(generator_prologue_block_offset, block_offset_); // Generator prologue. ValueNode* generator = maybe_generator; @@ -2988,9 +3282,6 @@ void MaglevGraphBuilder::VisitSwitchOnGeneratorState() { interpreter::Register::virtual_accumulator()); // Switch on generator state. 
- interpreter::JumpTableTargetOffsets offsets = - iterator_.GetJumpTableTargetOffsets(); - DCHECK_NE(offsets.size(), 0); int case_value_base = (*offsets.begin()).case_value; BasicBlockRef* targets = zone()->NewArray<BasicBlockRef>(offsets.size()); for (interpreter::JumpTableTargetOffset offset : offsets) { @@ -2998,12 +3289,11 @@ void MaglevGraphBuilder::VisitSwitchOnGeneratorState() { new (ref) BasicBlockRef(&jump_targets_[offset.target_offset]); } ValueNode* case_value = AddNewNode<CheckedSmiUntag>({state}); - BasicBlock* generator_prologue_block = CreateBlock<Switch>( + BasicBlock* generator_prologue_block = FinishBlock<Switch>( {case_value}, case_value_base, targets, offsets.size()); for (interpreter::JumpTableTargetOffset offset : offsets) { MergeIntoFrameState(generator_prologue_block, offset.target_offset); } - ResolveJumpsToBlockAtOffset(generator_prologue_block, block_offset_); } void MaglevGraphBuilder::VisitSuspendGenerator() { @@ -3035,7 +3325,7 @@ void MaglevGraphBuilder::VisitSuspendGenerator() { if (relative_jump_bytecode_offset > 0) { AddNewNode<ReduceInterruptBudget>({}, relative_jump_bytecode_offset); } - FinishBlock<Return>(next_offset(), {GetAccumulatorTagged()}); + FinishBlock<Return>({GetAccumulatorTagged()}); } void MaglevGraphBuilder::VisitResumeGenerator() { @@ -3045,7 +3335,7 @@ void MaglevGraphBuilder::VisitResumeGenerator() { {generator}, JSGeneratorObject::kParametersAndRegistersOffset); interpreter::RegisterList registers = iterator_.GetRegisterListOperand(1); - if (FLAG_maglev_assert) { + if (v8_flags.maglev_assert) { // Check if register count is invalid, that is, larger than the // register file length. ValueNode* array_length_smi = @@ -3104,6 +3394,4 @@ DEBUG_BREAK_BYTECODE_LIST(DEBUG_BREAK) #undef DEBUG_BREAK void MaglevGraphBuilder::VisitIllegal() { UNREACHABLE(); } -} // namespace maglev -} // namespace internal -} // namespace v8 +} // namespace v8::internal::maglev diff --git a/deps/v8/src/maglev/maglev-graph-builder.h b/deps/v8/src/maglev/maglev-graph-builder.h index 93634d79c33104..621a23b015e1ad 100644 --- a/deps/v8/src/maglev/maglev-graph-builder.h +++ b/deps/v8/src/maglev/maglev-graph-builder.h @@ -16,7 +16,9 @@ #include "src/compiler/bytecode-liveness-map.h" #include "src/compiler/heap-refs.h" #include "src/compiler/js-heap-broker.h" +#include "src/compiler/processed-feedback.h" #include "src/deoptimizer/deoptimize-reason.h" +#include "src/flags/flags.h" #include "src/interpreter/bytecode-array-iterator.h" #include "src/interpreter/bytecode-decoder.h" #include "src/interpreter/bytecode-register.h" @@ -73,11 +75,14 @@ class MaglevGraphBuilder { Graph* graph() const { return graph_; } private: - BasicBlock* CreateEmptyBlock(int offset, BasicBlock* predecessor) { + BasicBlock* CreateEmptyBlock(int offset) { + if (v8_flags.trace_maglev_graph_building) { + std::cout << "== New empty block ==" << std::endl; + } DCHECK_NULL(current_block_); current_block_ = zone()->New<BasicBlock>(nullptr); - BasicBlock* result = CreateBlock<Jump>({}, &jump_targets_[offset]); - result->set_empty_block_predecessor(predecessor); + BasicBlock* result = FinishBlock<Jump>({}, &jump_targets_[offset]); + result->set_empty_block(); return result; } @@ -96,7 +101,7 @@ class MaglevGraphBuilder { if (has_graph_labeller()) { for (Phi* phi : *merge_states_[offset]->phis()) { graph_labeller()->RegisterNode(phi); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << " " << phi << " " << PrintNodeLabel(graph_labeller(), phi) << ": " << 
PrintNode(graph_labeller(), phi) << std::endl; @@ -131,7 +136,7 @@ class MaglevGraphBuilder { ControlNode* control = predecessor->control_node(); if (control->Is<ConditionalControlNode>()) { // CreateEmptyBlock automatically registers itself with the offset. - predecessor = CreateEmptyBlock(offset, predecessor); + predecessor = CreateEmptyBlock(offset); // Set the old predecessor's (the conditional block) reference to // point to the new empty predecessor block. old_jump_targets = @@ -147,7 +152,7 @@ class MaglevGraphBuilder { if (has_graph_labeller()) { for (Phi* phi : *merge_states_[offset]->phis()) { graph_labeller()->RegisterNode(phi); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << " " << phi << " " << PrintNodeLabel(graph_labeller(), phi) << ": " << PrintNode(graph_labeller(), phi) << std::endl; @@ -166,15 +171,13 @@ class MaglevGraphBuilder { void EmitUnconditionalDeopt(DeoptimizeReason reason) { // Create a block rather than calling finish, since we don't yet know the // next block's offset before the loop skipping the rest of the bytecodes. - BasicBlock* block = CreateBlock<Deopt>({}, reason); - ResolveJumpsToBlockAtOffset(block, block_offset_); - + FinishBlock<Deopt>({}, reason); MarkBytecodeDead(); } void MarkBytecodeDead() { DCHECK_NULL(current_block_); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << "== Dead ==\n" << std::setw(4) << iterator_.current_offset() << " : "; interpreter::BytecodeDecoder::Decode(std::cout, @@ -207,31 +210,6 @@ class MaglevGraphBuilder { // Any other bytecode that doesn't return or throw will merge into the // fallthrough. MergeDeadIntoFrameState(iterator_.next_offset()); - } else if (bytecode == interpreter::Bytecode::kSuspendGenerator) { - // Extra special case for SuspendGenerator, if the suspend is dead then - // the resume has to be dead too. However, the resume already has a merge - // state, with exactly one predecessor (the generator switch), so it will - // be revived along the standard path. This can cause havoc if e.g. the - // suspend/resume are inside a dead loop, because the JumpLoop can become - // live again. - // - // So, manually advance the iterator to the resume, go through the motions - // of processing the merge state, but immediately emit an unconditional - // deopt (which also kills the resume). - iterator_.Advance(); - DCHECK_EQ(iterator_.current_bytecode(), - interpreter::Bytecode::kResumeGenerator); - int resume_offset = iterator_.current_offset(); - DCHECK_EQ(NumPredecessors(resume_offset), 1); - ProcessMergePoint(resume_offset); - StartNewBlock(resume_offset); - // TODO(v8:7700): This approach is not ideal. We can create a deopt-reopt - // loop: the interpreted code runs, creates a generator while feedback is - // still not yet allocated, then suspends the generator, tiers up to - // maglev, and reaches this deopt. We then deopt, but since the generator - // is never created again, we re-opt without the suspend part and we loop! - EmitUnconditionalDeopt(DeoptimizeReason::kSuspendGeneratorIsDead); - return; } // TODO(leszeks): We could now continue iterating the bytecode @@ -245,12 +223,11 @@ class MaglevGraphBuilder { // TODO(leszeks): Re-evaluate this DCHECK, we might hit it if the only // bytecodes in this basic block were only register juggling. 
// DCHECK(!current_block_->nodes().is_empty()); - FinishBlock<Jump>(offset, {}, &jump_targets_[offset]); - + BasicBlock* predecessor = FinishBlock<Jump>({}, &jump_targets_[offset]); merge_state->Merge(*compilation_unit_, current_interpreter_frame_, - graph()->last_block(), offset); + predecessor, offset); } - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { auto detail = merge_state->is_exception_handler() ? "exception handler" : merge_state->is_loop() ? "loop header" : "merge"; @@ -316,7 +293,7 @@ class MaglevGraphBuilder { } DCHECK_NOT_NULL(current_block_); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << std::setw(4) << iterator_.current_offset() << " : "; interpreter::BytecodeDecoder::Decode(std::cout, iterator_.current_address()); @@ -352,7 +329,7 @@ class MaglevGraphBuilder { } current_block_->nodes().Add(node); if (has_graph_labeller()) graph_labeller()->RegisterNode(node); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << " " << node << " " << PrintNodeLabel(graph_labeller(), node) << ": " << PrintNode(graph_labeller(), node) << std::endl; @@ -408,6 +385,13 @@ class MaglevGraphBuilder { return node; } + enum ContextSlotMutability { kImmutable, kMutable }; + bool TrySpecializeLoadContextSlotToFunctionContext( + ValueNode** context, size_t* depth, int slot_index, + ContextSlotMutability slot_mutability); + void BuildLoadContextSlot(ValueNode* context, size_t depth, int slot_index, + ContextSlotMutability slot_mutability); + template <Builtin kBuiltin> CallBuiltin* BuildCallBuiltin(std::initializer_list<ValueNode*> inputs) { using Descriptor = typename CallInterfaceDescriptorFor<kBuiltin>::type; @@ -466,8 +450,7 @@ class MaglevGraphBuilder { void BuildAbort(AbortReason reason) { // Create a block rather than calling finish, since we don't yet know the // next block's offset before the loop skipping the rest of the bytecodes. - BasicBlock* block = CreateBlock<Abort>({}, reason); - ResolveJumpsToBlockAtOffset(block, block_offset_); + FinishBlock<Abort>({}, reason); MarkBytecodeDead(); } @@ -621,78 +604,52 @@ class MaglevGraphBuilder { current_interpreter_frame_.set(dst, current_interpreter_frame_.get(src)); } - template <typename NodeT> - ValueNode* AddNewConversionNode(interpreter::Register reg, ValueNode* node) { - // TODO(v8:7700): Use a canonical conversion node. Maybe like in Phi nodes - // where we always add a the conversion immediately after the ValueNode. - DCHECK(NodeT::kProperties.is_conversion()); - ValueNode* result = AddNewNode<NodeT>({node}); - current_interpreter_frame_.set(reg, result); - return result; - } - - ValueNode* GetTaggedValueHelper(interpreter::Register reg, ValueNode* value) { - // TODO(victorgomes): Consider adding the representation in the - // InterpreterFrameState, so that we don't need to derefence a node. 
+ ValueNode* GetTaggedValue(interpreter::Register reg) { + ValueNode* value = current_interpreter_frame_.get(reg); switch (value->properties().value_representation()) { case ValueRepresentation::kTagged: return value; case ValueRepresentation::kInt32: { - if (value->Is<CheckedSmiUntag>()) { - return value->input(0).node(); + NodeInfo* node_info = known_node_aspects().GetOrCreateInfoFor(value); + if (node_info->tagged_alternative == nullptr) { + node_info->tagged_alternative = AddNewNode<CheckedSmiTag>({value}); } - return AddNewConversionNode<CheckedSmiTag>(reg, value); + return node_info->tagged_alternative; } case ValueRepresentation::kFloat64: { - if (value->Is<CheckedFloat64Unbox>()) { - return value->input(0).node(); + NodeInfo* node_info = known_node_aspects().GetOrCreateInfoFor(value); + if (node_info->tagged_alternative == nullptr) { + node_info->tagged_alternative = AddNewNode<Float64Box>({value}); } - if (value->Is<ChangeInt32ToFloat64>()) { - ValueNode* int32_value = value->input(0).node(); - return GetTaggedValueHelper(reg, int32_value); - } - return AddNewConversionNode<Float64Box>(reg, value); + return node_info->tagged_alternative; } } UNREACHABLE(); } - ValueNode* GetTaggedValue(interpreter::Register reg) { - ValueNode* value = current_interpreter_frame_.get(reg); - return GetTaggedValueHelper(reg, value); - } - - template <typename ConversionNodeT> - ValueNode* GetValue(interpreter::Register reg) { - ValueNode* value = current_interpreter_frame_.get(reg); - return AddNewConversionNode<ConversionNodeT>(reg, value); - } - ValueNode* GetInt32(interpreter::Register reg) { ValueNode* value = current_interpreter_frame_.get(reg); switch (value->properties().value_representation()) { case ValueRepresentation::kTagged: { - if (value->Is<CheckedSmiTag>()) { - return value->input(0).node(); - } else if (SmiConstant* constant = value->TryCast<SmiConstant>()) { + if (SmiConstant* constant = value->TryCast<SmiConstant>()) { return GetInt32Constant(constant->value().value()); } - return AddNewConversionNode<CheckedSmiUntag>(reg, value); + NodeInfo* node_info = known_node_aspects().GetOrCreateInfoFor(value); + if (node_info->int32_alternative == nullptr) { + node_info->int32_alternative = AddNewNode<CheckedSmiUntag>({value}); + } + return node_info->int32_alternative; } case ValueRepresentation::kInt32: return value; - case ValueRepresentation::kFloat64: - // We should not be able to request an Int32 from a Float64 input, - // unless it's an unboxing of a tagged value or a conversion from int32. - if (value->Is<CheckedFloat64Unbox>()) { - // TODO(leszeks): Maybe convert the CheckedFloat64Unbox to - // ChangeInt32ToFloat64 with this CheckedSmiUntag as the input. 
- return AddNewConversionNode<CheckedSmiUntag>(reg, - value->input(0).node()); - } else if (value->Is<ChangeInt32ToFloat64>()) { - return value->input(0).node(); + case ValueRepresentation::kFloat64: { + NodeInfo* node_info = known_node_aspects().GetOrCreateInfoFor(value); + if (node_info->int32_alternative == nullptr) { + node_info->int32_alternative = + AddNewNode<CheckedTruncateFloat64ToInt32>({value}); } - UNREACHABLE(); + return node_info->int32_alternative; + } } UNREACHABLE(); } @@ -701,25 +658,27 @@ class MaglevGraphBuilder { ValueNode* value = current_interpreter_frame_.get(reg); switch (value->properties().value_representation()) { case ValueRepresentation::kTagged: { - if (value->Is<Float64Box>()) { - return value->input(0).node(); + NodeInfo* node_info = known_node_aspects().GetOrCreateInfoFor(value); + if (node_info->float64_alternative == nullptr) { + node_info->float64_alternative = + AddNewNode<CheckedFloat64Unbox>({value}); } - return AddNewConversionNode<CheckedFloat64Unbox>(reg, value); + return node_info->float64_alternative; + } + case ValueRepresentation::kInt32: { + NodeInfo* node_info = known_node_aspects().GetOrCreateInfoFor(value); + if (node_info->float64_alternative == nullptr) { + node_info->float64_alternative = + AddNewNode<ChangeInt32ToFloat64>({value}); + } + return node_info->float64_alternative; } - case ValueRepresentation::kInt32: - return AddNewConversionNode<ChangeInt32ToFloat64>(reg, value); case ValueRepresentation::kFloat64: return value; } UNREACHABLE(); } - template <typename ConversionNodeT> - ValueNode* GetAccumulator() { - return GetValue<ConversionNodeT>( - interpreter::Register::virtual_accumulator()); - } - ValueNode* GetAccumulatorTagged() { return GetTaggedValue(interpreter::Register::virtual_accumulator()); } @@ -738,12 +697,6 @@ class MaglevGraphBuilder { current_interpreter_frame_.accumulator(); } - template <typename ConversionNodeT> - ValueNode* LoadRegister(int operand_index) { - return GetValue<ConversionNodeT>( - iterator_.GetRegisterOperand(operand_index)); - } - ValueNode* LoadRegisterTagged(int operand_index) { return GetTaggedValue(iterator_.GetRegisterOperand(operand_index)); } @@ -776,6 +729,8 @@ class MaglevGraphBuilder { // would be emitted between these two nodes. if (result->opcode() == Opcode::kCallRuntime) { DCHECK_EQ(result->Cast<CallRuntime>()->ReturnCount(), 2); + } else if (result->opcode() == Opcode::kCallBuiltin) { + DCHECK_EQ(result->Cast<CallBuiltin>()->ReturnCount(), 2); } else { DCHECK_EQ(result->opcode(), Opcode::kForInPrepare); } @@ -886,11 +841,11 @@ class MaglevGraphBuilder { void StartNewBlock(int offset) { DCHECK_NULL(current_block_); current_block_ = zone()->New<BasicBlock>(merge_states_[offset]); - block_offset_ = offset; + ResolveJumpsToBlockAtOffset(current_block_, offset); } template <typename ControlNodeT, typename... Args> - BasicBlock* CreateBlock(std::initializer_list<ValueNode*> control_inputs, + BasicBlock* FinishBlock(std::initializer_list<ValueNode*> control_inputs, Args&&... 
args) { ControlNode* control_node = CreateNewNode<ControlNodeT>( control_inputs, std::forward<Args>(args)...); @@ -902,7 +857,7 @@ class MaglevGraphBuilder { graph()->Add(block); if (has_graph_labeller()) { graph_labeller()->RegisterBasicBlock(block); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { bool kSkipTargets = true; std::cout << " " << control_node << " " << PrintNodeLabel(graph_labeller(), control_node) << ": " @@ -915,40 +870,40 @@ class MaglevGraphBuilder { // Update all jumps which were targetting the not-yet-created block at the // given `block_offset`, to now point to the given `block`. - void ResolveJumpsToBlockAtOffset(BasicBlock* block, int block_offset) const { + void ResolveJumpsToBlockAtOffset(BasicBlock* block, int block_offset) { + int interrupt_budget_correction = 0; BasicBlockRef* jump_target_refs_head = jump_targets_[block_offset].SetToBlockAndReturnNext(block); while (jump_target_refs_head != nullptr) { + // Only one jump target should ever set the interrupt budget correction. + DCHECK_EQ(interrupt_budget_correction, 0); + interrupt_budget_correction = + jump_target_refs_head->interrupt_budget_correction(); jump_target_refs_head = jump_target_refs_head->SetToBlockAndReturnNext(block); } + if (interrupt_budget_correction != 0) { + DCHECK_GT(interrupt_budget_correction, 0); + AddNewNode<IncreaseInterruptBudget>({}, interrupt_budget_correction); + } DCHECK_EQ(jump_targets_[block_offset].block_ptr(), block); } - template <typename ControlNodeT, typename... Args> - BasicBlock* FinishBlock(int next_block_offset, - std::initializer_list<ValueNode*> control_inputs, - Args&&... args) { - BasicBlock* block = - CreateBlock<ControlNodeT>(control_inputs, std::forward<Args>(args)...); - ResolveJumpsToBlockAtOffset(block, block_offset_); - + void StartFallthroughBlock(int next_block_offset, BasicBlock* predecessor) { // Start a new block for the fallthrough path, unless it's a merge point, in // which case we merge our state into it. That merge-point could also be a // loop header, in which case the merge state might not exist yet (if the // only predecessors are this path and the JumpLoop). 
DCHECK_NULL(current_block_); - if (std::is_base_of<ConditionalControlNode, ControlNodeT>::value) { - if (NumPredecessors(next_block_offset) == 1) { - if (FLAG_trace_maglev_graph_building) { - std::cout << "== New block (single fallthrough) ==" << std::endl; - } - StartNewBlock(next_block_offset); - } else { - MergeIntoFrameState(block, next_block_offset); + + if (NumPredecessors(next_block_offset) == 1) { + if (v8_flags.trace_maglev_graph_building) { + std::cout << "== New block (single fallthrough) ==" << std::endl; } + StartNewBlock(next_block_offset); + } else { + MergeIntoFrameState(predecessor, next_block_offset); } - return block; } void InlineCallFromRegisters(int argc_count, @@ -968,32 +923,33 @@ class MaglevGraphBuilder { void BuildCheckSymbol(ValueNode* object); void BuildMapCheck(ValueNode* object, const compiler::MapRef& map); - bool TryBuildMonomorphicLoad(ValueNode* receiver, - ValueNode* lookup_start_object, - const compiler::MapRef& map, - MaybeObjectHandle handler); - bool TryBuildMonomorphicLoadFromSmiHandler(ValueNode* receiver, - ValueNode* lookup_start_object, - const compiler::MapRef& map, - int32_t handler); - bool TryBuildMonomorphicLoadFromLoadHandler(ValueNode* receiver, - ValueNode* lookup_start_object, - const compiler::MapRef& map, - LoadHandler handler); - - bool TryBuildMonomorphicElementLoad(ValueNode* object, ValueNode* index, - const compiler::MapRef& map, - MaybeObjectHandle handler); - bool TryBuildMonomorphicElementLoadFromSmiHandler(ValueNode* object, - ValueNode* index, - const compiler::MapRef& map, - int32_t handler); - - bool TryBuildMonomorphicStore(ValueNode* object, const compiler::MapRef& map, - MaybeObjectHandle handler); - bool TryBuildMonomorphicStoreFromSmiHandler(ValueNode* object, - const compiler::MapRef& map, - int32_t handler); + bool TryFoldLoadDictPrototypeConstant( + compiler::PropertyAccessInfo access_info); + bool TryFoldLoadConstantDataField(compiler::PropertyAccessInfo access_info); + + void BuildLoadField(compiler::PropertyAccessInfo access_info, + ValueNode* lookup_start_object); + bool TryBuildStoreField(compiler::PropertyAccessInfo access_info, + ValueNode* receiver); + bool TryBuildPropertyGetterCall(compiler::PropertyAccessInfo access_info, + ValueNode* receiver); + bool TryBuildPropertySetterCall(compiler::PropertyAccessInfo access_info, + ValueNode* receiver, ValueNode* value); + + bool TryBuildPropertyLoad(ValueNode* receiver, ValueNode* lookup_start_object, + compiler::PropertyAccessInfo const& access_info); + bool TryBuildPropertyStore(ValueNode* receiver, + compiler::PropertyAccessInfo const& access_info); + bool TryBuildPropertyAccess(ValueNode* receiver, + ValueNode* lookup_start_object, + compiler::PropertyAccessInfo const& access_info, + compiler::AccessMode access_mode); + + bool TryBuildNamedAccess(ValueNode* receiver, ValueNode* lookup_start_object, + compiler::NamedAccessFeedback const& feedback, + compiler::AccessMode access_mode); + bool TryBuildElementAccess(ValueNode* object, ValueNode* index, + compiler::ElementAccessFeedback const& feedback); template <Operation kOperation> void BuildGenericUnaryOperationNode(); @@ -1035,14 +991,14 @@ class MaglevGraphBuilder { void MergeDeadIntoFrameState(int target); void MergeDeadLoopIntoFrameState(int target); void MergeIntoInlinedReturnFrameState(BasicBlock* block); - void BuildBranchIfRootConstant(ValueNode* node, int true_target, - int false_target, RootIndex root_index); - void BuildBranchIfTrue(ValueNode* node, int true_target, int false_target); - void 
BuildBranchIfNull(ValueNode* node, int true_target, int false_target); - void BuildBranchIfUndefined(ValueNode* node, int true_target, - int false_target); - void BuildBranchIfToBooleanTrue(ValueNode* node, int true_target, - int false_target); + + enum JumpType { kJumpIfTrue, kJumpIfFalse }; + void BuildBranchIfRootConstant(ValueNode* node, JumpType jump_type, + RootIndex root_index); + void BuildBranchIfTrue(ValueNode* node, JumpType jump_type); + void BuildBranchIfNull(ValueNode* node, JumpType jump_type); + void BuildBranchIfUndefined(ValueNode* node, JumpType jump_type); + void BuildBranchIfToBooleanTrue(ValueNode* node, JumpType jump_type); void BuildToNumberOrToNumeric(Object::Conversion mode); @@ -1136,7 +1092,6 @@ class MaglevGraphBuilder { // Current block information. BasicBlock* current_block_ = nullptr; - int block_offset_ = 0; base::Optional<CheckpointedInterpreterState> latest_checkpointed_state_; BasicBlockRef* jump_targets_; diff --git a/deps/v8/src/maglev/maglev-graph-printer.cc b/deps/v8/src/maglev/maglev-graph-printer.cc index 3f4cec406c8326..02e809b73a018e 100644 --- a/deps/v8/src/maglev/maglev-graph-printer.cc +++ b/deps/v8/src/maglev/maglev-graph-printer.cc @@ -45,7 +45,7 @@ void PrintPaddedId(std::ostream& os, MaglevGraphLabeller* graph_labeller, for (int i = 0; i < padding_width; ++i) { os << padding; } - if (FLAG_log_colour) os << "\033[0m"; + if (v8_flags.log_colour) os << "\033[0m"; if (node->has_id()) { os << node->id() << "/"; } @@ -158,7 +158,7 @@ void PrintVerticalArrows(std::ostream& os, desired_color = (i % 6) + 1; c.AddVertical(); } - if (FLAG_log_colour && desired_color != current_color && + if (v8_flags.log_colour && desired_color != current_color && desired_color != -1) { os << "\033[0;3" << desired_color << "m"; current_color = desired_color; @@ -167,7 +167,7 @@ void PrintVerticalArrows(std::ostream& os, } // If there are no arrows starting here, clear the color. Otherwise, // PrintPaddedId will clear it. - if (FLAG_log_colour && arrows_starting_here.empty() && + if (v8_flags.log_colour && arrows_starting_here.empty() && targets_starting_here.empty()) { os << "\033[0m"; } @@ -342,7 +342,7 @@ void MaglevPrintingVisitor::PreProcessBasicBlock(BasicBlock* block) { desired_color = (i % 6) + 1; c.AddVertical(); } - if (FLAG_log_colour && current_color != desired_color && + if (v8_flags.log_colour && current_color != desired_color && desired_color != -1) { os_ << "\033[0;3" << desired_color << "m"; current_color = desired_color; @@ -350,7 +350,7 @@ void MaglevPrintingVisitor::PreProcessBasicBlock(BasicBlock* block) { os_ << c; } os_ << (saw_start ? 
"►" : " "); - if (FLAG_log_colour) os_ << "\033[0m"; + if (v8_flags.log_colour) os_ << "\033[0m"; } int block_id = graph_labeller_->BlockId(block); @@ -429,8 +429,8 @@ void PrintLazyDeopt(std::ostream& os, std::vector<BasicBlock*> targets, } else { os << PrintNodeLabel(graph_labeller, node) << ":" << deopt_info->input_locations[index].operand(); + index++; } - index++; }); os << "}\n"; } diff --git a/deps/v8/src/maglev/maglev-graph-verifier.h b/deps/v8/src/maglev/maglev-graph-verifier.h index af7c716c79fde5..5675d53be56175 100644 --- a/deps/v8/src/maglev/maglev-graph-verifier.h +++ b/deps/v8/src/maglev/maglev-graph-verifier.h @@ -86,6 +86,7 @@ class MaglevGraphVerifier { case Opcode::kCreateObjectLiteral: case Opcode::kCreateShallowObjectLiteral: case Opcode::kCreateRegExpLiteral: + case Opcode::kDebugBreak: case Opcode::kDeopt: case Opcode::kFloat64Constant: case Opcode::kGapMove: @@ -122,6 +123,7 @@ class MaglevGraphVerifier { case Opcode::kCheckString: case Opcode::kCheckSymbol: case Opcode::kCheckedInternalizedString: + case Opcode::kCheckedObjectToIndex: // TODO(victorgomes): Can we check that the input is Boolean? case Opcode::kBranchIfToBooleanTrue: case Opcode::kBranchIfRootConstant: @@ -135,6 +137,7 @@ class MaglevGraphVerifier { case Opcode::kGetTemplateObject: case Opcode::kLogicalNot: case Opcode::kSetPendingMessage: + case Opcode::kStringLength: case Opcode::kToBooleanLogicalNot: case Opcode::kTestUndetectable: case Opcode::kTestTypeOf: @@ -147,11 +150,13 @@ class MaglevGraphVerifier { break; case Opcode::kSwitch: case Opcode::kCheckedSmiTag: + case Opcode::kUnsafeSmiTag: case Opcode::kChangeInt32ToFloat64: DCHECK_EQ(node->input_count(), 1); CheckValueInputIs(node, 0, ValueRepresentation::kInt32); break; case Opcode::kFloat64Box: + case Opcode::kCheckedTruncateFloat64ToInt32: DCHECK_EQ(node->input_count(), 1); CheckValueInputIs(node, 0, ValueRepresentation::kFloat64); break; @@ -176,10 +181,6 @@ class MaglevGraphVerifier { case Opcode::kGenericLessThan: case Opcode::kGenericLessThanOrEqual: case Opcode::kGenericStrictEqual: - case Opcode::kCheckJSArrayBounds: - case Opcode::kCheckJSObjectElementsBounds: - case Opcode::kLoadTaggedElement: - case Opcode::kLoadDoubleElement: case Opcode::kGetIterator: case Opcode::kTaggedEqual: case Opcode::kTaggedNotEqual: @@ -275,6 +276,14 @@ class MaglevGraphVerifier { CheckValueInputIs(node, i, ValueRepresentation::kTagged); } break; + case Opcode::kCheckJSArrayBounds: + case Opcode::kCheckJSObjectElementsBounds: + case Opcode::kLoadTaggedElement: + case Opcode::kLoadDoubleElement: + DCHECK_EQ(node->input_count(), 2); + CheckValueInputIs(node, 0, ValueRepresentation::kTagged); + CheckValueInputIs(node, 1, ValueRepresentation::kInt32); + break; case Opcode::kCallBuiltin: { CallBuiltin* call_builtin = node->Cast<CallBuiltin>(); auto descriptor = diff --git a/deps/v8/src/maglev/maglev-interpreter-frame-state.h b/deps/v8/src/maglev/maglev-interpreter-frame-state.h index 8ddda35edd9b1b..2db6a4ee3e6c83 100644 --- a/deps/v8/src/maglev/maglev-interpreter-frame-state.h +++ b/deps/v8/src/maglev/maglev-interpreter-frame-state.h @@ -55,6 +55,11 @@ void DestructivelyIntersect(ZoneMap<ValueNode*, Value>& lhs_map, ++rhs_it; } } + // If we haven't reached the end of LHS by now, then we have reached the end + // of RHS, and the remaining items are therefore not in RHS. Remove them. 
+ if (lhs_it != lhs_map.end()) { + lhs_map.erase(lhs_it, lhs_map.end()); + } } // The intersection (using `&`) of any two NodeTypes must be a valid NodeType @@ -72,34 +77,52 @@ enum class NodeType { kHeapObjectWithKnownMap = (1 << 5) | kAnyHeapObject, }; +inline bool NodeTypeIsSmi(NodeType type) { return type == NodeType::kSmi; } +inline bool NodeTypeIsAnyHeapObject(NodeType type) { + return static_cast<int>(type) & static_cast<int>(NodeType::kAnyHeapObject); +} +inline bool NodeTypeIsString(NodeType type) { + return type == NodeType::kString; +} +inline bool NodeTypeIsSymbol(NodeType type) { + return type == NodeType::kSymbol; +} + struct NodeInfo { - NodeType type; - // TODO(leszeks): Consider adding more info for nodes here, e.g. alternative - // representations or previously loaded fields. + NodeType type = NodeType::kUnknown; - static bool IsSmi(const NodeInfo* info) { - if (!info) return false; - return info->type == NodeType::kSmi; - } - static bool IsAnyHeapObject(const NodeInfo* info) { - if (!info) return false; - return static_cast<int>(info->type) & - static_cast<int>(NodeType::kAnyHeapObject); - } - static bool IsString(const NodeInfo* info) { - if (!info) return false; - return info->type == NodeType::kString; - } - static bool IsSymbol(const NodeInfo* info) { - if (!info) return false; - return info->type == NodeType::kSymbol; + // Optional alternative nodes with the equivalent value but a different + // representation. + // TODO(leszeks): At least one of these is redundant for every node, consider + // a more compressed form or even linked list. + ValueNode* tagged_alternative = nullptr; + ValueNode* int32_alternative = nullptr; + ValueNode* float64_alternative = nullptr; + + bool is_empty() { + return type == NodeType::kUnknown && tagged_alternative == nullptr && + int32_alternative == nullptr && float64_alternative == nullptr; } + bool is_smi() const { return NodeTypeIsSmi(type); } + bool is_any_heap_object() const { return NodeTypeIsAnyHeapObject(type); } + bool is_string() const { return NodeTypeIsString(type); } + bool is_symbol() const { return NodeTypeIsSymbol(type); } + // Mutate this node info by merging in another node info, with the result // being a node info that is the subset of information valid in both inputs. void MergeWith(const NodeInfo& other) { type = static_cast<NodeType>(static_cast<int>(type) & static_cast<int>(other.type)); + tagged_alternative = tagged_alternative == other.tagged_alternative + ? tagged_alternative + : nullptr; + int32_alternative = int32_alternative == other.int32_alternative + ? int32_alternative + : nullptr; + float64_alternative = float64_alternative == other.float64_alternative + ? 
float64_alternative + : nullptr; } }; @@ -131,28 +154,13 @@ struct KnownNodeAspects { return clone; } - NodeInfo* GetInfoFor(ValueNode* node) { - auto it = node_infos.find(node); - if (it == node_infos.end()) return nullptr; - return &it->second; - } - - void InsertOrUpdateNodeType(ValueNode* node, NodeInfo* existing_info, - NodeType new_type) { - if (existing_info == nullptr) { - DCHECK_EQ(node_infos.find(node), node_infos.end()); - node_infos.emplace(node, NodeInfo{new_type}); - } else { - DCHECK_EQ(&node_infos.find(node)->second, existing_info); - existing_info->type = new_type; - } - } + NodeInfo* GetOrCreateInfoFor(ValueNode* node) { return &node_infos[node]; } void Merge(const KnownNodeAspects& other) { DestructivelyIntersect(node_infos, other.node_infos, [](NodeInfo& lhs, const NodeInfo& rhs) { lhs.MergeWith(rhs); - return lhs.type != NodeType::kUnknown; + return !lhs.is_empty(); }); DestructivelyIntersect(stable_maps, other.stable_maps, [](compiler::MapRef lhs, compiler::MapRef rhs) { @@ -186,6 +194,8 @@ class InterpreterFrameState { const MergePointInterpreterFrameState& state); void set_accumulator(ValueNode* value) { + // Conversions should be stored in known_node_aspects/NodeInfo. + DCHECK(!value->properties().is_conversion()); frame_[interpreter::Register::virtual_accumulator()] = value; } ValueNode* accumulator() const { @@ -198,6 +208,8 @@ class InterpreterFrameState { reg == interpreter::Register::function_closure() || reg == interpreter::Register::virtual_accumulator() || reg.ToParameterIndex() >= 0); + // Conversions should be stored in known_node_aspects/NodeInfo. + DCHECK(!value->properties().is_conversion()); frame_[reg] = value; } ValueNode* get(interpreter::Register reg) const { @@ -444,11 +456,12 @@ class MergePointInterpreterFrameState { }); merge_state->predecessors_[0] = predecessor; merge_state->known_node_aspects_ = - info.zone()->New<KnownNodeAspects>(info.zone()); + state.known_node_aspects().Clone(info.zone()); return merge_state; } static MergePointInterpreterFrameState* NewForLoop( + const InterpreterFrameState& start_state, const MaglevCompilationUnit& info, int merge_offset, int predecessor_count, const compiler::BytecodeLivenessState* liveness, const compiler::LoopInfo* loop_info) { @@ -457,6 +470,11 @@ class MergePointInterpreterFrameState { info, predecessor_count, 0, info.zone()->NewArray<BasicBlock*>(predecessor_count), BasicBlockType::kLoopHeader, liveness); + if (loop_info->resumable()) { + state->known_node_aspects_ = + info.zone()->New<KnownNodeAspects>(info.zone()); + state->is_resumable_loop_ = true; + } auto& assignments = loop_info->assignments(); auto& frame_state = state->frame_state_; frame_state.ForEachParameter( @@ -464,6 +482,10 @@ class MergePointInterpreterFrameState { entry = nullptr; if (assignments.ContainsParameter(reg.ToParameterIndex())) { entry = state->NewLoopPhi(info.zone(), reg, merge_offset); + } else if (state->is_resumable_loop()) { + // Copy initial values out of the start state. + entry = start_state.get(reg); + DCHECK(entry->Is<InitialValue>()); } }); // TODO(v8:7700): Add contexts into assignment analysis. @@ -488,45 +510,46 @@ class MergePointInterpreterFrameState { // Merges an unmerged framestate with a possibly merged framestate into |this| // framestate. 
void Merge(MaglevCompilationUnit& compilation_unit, - const InterpreterFrameState& unmerged, BasicBlock* predecessor, + InterpreterFrameState& unmerged, BasicBlock* predecessor, int merge_offset) { DCHECK_GT(predecessor_count_, 1); DCHECK_LT(predecessors_so_far_, predecessor_count_); predecessors_[predecessors_so_far_] = predecessor; - if (known_node_aspects_ == nullptr) { - DCHECK(is_unmerged_loop()); - DCHECK_EQ(predecessors_so_far_, 0); - known_node_aspects_ = - unmerged.known_node_aspects().CloneWithoutUnstableMaps( - compilation_unit.zone()); - } else { - known_node_aspects_->Merge(unmerged.known_node_aspects()); - } - - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << "Merging..." << std::endl; } frame_state_.ForEachValue(compilation_unit, [&](ValueNode*& value, interpreter::Register reg) { CheckIsLoopPhiIfNeeded(compilation_unit, merge_offset, reg, value); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << " " << reg.ToString() << ": " << PrintNodeLabel(compilation_unit.graph_labeller(), value) << " <- " << PrintNodeLabel(compilation_unit.graph_labeller(), unmerged.get(reg)); } - value = MergeValue(compilation_unit, reg, value, unmerged.get(reg), - merge_offset); - if (FLAG_trace_maglev_graph_building) { + value = MergeValue(compilation_unit, reg, unmerged.known_node_aspects(), + value, unmerged.get(reg), merge_offset); + if (v8_flags.trace_maglev_graph_building) { std::cout << " => " << PrintNodeLabel(compilation_unit.graph_labeller(), value) << ": " << PrintNode(compilation_unit.graph_labeller(), value) << std::endl; } }); + + if (known_node_aspects_ == nullptr) { + DCHECK(is_unmerged_loop()); + DCHECK_EQ(predecessors_so_far_, 0); + known_node_aspects_ = + unmerged.known_node_aspects().CloneWithoutUnstableMaps( + compilation_unit.zone()); + } else { + known_node_aspects_->Merge(unmerged.known_node_aspects()); + } + predecessors_so_far_++; DCHECK_LE(predecessors_so_far_, predecessor_count_); } @@ -534,30 +557,30 @@ class MergePointInterpreterFrameState { // Merges an unmerged framestate with a possibly merged framestate into |this| // framestate. void MergeLoop(MaglevCompilationUnit& compilation_unit, - const InterpreterFrameState& loop_end_state, + InterpreterFrameState& loop_end_state, BasicBlock* loop_end_block, int merge_offset) { // This should be the last predecessor we try to merge. DCHECK_EQ(predecessors_so_far_, predecessor_count_ - 1); DCHECK(is_unmerged_loop()); predecessors_[predecessor_count_ - 1] = loop_end_block; - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << "Merging loop backedge..." 
<< std::endl; } frame_state_.ForEachValue(compilation_unit, [&](ValueNode* value, interpreter::Register reg) { CheckIsLoopPhiIfNeeded(compilation_unit, merge_offset, reg, value); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { std::cout << " " << reg.ToString() << ": " << PrintNodeLabel(compilation_unit.graph_labeller(), value) << " <- " << PrintNodeLabel(compilation_unit.graph_labeller(), loop_end_state.get(reg)); } - MergeLoopValue(compilation_unit, reg, value, loop_end_state.get(reg), - merge_offset); - if (FLAG_trace_maglev_graph_building) { + MergeLoopValue(compilation_unit, reg, loop_end_state.known_node_aspects(), + value, loop_end_state.get(reg), merge_offset); + if (v8_flags.trace_maglev_graph_building) { std::cout << " => " << PrintNodeLabel(compilation_unit.graph_labeller(), value) << ": " << PrintNode(compilation_unit.graph_labeller(), value) @@ -572,7 +595,7 @@ class MergePointInterpreterFrameState { // deopt). void MergeDead(const MaglevCompilationUnit& compilation_unit, int merge_offset) { - DCHECK_GT(predecessor_count_, 1); + DCHECK_GE(predecessor_count_, 1); DCHECK_LT(predecessors_so_far_, predecessor_count_); predecessor_count_--; DCHECK_LE(predecessors_so_far_, predecessor_count_); @@ -636,9 +659,12 @@ class MergePointInterpreterFrameState { bool is_unreachable_loop() const { // If there is only one predecessor, and it's not set, then this is a loop // merge with no forward control flow entering it. - return is_loop() && predecessor_count_ == 1 && predecessors_so_far_ == 0; + return is_loop() && !is_resumable_loop() && predecessor_count_ == 1 && + predecessors_so_far_ == 0; } + bool is_resumable_loop() const { return is_resumable_loop_; } + private: friend void InterpreterFrameState::CopyFrom( const MaglevCompilationUnit& info, @@ -658,44 +684,43 @@ class MergePointInterpreterFrameState { frame_state_(info, liveness) {} ValueNode* FromInt32ToTagged(MaglevCompilationUnit& compilation_unit, + KnownNodeAspects& known_node_aspects, ValueNode* value) { DCHECK_EQ(value->properties().value_representation(), ValueRepresentation::kInt32); - if (value->Is<CheckedSmiUntag>()) { - return value->input(0).node(); - } + DCHECK(!value->properties().is_conversion()); #define IS_INT32_OP_NODE(Name) || value->Is<Name>() - DCHECK(value->Is<Int32Constant>() + DCHECK(value->Is<Int32Constant>() || + value->Is<StringLength>() INT32_OPERATIONS_NODE_LIST(IS_INT32_OP_NODE)); #undef IS_INT32_OP_NODE - // Check if the next Node in the block after value is its CheckedSmiTag - // version and reuse it. - if (value->NextNode()) { - CheckedSmiTag* tagged = value->NextNode()->TryCast<CheckedSmiTag>(); - if (tagged != nullptr && value == tagged->input().node()) { - return tagged; - } - } - // Otherwise create a tagged version. - ValueNode* tagged = - Node::New<CheckedSmiTag, std::initializer_list<ValueNode*>>( + NodeInfo* node_info = known_node_aspects.GetOrCreateInfoFor(value); + if (!node_info->tagged_alternative) { + // Create a tagged version. 
+ ValueNode* tagged; + if (value->Is<StringLength>()) { + static_assert(String::kMaxLength <= kSmiMaxValue, + "String length must fit into a Smi"); + tagged = Node::New<UnsafeSmiTag>(compilation_unit.zone(), {value}); + } else { + tagged = Node::New<CheckedSmiTag, std::initializer_list<ValueNode*>>( compilation_unit.zone(), compilation_unit, value->eager_deopt_info()->state, {value}); - Node::List::AddAfter(value, tagged); - compilation_unit.RegisterNodeInGraphLabeller(tagged); - return tagged; + } + + Node::List::AddAfter(value, tagged); + compilation_unit.RegisterNodeInGraphLabeller(tagged); + node_info->tagged_alternative = tagged; + } + return node_info->tagged_alternative; } ValueNode* FromFloat64ToTagged(MaglevCompilationUnit& compilation_unit, + KnownNodeAspects& known_node_aspects, ValueNode* value) { DCHECK_EQ(value->properties().value_representation(), ValueRepresentation::kFloat64); - if (value->Is<CheckedFloat64Unbox>()) { - return value->input(0).node(); - } - if (value->Is<ChangeInt32ToFloat64>()) { - return FromInt32ToTagged(compilation_unit, value->input(0).node()); - } + DCHECK(!value->properties().is_conversion()); // Check if the next Node in the block after value is its Float64Box // version and reuse it. if (value->NextNode()) { @@ -714,19 +739,21 @@ class MergePointInterpreterFrameState { // TODO(victorgomes): Consider refactor this function to share code with // MaglevGraphBuilder::GetTagged. ValueNode* EnsureTagged(MaglevCompilationUnit& compilation_unit, + KnownNodeAspects& known_node_aspects, ValueNode* value) { switch (value->properties().value_representation()) { case ValueRepresentation::kTagged: return value; case ValueRepresentation::kInt32: - return FromInt32ToTagged(compilation_unit, value); + return FromInt32ToTagged(compilation_unit, known_node_aspects, value); case ValueRepresentation::kFloat64: - return FromFloat64ToTagged(compilation_unit, value); + return FromFloat64ToTagged(compilation_unit, known_node_aspects, value); } } ValueNode* MergeValue(MaglevCompilationUnit& compilation_unit, - interpreter::Register owner, ValueNode* merged, + interpreter::Register owner, + KnownNodeAspects& unmerged_aspects, ValueNode* merged, ValueNode* unmerged, int merge_offset) { // If the merged node is null, this is a pre-created loop header merge // frame will null values for anything that isn't a loop Phi. @@ -741,7 +768,7 @@ class MergePointInterpreterFrameState { // It's possible that merged == unmerged at this point since loop-phis are // not dropped if they are only assigned to themselves in the loop. DCHECK_EQ(result->owner(), owner); - unmerged = EnsureTagged(compilation_unit, unmerged); + unmerged = EnsureTagged(compilation_unit, unmerged_aspects, unmerged); result->set_input(predecessors_so_far_, unmerged); return result; } @@ -750,8 +777,8 @@ class MergePointInterpreterFrameState { // We guarantee that the values are tagged. // TODO(victorgomes): Support Phi nodes of untagged values. - merged = EnsureTagged(compilation_unit, merged); - unmerged = EnsureTagged(compilation_unit, unmerged); + merged = EnsureTagged(compilation_unit, *known_node_aspects_, merged); + unmerged = EnsureTagged(compilation_unit, unmerged_aspects, unmerged); // Tagged versions could point to the same value, avoid Phi nodes in this // case. 
@@ -770,7 +797,7 @@ class MergePointInterpreterFrameState { for (int i = 0; i < predecessors_so_far_; i++) result->set_input(i, merged); result->set_input(predecessors_so_far_, unmerged); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { for (int i = predecessors_so_far_ + 1; i < predecessor_count_; i++) { result->set_input(i, nullptr); } @@ -800,7 +827,8 @@ class MergePointInterpreterFrameState { } void MergeLoopValue(MaglevCompilationUnit& compilation_unit, - interpreter::Register owner, ValueNode* merged, + interpreter::Register owner, + KnownNodeAspects& unmerged_aspects, ValueNode* merged, ValueNode* unmerged, int merge_offset) { Phi* result = merged->TryCast<Phi>(); if (result == nullptr || result->merge_offset() != merge_offset) { @@ -814,7 +842,7 @@ class MergePointInterpreterFrameState { return; } DCHECK_EQ(result->owner(), owner); - unmerged = EnsureTagged(compilation_unit, unmerged); + unmerged = EnsureTagged(compilation_unit, unmerged_aspects, unmerged); result->set_input(predecessor_count_ - 1, unmerged); } @@ -823,7 +851,7 @@ class MergePointInterpreterFrameState { DCHECK_EQ(predecessors_so_far_, 0); // Create a new loop phi, which for now is empty. Phi* result = Node::New<Phi>(zone, predecessor_count_, reg, merge_offset); - if (FLAG_trace_maglev_graph_building) { + if (v8_flags.trace_maglev_graph_building) { for (int i = 0; i < predecessor_count_; i++) { result->set_input(i, nullptr); } @@ -844,6 +872,7 @@ class MergePointInterpreterFrameState { int predecessor_count_; int predecessors_so_far_; + bool is_resumable_loop_ = false; BasicBlock** predecessors_; BasicBlockType basic_block_type_; diff --git a/deps/v8/src/maglev/maglev-ir-inl.h b/deps/v8/src/maglev/maglev-ir-inl.h index 836f46f60548a7..e9fe230ee5440c 100644 --- a/deps/v8/src/maglev/maglev-ir-inl.h +++ b/deps/v8/src/maglev/maglev-ir-inl.h @@ -30,10 +30,28 @@ void DeepForEachInputImpl(const MaglevCompilationUnit& unit, } template <typename Function> -void DeepForEachInput(const EagerDeoptInfo* node, Function&& f) { +void DeepForEachInput(const EagerDeoptInfo* deopt_info, Function&& f) { int index = 0; - DeepForEachInputImpl(node->unit, &node->state, node->input_locations, index, - f); + DeepForEachInputImpl(deopt_info->unit, &deopt_info->state, + deopt_info->input_locations, index, f); +} + +template <typename Function> +void DeepForEachInput(const LazyDeoptInfo* deopt_info, Function&& f) { + int index = 0; + if (deopt_info->state.parent) { + DeepForEachInputImpl(*deopt_info->unit.caller(), deopt_info->state.parent, + deopt_info->input_locations, index, f); + } + // Handle the top-of-frame info separately, since we have to skip the result + // location. + deopt_info->state.register_frame->ForEachValue( + deopt_info->unit, [&](ValueNode* node, interpreter::Register reg) { + // Skip over the result location since it is irrelevant for lazy deopts + // (unoptimized code will recreate the result). 
+ if (deopt_info->IsResultRegister(reg)) return; + f(node, reg, &deopt_info->input_locations[index++]); + }); } } // namespace detail diff --git a/deps/v8/src/maglev/maglev-ir.cc b/deps/v8/src/maglev/maglev-ir.cc index 3c32641ac4f820..6a530708190ecf 100644 --- a/deps/v8/src/maglev/maglev-ir.cc +++ b/deps/v8/src/maglev/maglev-ir.cc @@ -140,6 +140,24 @@ class SaveRegisterStateForCall { RegisterSnapshot snapshot_; }; +#ifdef DEBUG +RegList GetGeneralRegistersUsedAsInputs(const EagerDeoptInfo* deopt_info) { + RegList regs; + detail::DeepForEachInput(deopt_info, + [&regs](ValueNode* value, interpreter::Register reg, + InputLocation* input) { + if (input->IsGeneralRegister()) { + regs.set(input->AssignedGeneralRegister()); + } + }); + return regs; +} +#endif // DEBUG + +// Helper macro for checking that a reglist is empty which prints the contents +// when non-empty. +#define DCHECK_REGLIST_EMPTY(...) DCHECK_EQ((__VA_ARGS__), RegList{}) + // --- // Inlined computations. // --- @@ -151,7 +169,8 @@ void AllocateRaw(MaglevAssembler* masm, RegisterSnapshot& register_snapshot, // TODO(victorgomes): Call the runtime for large object allocation. // TODO(victorgomes): Support double alignment. DCHECK_EQ(alignment, kTaggedAligned); - if (FLAG_single_generation) { + size_in_bytes = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes); + if (v8_flags.single_generation) { alloc_type = AllocationType::kOld; } bool in_new_space = alloc_type == AllocationType::kYoung; @@ -165,7 +184,7 @@ void AllocateRaw(MaglevAssembler* masm, RegisterSnapshot& register_snapshot, ? ExternalReference::new_space_allocation_limit_address(isolate) : ExternalReference::old_space_allocation_limit_address(isolate); - ZoneLabelRef done(masm->compilation_info()->zone()); + ZoneLabelRef done(masm); Register new_top = kScratchRegister; // Check if there is enough space. __ Move(object, __ ExternalReferenceAsOperand(top)); @@ -174,12 +193,13 @@ void AllocateRaw(MaglevAssembler* masm, RegisterSnapshot& register_snapshot, // Otherwise call runtime. __ JumpToDeferredIf( greater_equal, - [](MaglevAssembler* masm, Label* return_label, - RegisterSnapshot register_snapshot, Register object, Builtin builtin, - int size_in_bytes, ZoneLabelRef done) { + [](MaglevAssembler* masm, RegisterSnapshot register_snapshot, + Register object, Builtin builtin, int size_in_bytes, + ZoneLabelRef done) { // Remove {object} from snapshot, since it is the returned allocated // HeapObject. register_snapshot.live_registers.clear(object); + register_snapshot.live_tagged_registers.clear(object); { SaveRegisterStateForCall save_register_state(masm, register_snapshot); using D = AllocateDescriptor; @@ -208,8 +228,8 @@ void ToBoolean(MaglevAssembler* masm, Register value, ZoneLabelRef is_true, __ CheckSmi(value); __ JumpToDeferredIf( zero, - [](MaglevAssembler* masm, Label* return_label, Register value, - ZoneLabelRef is_true, ZoneLabelRef is_false) { + [](MaglevAssembler* masm, Register value, ZoneLabelRef is_true, + ZoneLabelRef is_false) { // Check if {value} is not zero. __ SmiCompare(value, Smi::FromInt(0)); __ j(equal, *is_false); @@ -235,8 +255,8 @@ void ToBoolean(MaglevAssembler* masm, Register value, ZoneLabelRef is_true, __ CompareRoot(map, RootIndex::kHeapNumberMap); __ JumpToDeferredIf( equal, - [](MaglevAssembler* masm, Label* return_label, Register value, - ZoneLabelRef is_true, ZoneLabelRef is_false) { + [](MaglevAssembler* masm, Register value, ZoneLabelRef is_true, + ZoneLabelRef is_false) { // Sets scratch register to 0.0.
__ Xorpd(kScratchDoubleReg, kScratchDoubleReg); // Sets ZF if equal to 0.0, -0.0 or NaN. @@ -251,8 +271,8 @@ void ToBoolean(MaglevAssembler* masm, Register value, ZoneLabelRef is_true, __ CompareRoot(map, RootIndex::kBigIntMap); __ JumpToDeferredIf( equal, - [](MaglevAssembler* masm, Label* return_label, Register value, - ZoneLabelRef is_true, ZoneLabelRef is_false) { + [](MaglevAssembler* masm, Register value, ZoneLabelRef is_true, + ZoneLabelRef is_false) { __ testl(FieldOperand(value, BigInt::kBitfieldOffset), Immediate(BigInt::LengthBits::kMask)); __ j(zero, *is_false); @@ -576,8 +596,9 @@ void GeneratorStore::GenerateCode(MaglevAssembler* masm, __ FromAnyToRegister(parameters_and_registers(i), WriteBarrierDescriptor::SlotAddressRegister()); + ZoneLabelRef done(masm); DeferredCodeInfo* deferred_write_barrier = __ PushDeferredCode( - [](MaglevAssembler* masm, Label* return_label, Register value, + [](MaglevAssembler* masm, ZoneLabelRef done, Register value, Register array, GeneratorStore* node, int32_t offset) { ASM_CODE_COMMENT_STRING(masm, "Write barrier slow path"); // Use WriteBarrierDescriptor::SlotAddressRegister() as the scratch @@ -585,7 +606,7 @@ void GeneratorStore::GenerateCode(MaglevAssembler* masm, __ CheckPageFlag( value, WriteBarrierDescriptor::SlotAddressRegister(), MemoryChunk::kPointersToHereAreInterestingOrInSharedHeapMask, - zero, return_label); + zero, *done); Register slot_reg = WriteBarrierDescriptor::SlotAddressRegister(); @@ -600,13 +621,13 @@ void GeneratorStore::GenerateCode(MaglevAssembler* masm, __ CallRecordWriteStub(array, slot_reg, save_fp_mode); - __ jmp(return_label); + __ jmp(*done); }, - value, array, this, FixedArray::OffsetOfElementAt(i)); + done, value, array, this, FixedArray::OffsetOfElementAt(i)); __ StoreTaggedField(FieldOperand(array, FixedArray::OffsetOfElementAt(i)), value); - __ JumpIfSmi(value, &deferred_write_barrier->return_label, Label::kNear); + __ JumpIfSmi(value, *done, Label::kNear); // TODO(leszeks): This will stay either false or true throughout this loop. // Consider hoisting the check out of the loop and duplicating the loop // into versions with and without the write barrier. @@ -614,7 +635,7 @@ void GeneratorStore::GenerateCode(MaglevAssembler* masm, MemoryChunk::kPointersFromHereAreInterestingMask, not_zero, &deferred_write_barrier->deferred_code_label); - __ bind(&deferred_write_barrier->return_label); + __ bind(*done); } // Use WriteBarrierDescriptor::SlotAddressRegister() as the scratch @@ -622,19 +643,20 @@ void GeneratorStore::GenerateCode(MaglevAssembler* masm, Register context = __ FromAnyToRegister( context_input(), WriteBarrierDescriptor::SlotAddressRegister()); + ZoneLabelRef done(masm); DeferredCodeInfo* deferred_context_write_barrier = __ PushDeferredCode( - [](MaglevAssembler* masm, Label* return_label, Register context, + [](MaglevAssembler* masm, ZoneLabelRef done, Register context, Register generator, GeneratorStore* node) { ASM_CODE_COMMENT_STRING(masm, "Write barrier slow path"); // Use WriteBarrierDescriptor::SlotAddressRegister() as the scratch // register, see comment above. - // TODO(leszeks): The context is almost always going to be in old-space, - // consider moving this check to the fast path, maybe even as the first - // bailout. + // TODO(leszeks): The context is almost always going to be in + // old-space, consider moving this check to the fast path, maybe even + // as the first bailout.
__ CheckPageFlag( context, WriteBarrierDescriptor::SlotAddressRegister(), MemoryChunk::kPointersToHereAreInterestingOrInSharedHeapMask, zero, - return_label); + *done); __ Move(WriteBarrierDescriptor::ObjectRegister(), generator); generator = WriteBarrierDescriptor::ObjectRegister(); @@ -652,16 +674,16 @@ void GeneratorStore::GenerateCode(MaglevAssembler* masm, __ CallRecordWriteStub(generator, slot_reg, save_fp_mode); - __ jmp(return_label); + __ jmp(*done); }, - context, generator, this); + done, context, generator, this); __ StoreTaggedField( FieldOperand(generator, JSGeneratorObject::kContextOffset), context); __ AssertNotSmi(context); __ CheckPageFlag(generator, kScratchRegister, MemoryChunk::kPointersFromHereAreInterestingMask, not_zero, &deferred_context_write_barrier->deferred_code_label); - __ bind(&deferred_context_write_barrier->return_label); + __ bind(*done); __ StoreTaggedSignedField( FieldOperand(generator, JSGeneratorObject::kContinuationOffset), @@ -681,7 +703,7 @@ void GeneratorRestoreRegister::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { Register array = ToRegister(array_input()); Register result_reg = ToRegister(result()); - Register temp = temporaries().PopFirst(); + Register temp = general_temporaries().PopFirst(); // The input and the output can alias, if that happens we use a temporary // register and a move at the end. @@ -931,6 +953,7 @@ void CreateArrayLiteral::GenerateCode(MaglevAssembler* masm, __ Push(constant_elements().object()); __ Push(Smi::FromInt(flags())); __ CallRuntime(Runtime::kCreateArrayLiteral); + masm->DefineExceptionHandlerAndLazyDeoptPoint(this); } void CreateShallowArrayLiteral::AllocateVreg( @@ -962,6 +985,7 @@ void CreateObjectLiteral::GenerateCode(MaglevAssembler* masm, __ Push(boilerplate_descriptor().object()); __ Push(Smi::FromInt(flags())); __ CallRuntime(Runtime::kCreateObjectLiteral); + masm->DefineExceptionHandlerAndLazyDeoptPoint(this); } void CreateEmptyObjectLiteral::AllocateVreg( @@ -1317,9 +1341,10 @@ void CheckMapsWithMigration::GenerateCode(MaglevAssembler* masm, } __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map().object()); + ZoneLabelRef migration_done(masm); __ JumpToDeferredIf( not_equal, - [](MaglevAssembler* masm, Label* return_label, Register object, + [](MaglevAssembler* masm, ZoneLabelRef migration_done, Register object, CheckMapsWithMigration* node, EagerDeoptInfo* deopt_info) { __ RegisterEagerDeopt(deopt_info, DeoptimizeReason::kWrongMap); @@ -1365,10 +1390,11 @@ void CheckMapsWithMigration::GenerateCode(MaglevAssembler* masm, // Manually load the map pointer without uncompressing it.
__ Cmp(FieldOperand(object, HeapObject::kMapOffset), node->map().object()); - __ j(equal, return_label); + __ j(equal, *migration_done); __ jmp(&deopt_info->deopt_entry_label); }, - object, this, eager_deopt_info()); + migration_done, object, this, eager_deopt_info()); + __ bind(*migration_done); } void CheckMapsWithMigration::PrintParams( std::ostream& os, MaglevGraphLabeller* graph_labeller) const { @@ -1384,15 +1410,14 @@ void CheckJSArrayBounds::GenerateCode(MaglevAssembler* masm, Register object = ToRegister(receiver_input()); Register index = ToRegister(index_input()); __ AssertNotSmi(object); - __ AssertSmi(index); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ CmpObjectType(object, JS_ARRAY_TYPE, kScratchRegister); __ Assert(equal, AbortReason::kUnexpectedValue); } - TaggedRegister length(kScratchRegister); - __ LoadAnyTaggedField(length, FieldOperand(object, JSArray::kLengthOffset)); - __ cmp_tagged(index, length.reg()); + __ SmiUntagField(kScratchRegister, + FieldOperand(object, JSArray::kLengthOffset)); + __ cmpl(index, kScratchRegister); __ EmitEagerDeoptIf(above_equal, DeoptimizeReason::kOutOfBounds, this); } @@ -1406,24 +1431,28 @@ void CheckJSObjectElementsBounds::GenerateCode(MaglevAssembler* masm, Register object = ToRegister(receiver_input()); Register index = ToRegister(index_input()); __ AssertNotSmi(object); - __ AssertSmi(index); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, kScratchRegister); __ Assert(greater_equal, AbortReason::kUnexpectedValue); } __ LoadAnyTaggedField(kScratchRegister, FieldOperand(object, JSObject::kElementsOffset)); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ AssertNotSmi(kScratchRegister); } - TaggedRegister length(kScratchRegister); - __ LoadAnyTaggedField( - length, FieldOperand(kScratchRegister, FixedArray::kLengthOffset)); - __ cmp_tagged(index, length.reg()); + __ SmiUntagField(kScratchRegister, + FieldOperand(kScratchRegister, FixedArray::kLengthOffset)); + __ cmpl(index, kScratchRegister); __ EmitEagerDeoptIf(above_equal, DeoptimizeReason::kOutOfBounds, this); } +void DebugBreak::AllocateVreg(MaglevVregAllocationState* vreg_state) {} +void DebugBreak::GenerateCode(MaglevAssembler* masm, + const ProcessingState& state) { + __ int3(); +} + void CheckedInternalizedString::AllocateVreg( MaglevVregAllocationState* vreg_state) { UseRegister(object_input()); @@ -1433,7 +1462,7 @@ void CheckedInternalizedString::AllocateVreg( void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { Register object = ToRegister(object_input()); - RegList temps = temporaries(); + RegList temps = general_temporaries(); Register map_tmp = temps.PopFirst(); if (check_type_ == CheckType::kOmitHeapObjectCheck) { @@ -1449,9 +1478,10 @@ void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm, __ testw(FieldOperand(map_tmp, Map::kInstanceTypeOffset), Immediate(kIsNotStringMask | kIsNotInternalizedMask)); static_assert((kStringTag | kInternalizedTag) == 0); + ZoneLabelRef done(masm); __ JumpToDeferredIf( not_zero, - [](MaglevAssembler* masm, Label* return_label, Register object, + [](MaglevAssembler* masm, ZoneLabelRef done, Register object, CheckedInternalizedString* node, EagerDeoptInfo* deopt_info, Register map_tmp) { __ RecordComment("Deferred Test IsThinString"); @@ -1465,7 +1495,7 @@ void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm, __ j(zero, &deopt_info->deopt_entry_label); __ LoadTaggedPointerField( object, 
FieldOperand(object, ThinString::kActualOffset)); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ RecordComment("DCHECK IsInternalizedString"); __ LoadMap(map_tmp, object); __ testw(FieldOperand(map_tmp, Map::kInstanceTypeOffset), @@ -1473,9 +1503,85 @@ void CheckedInternalizedString::GenerateCode(MaglevAssembler* masm, static_assert((kStringTag | kInternalizedTag) == 0); __ Check(zero, AbortReason::kUnexpectedValue); } - __ jmp(return_label); + __ jmp(*done); }, - object, this, eager_deopt_info(), map_tmp); + done, object, this, eager_deopt_info(), map_tmp); + __ bind(*done); +} + +void CheckedObjectToIndex::AllocateVreg(MaglevVregAllocationState* vreg_state) { + UseRegister(object_input()); + DefineAsRegister(vreg_state, this); + set_double_temporaries_needed(1); +} +void CheckedObjectToIndex::GenerateCode(MaglevAssembler* masm, + const ProcessingState& state) { + Register object = ToRegister(object_input()); + Register result_reg = ToRegister(result()); + + ZoneLabelRef done(masm); + Condition is_smi = __ CheckSmi(object); + __ JumpToDeferredIf( + NegateCondition(is_smi), + [](MaglevAssembler* masm, Register object, Register result_reg, + ZoneLabelRef done, CheckedObjectToIndex* node) { + Label is_string; + __ LoadMap(kScratchRegister, object); + __ CmpInstanceTypeRange(kScratchRegister, kScratchRegister, + FIRST_STRING_TYPE, LAST_STRING_TYPE); + __ j(below_equal, &is_string); + + __ cmpl(kScratchRegister, Immediate(HEAP_NUMBER_TYPE)); + // The IC will go generic if it encounters something other than a + // Number or String key. + __ EmitEagerDeoptIf(not_equal, DeoptimizeReason::kNotInt32, node); + + // Heap Number. + { + DoubleRegister number_value = node->double_temporaries().first(); + DoubleRegister converted_back = kScratchDoubleReg; + // Convert the input float64 value to int32. + __ Cvttsd2si(result_reg, number_value); + // Convert that int32 value back to float64. + __ Cvtlsi2sd(converted_back, result_reg); + // Check that the result of the float64->int32->float64 is equal to + // the input (i.e. that the conversion didn't truncate). + __ Ucomisd(number_value, converted_back); + __ j(equal, *done); + __ EmitEagerDeopt(node, DeoptimizeReason::kNotInt32); + } + + // String. + __ bind(&is_string); + { + RegisterSnapshot snapshot = node->register_snapshot(); + snapshot.live_registers.clear(result_reg); + DCHECK(!snapshot.live_tagged_registers.has(result_reg)); + { + SaveRegisterStateForCall save_register_state(masm, snapshot); + AllowExternalCallThatCantCauseGC scope(masm); + __ PrepareCallCFunction(1); + __ Move(arg_reg_1, object); + __ CallCFunction( + ExternalReference::string_to_array_index_function(), 1); + // No need for safepoint since this is a fast C call. + __ Move(result_reg, kReturnRegister0); + } + __ cmpl(result_reg, Immediate(0)); + __ j(greater_equal, *done); + __ EmitEagerDeopt(node, DeoptimizeReason::kNotInt32); + } + }, + object, result_reg, done, this); + + // If we didn't enter the deferred block, we're a Smi.
+ if (result_reg == object) { + __ SmiUntag(object); + } else { + __ SmiUntag(result_reg, object); + } + + __ bind(*done); } void LoadTaggedField::AllocateVreg(MaglevVregAllocationState* vreg_state) { @@ -1500,7 +1606,7 @@ void LoadDoubleField::AllocateVreg(MaglevVregAllocationState* vreg_state) { } void LoadDoubleField::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { - Register tmp = temporaries().PopFirst(); + Register tmp = general_temporaries().PopFirst(); Register object = ToRegister(object_input()); __ AssertNotSmi(object); __ DecompressAnyTagged(tmp, FieldOperand(object, offset())); @@ -1524,33 +1630,22 @@ void LoadTaggedElement::GenerateCode(MaglevAssembler* masm, Register index = ToRegister(index_input()); Register result_reg = ToRegister(result()); __ AssertNotSmi(object); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ CmpObjectType(object, JS_OBJECT_TYPE, kScratchRegister); __ Assert(above_equal, AbortReason::kUnexpectedValue); } __ DecompressAnyTagged(kScratchRegister, FieldOperand(object, JSObject::kElementsOffset)); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ CmpObjectType(kScratchRegister, FIXED_ARRAY_TYPE, kScratchRegister); __ Assert(equal, AbortReason::kUnexpectedValue); // Reload since CmpObjectType clobbered the scratch register. __ DecompressAnyTagged(kScratchRegister, FieldOperand(object, JSObject::kElementsOffset)); } - __ AssertSmi(index); - // Zero out top bits of index reg (these were previously either zero already, - // or the cage base). This technically mutates it, but since it's a Smi, that - // doesn't matter. - __ movl(index, index); - static_assert(kSmiTagSize + kSmiShiftSize < times_tagged_size, - "Folding the Smi shift into the FixedArray entry size shift " - "only works if the shift is small"); __ DecompressAnyTagged( - result_reg, - FieldOperand(kScratchRegister, index, - static_cast<ScaleFactor>(times_tagged_size - - (kSmiTagSize + kSmiShiftSize)), - FixedArray::kHeaderSize)); + result_reg, FieldOperand(kScratchRegister, index, times_tagged_size, + FixedArray::kHeaderSize)); } void LoadDoubleElement::AllocateVreg(MaglevVregAllocationState* vreg_state) { @@ -1564,13 +1659,13 @@ void LoadDoubleElement::GenerateCode(MaglevAssembler* masm, Register index = ToRegister(index_input()); DoubleRegister result_reg = ToDoubleRegister(result()); __ AssertNotSmi(object); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ CmpObjectType(object, JS_OBJECT_TYPE, kScratchRegister); __ Assert(above_equal, AbortReason::kUnexpectedValue); } __ DecompressAnyTagged(kScratchRegister, FieldOperand(object, JSObject::kElementsOffset)); - if (FLAG_debug_code) { + if (v8_flags.debug_code) { __ CmpObjectType(kScratchRegister, FIXED_DOUBLE_ARRAY_TYPE, kScratchRegister); __ Assert(equal, AbortReason::kUnexpectedValue); @@ -1578,19 +1673,8 @@ void LoadDoubleElement::GenerateCode(MaglevAssembler* masm, __ DecompressAnyTagged(kScratchRegister, FieldOperand(object, JSObject::kElementsOffset)); } - __ AssertSmi(index); - // Zero out top bits of index reg (these were previously either zero already, - // or the cage base). This technically mutates it, but since it's a Smi, that - // doesn't matter. 
- __ movl(index, index); - static_assert(kSmiTagSize + kSmiShiftSize < times_8, - "Folding the Smi shift into the FixedArray entry size shift " - "only works if the shift is small"); - __ Movsd(result_reg, - FieldOperand(kScratchRegister, index, - static_cast<ScaleFactor>(times_8 - - (kSmiTagSize + kSmiShiftSize)), - FixedDoubleArray::kHeaderSize)); + __ Movsd(result_reg, FieldOperand(kScratchRegister, index, times_8, + FixedDoubleArray::kHeaderSize)); } void StoreTaggedFieldNoWriteBarrier::AllocateVreg( @@ -1628,14 +1712,15 @@ void StoreTaggedFieldWithWriteBarrier::GenerateCode( __ AssertNotSmi(object); __ StoreTaggedField(FieldOperand(object, offset()), value); + ZoneLabelRef done(masm); DeferredCodeInfo* deferred_write_barrier = __ PushDeferredCode( - [](MaglevAssembler* masm, Label* return_label, Register value, + [](MaglevAssembler* masm, ZoneLabelRef done, Register value, Register object, StoreTaggedFieldWithWriteBarrier* node) { ASM_CODE_COMMENT_STRING(masm, "Write barrier slow path"); __ CheckPageFlag( value, kScratchRegister, MemoryChunk::kPointersToHereAreInterestingOrInSharedHeapMask, zero, - return_label); + *done); Register slot_reg = WriteBarrierDescriptor::SlotAddressRegister(); RegList saved; @@ -1654,15 +1739,15 @@ void StoreTaggedFieldWithWriteBarrier::GenerateCode( __ CallRecordWriteStub(object, slot_reg, save_fp_mode); __ PopAll(saved); - __ jmp(return_label); + __ jmp(*done); }, - value, object, this); + done, value, object, this); - __ JumpIfSmi(value, &deferred_write_barrier->return_label); + __ JumpIfSmi(value, *done); __ CheckPageFlag(object, kScratchRegister, MemoryChunk::kPointersFromHereAreInterestingMask, not_zero, &deferred_write_barrier->deferred_code_label); - __ bind(&deferred_write_barrier->return_label); + __ bind(*done); } void StoreTaggedFieldWithWriteBarrier::PrintParams( std::ostream& os, MaglevGraphLabeller* graph_labeller) const { @@ -1745,6 +1830,25 @@ void SetNamedGeneric::PrintParams(std::ostream& os, os << "(" << name_ << ")"; } +void StringLength::AllocateVreg(MaglevVregAllocationState* vreg_state) { + UseRegister(object_input()); + DefineAsRegister(vreg_state, this); +} +void StringLength::GenerateCode(MaglevAssembler* masm, + const ProcessingState& state) { + Register object = ToRegister(object_input()); + if (v8_flags.debug_code) { + // Use return register as temporary. + Register tmp = ToRegister(result()); + // Check if {object} is a string. + __ AssertNotSmi(object); + __ LoadMap(tmp, object); + __ CmpInstanceTypeRange(tmp, tmp, FIRST_STRING_TYPE, LAST_STRING_TYPE); + __ Check(below_equal, AbortReason::kUnexpectedValue); + } + __ movl(ToRegister(result()), FieldOperand(object, String::kLengthOffset)); +} + void DefineNamedOwnGeneric::AllocateVreg( MaglevVregAllocationState* vreg_state) { using D = CallInterfaceDescriptorFor<Builtin::kDefineNamedOwnIC>::type; @@ -2016,6 +2120,10 @@ void Int32AddWithOverflow::GenerateCode(MaglevAssembler* masm, Register left = ToRegister(left_input()); Register right = ToRegister(right_input()); __ addl(left, right); + // None of the mutated input registers should be a register input into the + // eager deopt info. 
+ DCHECK_REGLIST_EMPTY(RegList{left} & + GetGeneralRegistersUsedAsInputs(eager_deopt_info())); __ EmitEagerDeoptIf(overflow, DeoptimizeReason::kOverflow, this); } @@ -2031,6 +2139,10 @@ void Int32SubtractWithOverflow::GenerateCode(MaglevAssembler* masm, Register left = ToRegister(left_input()); Register right = ToRegister(right_input()); __ subl(left, right); + // None of the mutated input registers should be a register input into the + // eager deopt info. + DCHECK_REGLIST_EMPTY(RegList{left} & + GetGeneralRegistersUsedAsInputs(eager_deopt_info())); __ EmitEagerDeoptIf(overflow, DeoptimizeReason::kOverflow, this); } @@ -2048,10 +2160,14 @@ void Int32MultiplyWithOverflow::GenerateCode(MaglevAssembler* masm, Register right = ToRegister(right_input()); DCHECK_EQ(result, ToRegister(left_input())); - Register saved_left = temporaries().first(); + Register saved_left = general_temporaries().first(); __ movl(saved_left, result); // TODO(leszeks): peephole optimise multiplication by a constant. __ imull(result, right); + // None of the mutated input registers should be a register input into the + // eager deopt info. + DCHECK_REGLIST_EMPTY(RegList{saved_left, result} & + GetGeneralRegistersUsedAsInputs(eager_deopt_info())); __ EmitEagerDeoptIf(overflow, DeoptimizeReason::kOverflow, this); // If the result is zero, check if either lhs or rhs is negative. @@ -2082,8 +2198,8 @@ void Int32DivideWithOverflow::AllocateVreg( void Int32DivideWithOverflow::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { - DCHECK(temporaries().has(rax)); - DCHECK(temporaries().has(rdx)); + DCHECK(general_temporaries().has(rax)); + DCHECK(general_temporaries().has(rdx)); Register left = ToRegister(left_input()); Register right = ToRegister(right_input()); __ movl(rax, left); @@ -2099,9 +2215,10 @@ void Int32DivideWithOverflow::GenerateCode(MaglevAssembler* masm, // Check if {right} is positive (and not zero). __ cmpl(right, Immediate(0)); + ZoneLabelRef done(masm); __ JumpToDeferredIf( less_equal, - [](MaglevAssembler* masm, Label* return_label, Register right, + [](MaglevAssembler* masm, ZoneLabelRef done, Register right, Int32DivideWithOverflow* node) { // {right} is negative or zero. @@ -2122,20 +2239,25 @@ void Int32DivideWithOverflow::GenerateCode(MaglevAssembler* masm, // Check if {left} is kMinInt and {right} is -1, in which case we'd have // to return -kMinInt, which is not representable as Int32. __ cmpl(rax, Immediate(kMinInt)); - __ j(not_equal, return_label); + __ j(not_equal, *done); __ cmpl(right, Immediate(-1)); - __ j(not_equal, return_label); + __ j(not_equal, *done); // TODO(leszeks): Better DeoptimizeReason = kOverflow, but // eager_deopt_info is already configured as kNotInt32. __ EmitEagerDeopt(node, DeoptimizeReason::kNotInt32); }, - right, this); + done, right, this); + __ bind(*done); // Perform the actual integer division. __ idivl(right); // Check that the remainder is zero. __ cmpl(rdx, Immediate(0)); + // None of the mutated input registers should be a register input into the + // eager deopt info. + DCHECK_REGLIST_EMPTY(RegList{rax, rdx} & + GetGeneralRegistersUsedAsInputs(eager_deopt_info())); __ EmitEagerDeoptIf(not_equal, DeoptimizeReason::kNotInt32, this); DCHECK_EQ(ToRegister(result()), rax); } @@ -2229,6 +2351,10 @@ void Int32ShiftRightLogical::GenerateCode(MaglevAssembler* masm, // TODO(jgruber): Properly track signed/unsigned representations and // allocate a heap number if the result is outside smi range.
__ testl(left, Immediate((1 << 31) | (1 << 30))); + // None of the mutated input registers should be a register input into the + // eager deopt info. + DCHECK_REGLIST_EMPTY(RegList{left} & + GetGeneralRegistersUsedAsInputs(eager_deopt_info())); __ EmitEagerDeoptIf(not_equal, DeoptimizeReason::kOverflow, this); } @@ -2422,9 +2548,26 @@ void CheckedSmiTag::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { Register reg = ToRegister(input()); __ addl(reg, reg); + // None of the mutated input registers should be a register input into the + // eager deopt info. + DCHECK_REGLIST_EMPTY(RegList{reg} & + GetGeneralRegistersUsedAsInputs(eager_deopt_info())); __ EmitEagerDeoptIf(overflow, DeoptimizeReason::kOverflow, this); } +void UnsafeSmiTag::AllocateVreg(MaglevVregAllocationState* vreg_state) { + UseRegister(input()); + DefineSameAsFirst(vreg_state, this); +} +void UnsafeSmiTag::GenerateCode(MaglevAssembler* masm, + const ProcessingState& state) { + Register reg = ToRegister(input()); + __ addl(reg, reg); + if (v8_flags.debug_code) { + __ Check(no_overflow, AbortReason::kInputDoesNotFitSmi); + } +} + void Int32Constant::AllocateVreg(MaglevVregAllocationState* vreg_state) { DefineAsConstant(vreg_state, this); } @@ -2499,41 +2642,53 @@ void LogicalNot::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { Register object = ToRegister(value()); Register return_value = ToRegister(result()); - Label not_equal_true; - // We load the constant true to the return value and we return it if the - // object is not equal to it. Otherwise we load the constant false. - __ LoadRoot(return_value, RootIndex::kTrueValue); - __ cmp_tagged(return_value, object); - __ j(not_equal, &not_equal_true); - __ LoadRoot(return_value, RootIndex::kFalseValue); - if (FLAG_debug_code) { - Label is_equal_true; - __ jmp(&is_equal_true); - __ bind(&not_equal_true); - // LogicalNot expects either the constants true or false. - // We know it is not true, so it must be false! + + if (v8_flags.debug_code) { + // LogicalNot expects either TrueValue or FalseValue.
+ Label next; __ CompareRoot(object, RootIndex::kFalseValue); + __ j(equal, &next); + __ CompareRoot(object, RootIndex::kTrueValue); __ Check(equal, AbortReason::kUnexpectedValue); - __ bind(&is_equal_true); - } else { - __ bind(&not_equal_true); + __ bind(&next); } + + Label return_false, done; + __ CompareRoot(object, RootIndex::kTrueValue); + __ j(equal, &return_false, Label::kNear); + __ LoadRoot(return_value, RootIndex::kTrueValue); + __ jmp(&done, Label::kNear); + + __ bind(&return_false); + __ LoadRoot(return_value, RootIndex::kFalseValue); + + __ bind(&done); } void SetPendingMessage::AllocateVreg(MaglevVregAllocationState* vreg_state) { UseRegister(value()); + set_temporaries_needed(1); DefineAsRegister(vreg_state, this); } void SetPendingMessage::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { - Register message = ToRegister(value()); + Register new_message = ToRegister(value()); Register return_value = ToRegister(result()); - Isolate* isolate = masm->isolate(); - MemOperand message_op = __ ExternalReferenceAsOperand( - ExternalReference::address_of_pending_message(isolate), kScratchRegister); - __ Move(return_value, message_op); - __ movq(message_op, message); + + MemOperand pending_message_operand = __ ExternalReferenceAsOperand( + ExternalReference::address_of_pending_message(masm->isolate()), + kScratchRegister); + + if (new_message != return_value) { + __ Move(return_value, pending_message_operand); + __ movq(pending_message_operand, new_message); + } else { + Register scratch = general_temporaries().PopFirst(); + __ Move(scratch, pending_message_operand); + __ movq(pending_message_operand, new_message); + __ Move(return_value, scratch); + } } void ToBooleanLogicalNot::AllocateVreg(MaglevVregAllocationState* vreg_state) { @@ -2550,7 +2705,7 @@ void ToBooleanLogicalNot::GenerateCode(MaglevAssembler* masm, ToBoolean(masm, object, object_is_true, object_is_false, true); __ bind(*object_is_true); __ LoadRoot(return_value, RootIndex::kFalseValue); - __ jmp(&done); + __ jmp(&done, Label::kNear); __ bind(*object_is_false); __ LoadRoot(return_value, RootIndex::kTrueValue); __ bind(&done); @@ -2591,7 +2746,7 @@ void TaggedNotEqual::GenerateCode(MaglevAssembler* masm, } void TestInstanceOf::AllocateVreg(MaglevVregAllocationState* vreg_state) { - using D = CallInterfaceDescriptorFor<Builtin::kInstanceOf>::type; + using D = CallInterfaceDescriptorFor<Builtin::kInstanceOf_WithFeedback>::type; UseFixed(context(), kContextRegister); UseFixed(object(), D::GetRegisterParameter(D::kLeft)); UseFixed(callable(), D::GetRegisterParameter(D::kRight)); @@ -2599,13 +2754,15 @@ void TestInstanceOf::AllocateVreg(MaglevVregAllocationState* vreg_state) { } void TestInstanceOf::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { + using D = CallInterfaceDescriptorFor<Builtin::kInstanceOf_WithFeedback>::type; #ifdef DEBUG - using D = CallInterfaceDescriptorFor<Builtin::kInstanceOf>::type; DCHECK_EQ(ToRegister(context()), kContextRegister); DCHECK_EQ(ToRegister(object()), D::GetRegisterParameter(D::kLeft)); DCHECK_EQ(ToRegister(callable()), D::GetRegisterParameter(D::kRight)); #endif - __ CallBuiltin(Builtin::kInstanceOf); + __ Move(D::GetRegisterParameter(D::kFeedbackVector), feedback().vector); + __ Move(D::GetRegisterParameter(D::kSlot), Immediate(feedback().index())); + __ CallBuiltin(Builtin::kInstanceOf_WithFeedback); masm->DefineExceptionHandlerAndLazyDeoptPoint(this); } @@ -2618,18 +2775,22 @@ void TestUndetectable::GenerateCode(MaglevAssembler* masm, const
ProcessingState& state) { Register object = ToRegister(value()); Register return_value = ToRegister(result()); - RegList temps = temporaries(); - Register tmp = temps.PopFirst(); - Label done; - __ LoadRoot(return_value, RootIndex::kFalseValue); - // If the object is an Smi, return false. - __ JumpIfSmi(object, &done); - // If it is a HeapObject, load the map and check for the undetectable bit. - __ LoadMap(tmp, object); - __ testl(FieldOperand(tmp, Map::kBitFieldOffset), + Register scratch = general_temporaries().PopFirst(); + + Label return_false, done; + __ JumpIfSmi(object, &return_false, Label::kNear); + // For heap objects, check the map's undetectable bit. + __ LoadMap(scratch, object); + __ testl(FieldOperand(scratch, Map::kBitFieldOffset), Immediate(Map::Bits1::IsUndetectableBit::kMask)); - __ j(zero, &done); + __ j(zero, &return_false, Label::kNear); + __ LoadRoot(return_value, RootIndex::kTrueValue); + __ jmp(&done, Label::kNear); + + __ bind(&return_false); + __ LoadRoot(return_value, RootIndex::kFalseValue); + __ bind(&done); } @@ -2646,80 +2807,80 @@ void TestTypeOf::GenerateCode(MaglevAssembler* masm, Label is_true, is_false, done; switch (literal_) { case LiteralFlag::kNumber: - __ JumpIfSmi(object, &is_true); + __ JumpIfSmi(object, &is_true, Label::kNear); __ CompareRoot(FieldOperand(object, HeapObject::kMapOffset), RootIndex::kHeapNumberMap); - __ j(not_equal, &is_false); + __ j(not_equal, &is_false, Label::kNear); break; case LiteralFlag::kString: - __ JumpIfSmi(object, &is_false); + __ JumpIfSmi(object, &is_false, Label::kNear); __ LoadMap(tmp, object); __ cmpw(FieldOperand(tmp, Map::kInstanceTypeOffset), Immediate(FIRST_NONSTRING_TYPE)); - __ j(greater_equal, &is_false); + __ j(greater_equal, &is_false, Label::kNear); break; case LiteralFlag::kSymbol: - __ JumpIfSmi(object, &is_false); + __ JumpIfSmi(object, &is_false, Label::kNear); __ LoadMap(tmp, object); __ cmpw(FieldOperand(tmp, Map::kInstanceTypeOffset), Immediate(SYMBOL_TYPE)); - __ j(not_equal, &is_false); + __ j(not_equal, &is_false, Label::kNear); break; case LiteralFlag::kBoolean: __ CompareRoot(object, RootIndex::kTrueValue); - __ j(equal, &is_true); + __ j(equal, &is_true, Label::kNear); __ CompareRoot(object, RootIndex::kFalseValue); - __ j(not_equal, &is_false); + __ j(not_equal, &is_false, Label::kNear); break; case LiteralFlag::kBigInt: - __ JumpIfSmi(object, &is_false); + __ JumpIfSmi(object, &is_false, Label::kNear); __ LoadMap(tmp, object); __ cmpw(FieldOperand(tmp, Map::kInstanceTypeOffset), Immediate(BIGINT_TYPE)); - __ j(not_equal, &is_false); + __ j(not_equal, &is_false, Label::kNear); break; case LiteralFlag::kUndefined: - __ JumpIfSmi(object, &is_false); + __ JumpIfSmi(object, &is_false, Label::kNear); // Check it has the undetectable bit set and it is not null. __ LoadMap(tmp, object); __ testl(FieldOperand(tmp, Map::kBitFieldOffset), Immediate(Map::Bits1::IsUndetectableBit::kMask)); - __ j(zero, &is_false); + __ j(zero, &is_false, Label::kNear); __ CompareRoot(object, RootIndex::kNullValue); - __ j(equal, &is_false); + __ j(equal, &is_false, Label::kNear); break; case LiteralFlag::kFunction: - __ JumpIfSmi(object, &is_false); + __ JumpIfSmi(object, &is_false, Label::kNear); // Check if callable bit is set and not undetectable. 
__ LoadMap(tmp, object); __ movl(tmp, FieldOperand(tmp, Map::kBitFieldOffset)); __ andl(tmp, Immediate(Map::Bits1::IsUndetectableBit::kMask | Map::Bits1::IsCallableBit::kMask)); __ cmpl(tmp, Immediate(Map::Bits1::IsCallableBit::kMask)); - __ j(not_equal, &is_false); + __ j(not_equal, &is_false, Label::kNear); break; case LiteralFlag::kObject: - __ JumpIfSmi(object, &is_false); + __ JumpIfSmi(object, &is_false, Label::kNear); // If the object is null then return true. __ CompareRoot(object, RootIndex::kNullValue); - __ j(equal, &is_true); + __ j(equal, &is_true, Label::kNear); // Check if the object is a receiver type, __ LoadMap(tmp, object); __ cmpw(FieldOperand(tmp, Map::kInstanceTypeOffset), Immediate(FIRST_JS_RECEIVER_TYPE)); - __ j(less, &is_false); + __ j(less, &is_false, Label::kNear); // ... and is not undefined (undetectable) nor callable. __ testl(FieldOperand(tmp, Map::kBitFieldOffset), Immediate(Map::Bits1::IsUndetectableBit::kMask | Map::Bits1::IsCallableBit::kMask)); - __ j(not_zero, &is_false); + __ j(not_zero, &is_false, Label::kNear); break; case LiteralFlag::kOther: UNREACHABLE(); } __ bind(&is_true); __ LoadRoot(ToRegister(result()), RootIndex::kTrueValue); - __ jmp(&done); + __ jmp(&done, Label::kNear); __ bind(&is_false); __ LoadRoot(ToRegister(result()), RootIndex::kFalseValue); __ bind(&done); @@ -2823,6 +2984,40 @@ void ChangeInt32ToFloat64::GenerateCode(MaglevAssembler* masm, __ Cvtlsi2sd(ToDoubleRegister(result()), ToRegister(input())); } +void CheckedTruncateFloat64ToInt32::AllocateVreg( + MaglevVregAllocationState* vreg_state) { + UseRegister(input()); + DefineAsRegister(vreg_state, this); +} +void CheckedTruncateFloat64ToInt32::GenerateCode(MaglevAssembler* masm, + const ProcessingState& state) { + DoubleRegister input_reg = ToDoubleRegister(input()); + Register result_reg = ToRegister(result()); + DoubleRegister converted_back = kScratchDoubleReg; + + // Convert the input float64 value to int32. + __ Cvttsd2si(result_reg, input_reg); + // Convert that int32 value back to float64. + __ Cvtlsi2sd(converted_back, result_reg); + // Check that the result of the float64->int32->float64 is equal to the input + // (i.e. that the conversion didn't truncate). + __ Ucomisd(input_reg, converted_back); + __ EmitEagerDeoptIf(not_equal, DeoptimizeReason::kNotInt32, this); + + // Check if {input} is -0. + Label check_done; + __ cmpl(result_reg, Immediate(0)); + __ j(not_equal, &check_done); + + // In case of 0, we need to check the high bits for the IEEE -0 pattern. + Register high_word32_of_input = kScratchRegister; + __ Pextrd(high_word32_of_input, input_reg, 1); + __ cmpl(high_word32_of_input, Immediate(0)); + __ EmitEagerDeoptIf(less, DeoptimizeReason::kNotInt32, this); + + __ bind(&check_done); +} + void Phi::AllocateVreg(MaglevVregAllocationState* vreg_state) { // Phi inputs are processed in the post-process, once loop phis' inputs' // v-regs are allocated.
@@ -2892,7 +3087,7 @@ void Call::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { } void Construct::AllocateVreg(MaglevVregAllocationState* vreg_state) { - using D = ConstructStubDescriptor; + using D = Construct_WithFeedbackDescriptor; UseFixed(function(), D::GetRegisterParameter(D::kTarget)); UseFixed(new_target(), D::GetRegisterParameter(D::kNewTarget)); UseFixed(context(), kContextRegister); @@ -2903,7 +3098,7 @@ void Construct::AllocateVreg(MaglevVregAllocationState* vreg_state) { } void Construct::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { - using D = ConstructStubDescriptor; + using D = Construct_WithFeedbackDescriptor; DCHECK_EQ(ToRegister(function()), D::GetRegisterParameter(D::kTarget)); DCHECK_EQ(ToRegister(new_target()), D::GetRegisterParameter(D::kNewTarget)); DCHECK_EQ(ToRegister(context()), kContextRegister); @@ -2911,13 +3106,14 @@ void Construct::GenerateCode(MaglevAssembler* masm, for (int i = num_args() - 1; i >= 0; --i) { __ PushInput(arg(i)); } + __ Push(feedback().vector); uint32_t arg_count = num_args(); __ Move(D::GetRegisterParameter(D::kActualArgumentsCount), Immediate(arg_count)); + __ Move(D::GetRegisterParameter(D::kSlot), Immediate(feedback().index())); - __ CallBuiltin(Builtin::kConstruct); - + __ CallBuiltin(Builtin::kConstruct_WithFeedback); masm->DefineExceptionHandlerAndLazyDeoptPoint(this); } @@ -2934,7 +3130,6 @@ void CallBuiltin::AllocateVreg(MaglevVregAllocationState* vreg_state) { if (has_context) { UseFixed(input(i), kContextRegister); } - DCHECK_EQ(descriptor.GetReturnCount(), 1); DefineAsFixed(vreg_state, this, kReturnRegister0); } @@ -3110,7 +3305,7 @@ void IncreaseInterruptBudget::AllocateVreg( } void IncreaseInterruptBudget::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { - Register scratch = temporaries().first(); + Register scratch = general_temporaries().first(); __ movq(scratch, MemOperand(rbp, StandardFrameConstants::kFunctionOffset)); __ LoadTaggedPointerField( scratch, FieldOperand(scratch, JSFunction::kFeedbackCellOffset)); @@ -3128,15 +3323,16 @@ void ReduceInterruptBudget::AllocateVreg( } void ReduceInterruptBudget::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { - Register scratch = temporaries().first(); + Register scratch = general_temporaries().first(); __ movq(scratch, MemOperand(rbp, StandardFrameConstants::kFunctionOffset)); __ LoadTaggedPointerField( scratch, FieldOperand(scratch, JSFunction::kFeedbackCellOffset)); __ subl(FieldOperand(scratch, FeedbackCell::kInterruptBudgetOffset), Immediate(amount())); + ZoneLabelRef done(masm); __ JumpToDeferredIf( less, - [](MaglevAssembler* masm, Label* return_label, + [](MaglevAssembler* masm, ZoneLabelRef done, ReduceInterruptBudget* node) { { SaveRegisterStateForCall save_register_state( @@ -3148,9 +3344,10 @@ void ReduceInterruptBudget::GenerateCode(MaglevAssembler* masm, save_register_state.DefineSafepointWithLazyDeopt( node->lazy_deopt_info()); } - __ jmp(return_label); + __ jmp(*done); }, - this); + done, this); + __ bind(*done); } void ReduceInterruptBudget::PrintParams( std::ostream& os, MaglevGraphLabeller* graph_labeller) const { @@ -3171,12 +3368,11 @@ void ThrowReferenceErrorIfHole::GenerateCode(MaglevAssembler* masm, } __ JumpToDeferredIf( equal, - [](MaglevAssembler* masm, Label* return_label, - ThrowReferenceErrorIfHole* node) { + [](MaglevAssembler* masm, ThrowReferenceErrorIfHole* node) { __ Move(kContextRegister, masm->native_context().object()); __ Push(node->name().object()); __ 
CallRuntime(Runtime::kThrowAccessedUninitializedVariable, 1); - masm->DefineLazyDeoptPoint(node->lazy_deopt_info()); + masm->DefineExceptionHandlerAndLazyDeoptPoint(node); __ Abort(AbortReason::kUnexpectedReturnFromThrow); }, this); @@ -3196,11 +3392,10 @@ void ThrowSuperNotCalledIfHole::GenerateCode(MaglevAssembler* masm, } __ JumpToDeferredIf( equal, - [](MaglevAssembler* masm, Label* return_label, - ThrowSuperNotCalledIfHole* node) { + [](MaglevAssembler* masm, ThrowSuperNotCalledIfHole* node) { __ Move(kContextRegister, masm->native_context().object()); __ CallRuntime(Runtime::kThrowSuperNotCalled, 0); - masm->DefineLazyDeoptPoint(node->lazy_deopt_info()); + masm->DefineExceptionHandlerAndLazyDeoptPoint(node); __ Abort(AbortReason::kUnexpectedReturnFromThrow); }, this); @@ -3220,11 +3415,10 @@ void ThrowSuperAlreadyCalledIfNotHole::GenerateCode( } __ JumpToDeferredIf( not_equal, - [](MaglevAssembler* masm, Label* return_label, - ThrowSuperAlreadyCalledIfNotHole* node) { + [](MaglevAssembler* masm, ThrowSuperAlreadyCalledIfNotHole* node) { __ Move(kContextRegister, masm->native_context().object()); __ CallRuntime(Runtime::kThrowSuperAlreadyCalledError, 0); - masm->DefineLazyDeoptPoint(node->lazy_deopt_info()); + masm->DefineExceptionHandlerAndLazyDeoptPoint(node); __ Abort(AbortReason::kUnexpectedReturnFromThrow); }, this); @@ -3242,13 +3436,12 @@ void ThrowIfNotSuperConstructor::GenerateCode(MaglevAssembler* masm, Immediate(Map::Bits1::IsConstructorBit::kMask)); __ JumpToDeferredIf( equal, - [](MaglevAssembler* masm, Label* return_label, - ThrowIfNotSuperConstructor* node) { - __ Move(kContextRegister, masm->native_context().object()); + [](MaglevAssembler* masm, ThrowIfNotSuperConstructor* node) { __ Push(ToRegister(node->constructor())); __ Push(ToRegister(node->function())); + __ Move(kContextRegister, masm->native_context().object()); __ CallRuntime(Runtime::kThrowNotSuperConstructor, 2); - masm->DefineLazyDeoptPoint(node->lazy_deopt_info()); + masm->DefineExceptionHandlerAndLazyDeoptPoint(node); __ Abort(AbortReason::kUnexpectedReturnFromThrow); }, this); @@ -3356,7 +3549,8 @@ void JumpFromInlined::GenerateCode(MaglevAssembler* masm, namespace { -void AttemptOnStackReplacement(MaglevAssembler* masm, Label* return_label, +void AttemptOnStackReplacement(MaglevAssembler* masm, + ZoneLabelRef no_code_for_osr, JumpLoopPrologue* node, Register scratch0, Register scratch1, int32_t loop_depth, FeedbackSlot feedback_slot, @@ -3370,6 +3564,7 @@ void AttemptOnStackReplacement(MaglevAssembler* masm, Label* return_label, // See also: InterpreterAssembler::OnStackReplacement. baseline::BaselineAssembler basm(masm); + __ AssertFeedbackVector(scratch0); // Case 1). Label deopt; @@ -3381,11 +3576,10 @@ void AttemptOnStackReplacement(MaglevAssembler* masm, Label* return_label, // Case 2). { - __ AssertFeedbackVector(scratch0); __ movb(scratch0, FieldOperand(scratch0, FeedbackVector::kOsrStateOffset)); __ DecodeField<FeedbackVector::OsrUrgencyBits>(scratch0); basm.JumpIfByte(baseline::Condition::kUnsignedLessThanEqual, scratch0, - loop_depth, return_label, Label::kNear); + loop_depth, *no_code_for_osr, Label::kNear); // The osr_urgency exceeds the current loop_depth, signaling an OSR // request. Call into runtime to compile. 
@@ -3413,23 +3607,29 @@ void AttemptOnStackReplacement(MaglevAssembler* masm, Label* return_label, } } }); + DCHECK(!snapshot.live_registers.has(maybe_target_code)); SaveRegisterStateForCall save_register_state(masm, snapshot); __ Move(kContextRegister, masm->native_context().object()); __ Push(Smi::FromInt(osr_offset.ToInt())); __ CallRuntime(Runtime::kCompileOptimizedOSRFromMaglev, 1); save_register_state.DefineSafepoint(); - __ Move(scratch0, rax); + __ Move(maybe_target_code, kReturnRegister0); } // A `0` return value means there is no OSR code available yet. Fall // through for now, OSR code will be picked up once it exists and is // cached on the feedback vector. - __ testq(scratch0, scratch0); - __ j(equal, return_label, Label::kNear); + __ Cmp(maybe_target_code, 0); + __ j(equal, *no_code_for_osr, Label::kNear); } __ bind(&deopt); - if (V8_LIKELY(FLAG_turbofan)) { + if (V8_LIKELY(v8_flags.turbofan)) { + // None of the mutated input registers should be a register input into the + // eager deopt info. + DCHECK_REGLIST_EMPTY( + RegList{scratch0, scratch1} & + GetGeneralRegistersUsedAsInputs(node->eager_deopt_info())); __ EmitEagerDeopt(node, DeoptimizeReason::kPrepareForOnStackReplacement); } else { // Fall through. With TF disabled we cannot OSR and thus it doesn't make @@ -3446,8 +3646,8 @@ void JumpLoopPrologue::AllocateVreg(MaglevVregAllocationState* vreg_state) { } void JumpLoopPrologue::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) { - Register scratch0 = temporaries().PopFirst(); - Register scratch1 = temporaries().PopFirst(); + Register scratch0 = general_temporaries().PopFirst(); + Register scratch1 = general_temporaries().PopFirst(); const Register osr_state = scratch1; __ Move(scratch0, unit_->feedback().object()); @@ -3459,8 +3659,11 @@ void JumpLoopPrologue::GenerateCode(MaglevAssembler* masm, static_assert(FeedbackVector::MaybeHasOptimizedOsrCodeBit::encode(true) > FeedbackVector::kMaxOsrUrgency); __ cmpl(osr_state, Immediate(loop_depth_)); - __ JumpToDeferredIf(above, AttemptOnStackReplacement, this, scratch0, - scratch1, loop_depth_, feedback_slot_, osr_offset_); + ZoneLabelRef no_code_for_osr(masm); + __ JumpToDeferredIf(above, AttemptOnStackReplacement, no_code_for_osr, this, + scratch0, scratch1, loop_depth_, feedback_slot_, + osr_offset_); + __ bind(*no_code_for_osr); } void JumpLoop::AllocateVreg(MaglevVregAllocationState* vreg_state) {} diff --git a/deps/v8/src/maglev/maglev-ir.h b/deps/v8/src/maglev/maglev-ir.h index a1766807f97352..7616941df1e04c 100644 --- a/deps/v8/src/maglev/maglev-ir.h +++ b/deps/v8/src/maglev/maglev-ir.h @@ -15,6 +15,7 @@ #include "src/common/globals.h" #include "src/common/operation.h" #include "src/compiler/backend/instruction.h" +#include "src/compiler/feedback-source.h" #include "src/compiler/heap-refs.h" #include "src/deoptimizer/deoptimize-reason.h" #include "src/interpreter/bytecode-flags.h" @@ -117,68 +118,72 @@ class CompactInterpreterFrameState; V(RootConstant) \ V(SmiConstant) -#define VALUE_NODE_LIST(V) \ - V(Call) \ - V(CallBuiltin) \ - V(CallRuntime) \ - V(CallWithSpread) \ - V(Construct) \ - V(ConstructWithSpread) \ - V(CreateEmptyArrayLiteral) \ - V(CreateArrayLiteral) \ - V(CreateShallowArrayLiteral) \ - V(CreateObjectLiteral) \ - V(CreateEmptyObjectLiteral) \ - V(CreateShallowObjectLiteral) \ - V(CreateFunctionContext) \ - V(CreateClosure) \ - V(FastCreateClosure) \ - V(CreateRegExpLiteral) \ - V(DeleteProperty) \ - V(ForInPrepare) \ - V(ForInNext) \ - V(GeneratorRestoreRegister) \ - V(GetIterator) \ - 
V(GetSecondReturnedValue) \ - V(GetTemplateObject) \ - V(InitialValue) \ - V(LoadTaggedField) \ - V(LoadDoubleField) \ - V(LoadTaggedElement) \ - V(LoadDoubleElement) \ - V(LoadGlobal) \ - V(LoadNamedGeneric) \ - V(LoadNamedFromSuperGeneric) \ - V(SetNamedGeneric) \ - V(DefineNamedOwnGeneric) \ - V(StoreInArrayLiteralGeneric) \ - V(StoreGlobal) \ - V(GetKeyedGeneric) \ - V(SetKeyedGeneric) \ - V(DefineKeyedOwnGeneric) \ - V(Phi) \ - V(RegisterInput) \ - V(CheckedSmiTag) \ - V(CheckedSmiUntag) \ - V(CheckedInternalizedString) \ - V(ChangeInt32ToFloat64) \ - V(Float64Box) \ - V(CheckedFloat64Unbox) \ - V(LogicalNot) \ - V(SetPendingMessage) \ - V(ToBooleanLogicalNot) \ - V(TaggedEqual) \ - V(TaggedNotEqual) \ - V(TestInstanceOf) \ - V(TestUndetectable) \ - V(TestTypeOf) \ - V(ToName) \ - V(ToNumberOrNumeric) \ - V(ToObject) \ - V(ToString) \ - CONSTANT_VALUE_NODE_LIST(V) \ - INT32_OPERATIONS_NODE_LIST(V) \ - FLOAT64_OPERATIONS_NODE_LIST(V) \ +#define VALUE_NODE_LIST(V) \ + V(Call) \ + V(CallBuiltin) \ + V(CallRuntime) \ + V(CallWithSpread) \ + V(Construct) \ + V(ConstructWithSpread) \ + V(CreateEmptyArrayLiteral) \ + V(CreateArrayLiteral) \ + V(CreateShallowArrayLiteral) \ + V(CreateObjectLiteral) \ + V(CreateEmptyObjectLiteral) \ + V(CreateShallowObjectLiteral) \ + V(CreateFunctionContext) \ + V(CreateClosure) \ + V(FastCreateClosure) \ + V(CreateRegExpLiteral) \ + V(DeleteProperty) \ + V(ForInPrepare) \ + V(ForInNext) \ + V(GeneratorRestoreRegister) \ + V(GetIterator) \ + V(GetSecondReturnedValue) \ + V(GetTemplateObject) \ + V(InitialValue) \ + V(LoadTaggedField) \ + V(LoadDoubleField) \ + V(LoadTaggedElement) \ + V(LoadDoubleElement) \ + V(LoadGlobal) \ + V(LoadNamedGeneric) \ + V(LoadNamedFromSuperGeneric) \ + V(SetNamedGeneric) \ + V(DefineNamedOwnGeneric) \ + V(StoreInArrayLiteralGeneric) \ + V(StoreGlobal) \ + V(GetKeyedGeneric) \ + V(SetKeyedGeneric) \ + V(DefineKeyedOwnGeneric) \ + V(Phi) \ + V(RegisterInput) \ + V(CheckedSmiTag) \ + V(UnsafeSmiTag) \ + V(CheckedSmiUntag) \ + V(CheckedInternalizedString) \ + V(CheckedObjectToIndex) \ + V(ChangeInt32ToFloat64) \ + V(CheckedTruncateFloat64ToInt32) \ + V(Float64Box) \ + V(CheckedFloat64Unbox) \ + V(LogicalNot) \ + V(SetPendingMessage) \ + V(StringLength) \ + V(ToBooleanLogicalNot) \ + V(TaggedEqual) \ + V(TaggedNotEqual) \ + V(TestInstanceOf) \ + V(TestUndetectable) \ + V(TestTypeOf) \ + V(ToName) \ + V(ToNumberOrNumeric) \ + V(ToObject) \ + V(ToString) \ + CONSTANT_VALUE_NODE_LIST(V) \ + INT32_OPERATIONS_NODE_LIST(V) \ + FLOAT64_OPERATIONS_NODE_LIST(V) \ GENERIC_OPERATIONS_NODE_LIST(V) #define GAP_MOVE_NODE_LIST(V) \ @@ -196,6 +201,7 @@ class CompactInterpreterFrameState; V(CheckMapsWithMigration) \ V(CheckJSArrayBounds) \ V(CheckJSObjectElementsBounds) \ + V(DebugBreak) \ V(GeneratorStore) \ V(JumpLoopPrologue) \ V(StoreTaggedFieldNoWriteBarrier) \ @@ -442,11 +448,22 @@ class BasicBlockRef { return next_ref_ != nullptr; } + int interrupt_budget_correction() const { + DCHECK_EQ(state_, kRefList); + return interrupt_budget_correction_; + } + + void set_interrupt_budget_correction(int interrupt_budget_correction) { + DCHECK_EQ(state_, kRefList); + interrupt_budget_correction_ = interrupt_budget_correction; + } + private: union { BasicBlock* block_ptr_; BasicBlockRef* next_ref_; }; + int interrupt_budget_correction_ = 0; #ifdef DEBUG enum { kBlockPointer, kRefList } state_; #endif // DEBUG @@ -616,6 +633,7 @@ class ValueLocation { } bool IsAnyRegister() const { return operand_.IsAnyRegister(); } + bool IsGeneralRegister() const { return 
operand_.IsRegister(); } bool IsDoubleRegister() const { return operand_.IsDoubleRegister(); } const compiler::InstructionOperand& operand() const { return operand_; } @@ -761,8 +779,10 @@ class NodeBase : public ZoneObject { using OpPropertiesField = OpcodeField::Next<OpProperties, OpProperties::kSize>; using NumTemporariesNeededField = OpPropertiesField::Next<uint8_t, 2>; + using NumDoubleTemporariesNeededField = + NumTemporariesNeededField::Next<uint8_t, 1>; // Align input count to 32-bit. - using UnusedField = NumTemporariesNeededField::Next<uint8_t, 3>; + using UnusedField = NumDoubleTemporariesNeededField::Next<uint8_t, 2>; using InputCountField = UnusedField::Next<size_t, 17>; static_assert(InputCountField::kShift == 32); @@ -874,13 +894,35 @@ class NodeBase : public ZoneObject { id_ = id; } + template <typename RegisterT> uint8_t num_temporaries_needed() const { - return NumTemporariesNeededField::decode(bitfield_); + if constexpr (std::is_same_v<RegisterT, Register>) { + return NumTemporariesNeededField::decode(bitfield_); + } else { + return NumDoubleTemporariesNeededField::decode(bitfield_); + } + } + + template <typename RegisterT> + RegListBase<RegisterT>& temporaries() { + if constexpr (std::is_same_v<RegisterT, Register>) { + return temporaries_; + } else { + return double_temporaries_; + } } - RegList& temporaries() { return temporaries_; } + RegList& general_temporaries() { return temporaries_; } + DoubleRegList& double_temporaries() { return double_temporaries_; } - void assign_temporaries(RegList list) { temporaries_ = list; } + template <typename RegisterT> + void assign_temporaries(RegListBase<RegisterT> list) { + if constexpr (std::is_same_v<RegisterT, Register>) { + temporaries_ = list; + } else { + double_temporaries_ = list; + } + } void Print(std::ostream& os, MaglevGraphLabeller*, bool skip_targets = false) const; @@ -949,14 +991,23 @@ class NodeBase : public ZoneObject { // // Does not include any registers requested by RequireSpecificTemporary. void set_temporaries_needed(uint8_t value) { - DCHECK_EQ(num_temporaries_needed(), 0); + DCHECK_EQ(num_temporaries_needed<Register>(), 0); bitfield_ = NumTemporariesNeededField::update(bitfield_, value); } + void set_double_temporaries_needed(uint8_t value) { + DCHECK_EQ(num_temporaries_needed<DoubleRegister>(), 0); + bitfield_ = NumDoubleTemporariesNeededField::update(bitfield_, value); + } + // Require that a specific register is free (and therefore clobberable) by the // entry into this node. void RequireSpecificTemporary(Register reg) { temporaries_.set(reg); } + void RequireSpecificDoubleTemporary(DoubleRegister reg) { + double_temporaries_.set(reg); + } + private: template <class Derived, typename... Args> static Derived* Allocate(Zone* zone, size_t input_count, Args&&... args) { @@ -1021,6 +1072,7 @@ class NodeBase : public ZoneObject { uint64_t bitfield_; NodeIdT id_ = kInvalidNodeId; RegList temporaries_; + DoubleRegList double_temporaries_; NodeBase() = delete; NodeBase(const NodeBase&) = delete; @@ -1147,7 +1199,7 @@ class ValueNode : public Node { struct LiveRange { NodeIdT start = kInvalidNodeId; - NodeIdT end = kInvalidNodeId; + NodeIdT end = kInvalidNodeId; // Inclusive. }; bool has_valid_live_range() const { return end_id_ != 0; } @@ -1652,6 +1704,20 @@ class CheckedSmiTag : public FixedInputValueNodeT<1, CheckedSmiTag> { DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() }; +// Input must be guaranteed to fit in a Smi.
+class UnsafeSmiTag : public FixedInputValueNodeT<1, UnsafeSmiTag> { + using Base = FixedInputValueNodeT<1, UnsafeSmiTag>; + + public: + explicit UnsafeSmiTag(uint64_t bitfield) : Base(bitfield) {} + + static constexpr OpProperties kProperties = OpProperties::ConversionNode(); + + Input& input() { return Node::input(0); } + + DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() +}; + class CheckedSmiUntag : public FixedInputValueNodeT<1, CheckedSmiUntag> { using Base = FixedInputValueNodeT<1, CheckedSmiUntag>; @@ -1746,6 +1812,22 @@ class ChangeInt32ToFloat64 DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() }; +class CheckedTruncateFloat64ToInt32 + : public FixedInputValueNodeT<1, CheckedTruncateFloat64ToInt32> { + using Base = FixedInputValueNodeT<1, CheckedTruncateFloat64ToInt32>; + + public: + explicit CheckedTruncateFloat64ToInt32(uint64_t bitfield) : Base(bitfield) {} + + static constexpr OpProperties kProperties = OpProperties::EagerDeopt() | + OpProperties::Int32() | + OpProperties::ConversionNode(); + + Input& input() { return Node::input(0); } + + DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() +}; + class CheckedFloat64Unbox : public FixedInputValueNodeT<1, CheckedFloat64Unbox> { using Base = FixedInputValueNodeT<1, CheckedFloat64Unbox>; @@ -1824,7 +1906,8 @@ class TestInstanceOf : public FixedInputValueNodeT<3, TestInstanceOf> { using Base = FixedInputValueNodeT<3, TestInstanceOf>; public: - explicit TestInstanceOf(uint64_t bitfield) : Base(bitfield) {} + explicit TestInstanceOf(uint64_t bitfield, compiler::FeedbackSource feedback) + : Base(bitfield), feedback_(feedback) {} // The implementation currently calls runtime. static constexpr OpProperties kProperties = OpProperties::JSCall(); @@ -1832,8 +1915,12 @@ class TestInstanceOf : public FixedInputValueNodeT<3, TestInstanceOf> { Input& context() { return input(0); } Input& object() { return input(1); } Input& callable() { return input(2); } + compiler::FeedbackSource feedback() const { return feedback_; } DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() + + private: + const compiler::FeedbackSource feedback_; }; class TestUndetectable : public FixedInputValueNodeT<1, TestUndetectable> { @@ -2197,9 +2284,13 @@ class Constant : public FixedInputValueNodeT<0, Constant> { DECL_NODE_INTERFACE() + compiler::HeapObjectRef object() { return object_; } + void DoLoadToRegister(MaglevAssembler*, OutputRegister); Handle<Object> DoReify(LocalIsolate* isolate); + const compiler::HeapObjectRef& ref() const { return object_; } + private: const compiler::HeapObjectRef object_; }; @@ -2265,7 +2356,8 @@ class CreateArrayLiteral : public FixedInputValueNodeT<0, CreateArrayLiteral> { int flags() const { return flags_; } // The implementation currently calls runtime. - static constexpr OpProperties kProperties = OpProperties::Call(); + static constexpr OpProperties kProperties = + OpProperties::Call() | OpProperties::Throw() | OpProperties::LazyDeopt(); DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() @@ -2325,7 +2417,8 @@ class CreateObjectLiteral int flags() const { return flags_; } // The implementation currently calls runtime. 
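// Runtime calls can throw and can trigger a lazy deopt, so the node's
// properties must include Throw and LazyDeopt in addition to Call.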
- static constexpr OpProperties kProperties = OpProperties::Call(); + static constexpr OpProperties kProperties = + OpProperties::Call() | OpProperties::Throw() | OpProperties::LazyDeopt(); DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() @@ -2700,6 +2793,15 @@ class CheckJSObjectElementsBounds DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() }; +class DebugBreak : public FixedInputNodeT<0, DebugBreak> { + using Base = FixedInputNodeT<0, DebugBreak>; + + public: + explicit DebugBreak(uint64_t bitfield) : Base(bitfield) {} + + DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() +}; + class CheckedInternalizedString : public FixedInputValueNodeT<1, CheckedInternalizedString> { using Base = FixedInputValueNodeT<1, CheckedInternalizedString>; @@ -2711,9 +2813,8 @@ class CheckedInternalizedString CHECK_EQ(properties().value_representation(), ValueRepresentation::kTagged); } - static constexpr OpProperties kProperties = OpProperties::EagerDeopt() | - OpProperties::TaggedValue() | - OpProperties::ConversionNode(); + static constexpr OpProperties kProperties = + OpProperties::EagerDeopt() | OpProperties::TaggedValue(); static constexpr int kObjectIndex = 0; Input& object_input() { return Node::input(kObjectIndex); } @@ -2724,6 +2825,23 @@ class CheckedInternalizedString const CheckType check_type_; }; +class CheckedObjectToIndex + : public FixedInputValueNodeT<1, CheckedObjectToIndex> { + using Base = FixedInputValueNodeT<1, CheckedObjectToIndex>; + + public: + explicit CheckedObjectToIndex(uint64_t bitfield) : Base(bitfield) {} + + static constexpr OpProperties kProperties = + OpProperties::EagerDeopt() | OpProperties::Int32() | + OpProperties::DeferredCall() | OpProperties::ConversionNode(); + + static constexpr int kObjectIndex = 0; + Input& object_input() { return Node::input(kObjectIndex); } + + DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() +}; + class GetTemplateObject : public FixedInputValueNodeT<1, GetTemplateObject> { using Base = FixedInputValueNodeT<1, GetTemplateObject>; @@ -3012,6 +3130,21 @@ class SetNamedGeneric : public FixedInputValueNodeT<3, SetNamedGeneric> { const compiler::FeedbackSource feedback_; }; +class StringLength : public FixedInputValueNodeT<1, StringLength> { + using Base = FixedInputValueNodeT<1, StringLength>; + + public: + explicit StringLength(uint64_t bitfield) : Base(bitfield) {} + + static constexpr OpProperties kProperties = + OpProperties::Reading() | OpProperties::Int32(); + + static constexpr int kObjectIndex = 0; + Input& object_input() { return input(kObjectIndex); } + + DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() +}; + class DefineNamedOwnGeneric : public FixedInputValueNodeT<3, DefineNamedOwnGeneric> { using Base = FixedInputValueNodeT<3, DefineNamedOwnGeneric>; @@ -3210,6 +3343,8 @@ class Phi : public ValueNodeT<Phi> { using Node::reduce_input_count; using Node::set_input; + bool is_exception_phi() const { return input_count() == 0; } + DECL_NODE_INTERFACE() void AllocateVregInPostProcess(MaglevVregAllocationState*); @@ -3279,9 +3414,9 @@ class Construct : public ValueNodeT<Construct> { // This ctor is used for variable input counts. // Inputs must be initialized manually.
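// (A compiler::FeedbackSource is a feedback vector plus slot index; the ctor
// below records the construct site's feedback for use during code generation.)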
- Construct(uint64_t bitfield, ValueNode* function, ValueNode* new_target, - ValueNode* context) - : Base(bitfield) { + Construct(uint64_t bitfield, const compiler::FeedbackSource& feedback, + ValueNode* function, ValueNode* new_target, ValueNode* context) + : Base(bitfield), feedback_(feedback) { set_input(kFunctionIndex, function); set_input(kNewTargetIndex, new_target); set_input(kContextIndex, context); @@ -3300,8 +3435,12 @@ class Construct : public ValueNodeT<Construct> { void set_arg(int i, ValueNode* node) { set_input(i + kFixedInputCount, node); } + compiler::FeedbackSource feedback() const { return feedback_; } DECL_NODE_INTERFACE_WITH_EMPTY_PRINT_PARAMS() + + private: + const compiler::FeedbackSource feedback_; }; class CallBuiltin : public ValueNodeT<CallBuiltin> { @@ -3377,6 +3516,10 @@ class CallBuiltin : public ValueNodeT<CallBuiltin> { void set_arg(int i, ValueNode* node) { set_input(i, node); } + int ReturnCount() const { + return Builtins::CallInterfaceDescriptorFor(builtin_).GetReturnCount(); + } + DECL_NODE_INTERFACE() private: @@ -3556,7 +3699,7 @@ class ThrowReferenceErrorIfHole : Base(bitfield), name_(name) {} static constexpr OpProperties kProperties = - OpProperties::LazyDeopt() | OpProperties::DeferredCall(); + OpProperties::Throw() | OpProperties::DeferredCall(); const compiler::NameRef& name() const { return name_; } @@ -3576,7 +3719,7 @@ class ThrowSuperNotCalledIfHole explicit ThrowSuperNotCalledIfHole(uint64_t bitfield) : Base(bitfield) {} static constexpr OpProperties kProperties = - OpProperties::LazyDeopt() | OpProperties::DeferredCall(); + OpProperties::Throw() | OpProperties::DeferredCall(); Input& value() { return Node::input(0); } @@ -3592,7 +3735,7 @@ class ThrowSuperAlreadyCalledIfNotHole : Base(bitfield) {} static constexpr OpProperties kProperties = - OpProperties::LazyDeopt() | OpProperties::DeferredCall(); + OpProperties::Throw() | OpProperties::DeferredCall(); Input& value() { return Node::input(0); } @@ -3607,7 +3750,7 @@ class ThrowIfNotSuperConstructor explicit ThrowIfNotSuperConstructor(uint64_t bitfield) : Base(bitfield) {} static constexpr OpProperties kProperties = - OpProperties::LazyDeopt() | OpProperties::DeferredCall(); + OpProperties::Throw() | OpProperties::DeferredCall(); Input& constructor() { return Node::input(0); } Input& function() { return Node::input(1); } @@ -3701,6 +3844,12 @@ class BranchControlNode : public ConditionalControlNode { BasicBlock* if_true() const { return if_true_.block_ptr(); } BasicBlock* if_false() const { return if_false_.block_ptr(); } + void set_true_interrupt_correction(int interrupt_budget_correction) { + if_true_.set_interrupt_budget_correction(interrupt_budget_correction); + } + void set_false_interrupt_correction(int interrupt_budget_correction) { + if_false_.set_interrupt_budget_correction(interrupt_budget_correction); + } private: BasicBlockRef if_true_; diff --git a/deps/v8/src/maglev/maglev-regalloc.cc b/deps/v8/src/maglev/maglev-regalloc.cc index 9cc1d02636dfb8..b6189dc6ceabeb 100644 --- a/deps/v8/src/maglev/maglev-regalloc.cc +++ b/deps/v8/src/maglev/maglev-regalloc.cc @@ -283,7 +283,7 @@ void StraightForwardRegisterAllocator::PrintLiveRegs() const { } void StraightForwardRegisterAllocator::AllocateRegisters() { - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_.reset(new MaglevPrintingVisitor( compilation_info_->graph_labeller(), std::cout)); printing_visitor_->PreProcessGraph(graph_); @@ -319,6 +319,14 @@ void 
StraightForwardRegisterAllocator::AllocateRegisters() { if (block->state()->is_exception_handler()) { // Exceptions start from a blank state of register values. ClearRegisterValues(); + } else if (block->state()->is_resumable_loop() && + block->state()->predecessor_count() <= 1) { + // Loops that are only reachable through JumpLoop start from a blank + // state of register values. + // This should actually only support predecessor_count == 1, but we + // currently don't eliminate resumable loop headers (and subsequent code + // until the next resume) that end up being unreachable from JumpLoop. + ClearRegisterValues(); } else { InitializeRegisterValues(block->state()->register_state()); } @@ -326,7 +334,7 @@ void StraightForwardRegisterAllocator::AllocateRegisters() { InitializeRegisterValues(block->empty_block_register_state()); } - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->PreProcessBasicBlock(block); printing_visitor_->os() << "live regs: "; PrintLiveRegs(); @@ -391,7 +399,7 @@ void StraightForwardRegisterAllocator::AllocateRegisters() { if (phi->owner() == interpreter::Register::virtual_accumulator() && !phi->is_dead()) { phi->result().SetAllocated(ForceAllocate(kReturnRegister0, phi)); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(phi, ProcessingState(block_it_)); printing_visitor_->os() << "phi (exception message object) " << phi->result().operand() << std::endl; @@ -411,7 +419,7 @@ void StraightForwardRegisterAllocator::AllocateRegisters() { compiler::AllocatedOperand allocation = general_registers_.AllocateRegister(phi); phi->result().SetAllocated(allocation); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(phi, ProcessingState(block_it_)); printing_visitor_->os() << "phi (new reg) " << phi->result().operand() << std::endl; @@ -428,14 +436,14 @@ void StraightForwardRegisterAllocator::AllocateRegisters() { AllocateSpillSlot(phi); // TODO(verwaest): Will this be used at all? phi->result().SetAllocated(phi->spill_slot()); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(phi, ProcessingState(block_it_)); printing_visitor_->os() << "phi (stack) " << phi->result().operand() << std::endl; } } - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "live regs: "; PrintLiveRegs(); printing_visitor_->os() << std::endl; @@ -470,7 +478,7 @@ void StraightForwardRegisterAllocator::UpdateUse( if (!node->is_dead()) return; - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " freeing " << PrintNodeLabel(graph_labeller(), node) << "\n"; } @@ -498,7 +506,7 @@ void StraightForwardRegisterAllocator::UpdateUse( detail::DeepForEachInput( &deopt_info, [&](ValueNode* node, interpreter::Register reg, InputLocation* input) { - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "- using " << PrintNodeLabel(graph_labeller(), node) << "\n"; } @@ -513,26 +521,17 @@ void StraightForwardRegisterAllocator::UpdateUse( void StraightForwardRegisterAllocator::UpdateUse( const LazyDeoptInfo& deopt_info) { - const CompactInterpreterFrameState* checkpoint_state = - deopt_info.state.register_frame; - int index = 0; - // TODO(leszeks): This is missing parent recursion, fix it. - // See also: UpdateUse(EagerDeoptInfo&). 
- checkpoint_state->ForEachValue( - deopt_info.unit, [&](ValueNode* node, interpreter::Register reg) { - // Skip over the result location since it is irrelevant for lazy deopts - // (unoptimized code will recreate the result). - if (deopt_info.IsResultRegister(reg)) return; - if (FLAG_trace_maglev_regalloc) { + detail::DeepForEachInput( + &deopt_info, + [&](ValueNode* node, interpreter::Register reg, InputLocation* input) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "- using " << PrintNodeLabel(graph_labeller(), node) << "\n"; } - InputLocation* input = &deopt_info.input_locations[index++]; - // We might have dropped this node without spilling it. Spill it now. - if (!node->has_register() && !node->is_loadable()) { - Spill(node); - } - input->InjectLocation(node->allocation()); + // Lazy deopts always need spilling, and should always be loaded from + // their loadable slot. + Spill(node); + input->InjectLocation(node->loadable_slot()); UpdateUse(node, input); }); } @@ -555,7 +554,7 @@ void StraightForwardRegisterAllocator::AllocateNode(Node* node) { DCHECK(!node->Is<ConstantGapMove>()); current_node_ = node; - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "Allocating " << PrintNodeLabel(graph_labeller(), node) << " inputs...\n"; @@ -567,26 +566,26 @@ void StraightForwardRegisterAllocator::AllocateNode(Node* node) { // Allocate node output. if (node->Is<ValueNode>()) { - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "Allocating result...\n"; } AllocateNodeResult(node->Cast<ValueNode>()); } - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "Updating uses...\n"; } // Update uses only after allocating the node result. This order is necessary // to avoid emitting input-clobbering gap moves during node result allocation. if (node->properties().can_eager_deopt()) { - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "Using eager deopt nodes...\n"; } UpdateUse(*node->eager_deopt_info()); } for (Input& input : *node) { - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "Using input " << PrintNodeLabel(graph_labeller(), input.node()) << "...\n"; @@ -596,7 +595,7 @@ void StraightForwardRegisterAllocator::AllocateNode(Node* node) { // Lazy deopts are semantically after the node, so update them last. if (node->properties().can_lazy_deopt()) { - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "Using lazy deopt nodes...\n"; } UpdateUse(*node->lazy_deopt_info()); @@ -604,7 +603,7 @@ void StraightForwardRegisterAllocator::AllocateNode(Node* node) { if (node->properties().needs_register_snapshot()) SaveRegisterSnapshot(node); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(node, ProcessingState(block_it_)); printing_visitor_->os() << "live regs: "; PrintLiveRegs(); @@ -615,8 +614,10 @@ void StraightForwardRegisterAllocator::AllocateNode(Node* node) { // result, which could be written into a register that was previously // considered a temporary. 
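// The two DCHECKs below verify this separately for the general-purpose and
// the double register sets.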
DCHECK_EQ(general_registers_.free() | - (node->temporaries() - GetNodeResultRegister(node)), + (node->general_temporaries() - GetNodeResultRegister(node)), general_registers_.free()); + DCHECK_EQ(double_registers_.free() | node->double_temporaries(), + double_registers_.free()); general_registers_.clear_blocked(); double_registers_.clear_blocked(); VerifyRegisterState(); @@ -628,7 +629,8 @@ void StraightForwardRegisterAllocator::DropRegisterValueAtEnd(RegisterT reg) { list.unblock(reg); if (!list.free().has(reg)) { ValueNode* node = list.GetValue(reg); - // If the is not live after the current node, just remove its value. + // If the register is not live after the current node, just remove its + // value. if (node->live_range().end == current_node_->id()) { node->RemoveRegister(reg); } else { @@ -716,7 +718,7 @@ void StraightForwardRegisterAllocator::DropRegisterValue( ValueNode* node = registers.GetValue(reg); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " dropping " << reg << " value " << PrintNodeLabel(graph_labeller(), node) << "\n"; } @@ -798,71 +800,6 @@ void StraightForwardRegisterAllocator::InitializeConditionalBranchTarget( target); } -#ifdef DEBUG -namespace { - -bool IsReachable(BasicBlock* source_block, BasicBlock* target_block, - std::set<BasicBlock*>& visited) { - if (source_block == target_block) return true; - if (!visited.insert(source_block).second) return false; - - ControlNode* control_node = source_block->control_node(); - if (UnconditionalControlNode* unconditional = - control_node->TryCast<UnconditionalControlNode>()) { - return IsReachable(unconditional->target(), target_block, visited); - } - if (BranchControlNode* branch = control_node->TryCast<BranchControlNode>()) { - return IsReachable(branch->if_true(), target_block, visited) || - IsReachable(branch->if_true(), target_block, visited); - } - if (Switch* switch_node = control_node->TryCast<Switch>()) { - const BasicBlockRef* targets = switch_node->targets(); - for (int i = 0; i < switch_node->size(); i++) { - if (IsReachable(source_block, targets[i].block_ptr(), visited)) { - return true; - } - } - if (switch_node->has_fallthrough()) { - if (IsReachable(source_block, switch_node->fallthrough(), visited)) { - return true; - } - } - return false; - } - return false; -} - -// Complex predicate for a JumpLoop lifetime extension DCHECK, see comments -// in AllocateControlNode. -bool IsValueFromGeneratorResumeThatDoesNotReachJumpLoop( - Graph* graph, ValueNode* input_node, BasicBlock* jump_loop_block) { - // The given node _must_ be created in the generator resume block. This is - // always the third block -- the first is inital values, the second is the - // test for an undefined generator, and the third is the generator resume - // machinery. 
- DCHECK_GE(graph->num_blocks(), 3); - BasicBlock* generator_block = *(graph->begin() + 2); - DCHECK_EQ(generator_block->control_node()->opcode(), Opcode::kSwitch); - - bool found_node = false; - for (Node* node : generator_block->nodes()) { - if (node == input_node) { - found_node = true; - break; - } - } - DCHECK(found_node); - - std::set<BasicBlock*> visited; - bool jump_loop_block_is_reachable_from_generator_block = - IsReachable(generator_block, jump_loop_block, visited); - DCHECK(!jump_loop_block_is_reachable_from_generator_block); - - return true; -} -} // namespace -#endif - void StraightForwardRegisterAllocator::AllocateControlNode(ControlNode* node, BasicBlock* block) { current_node_ = node; @@ -872,30 +809,36 @@ void StraightForwardRegisterAllocator::AllocateControlNode(ControlNode* node, if (node->Is<JumpToInlined>() || node->Is<Abort>()) { // Do nothing. - DCHECK(node->temporaries().is_empty()); - DCHECK_EQ(node->num_temporaries_needed(), 0); + DCHECK(node->general_temporaries().is_empty()); + DCHECK(node->double_temporaries().is_empty()); + DCHECK_EQ(node->num_temporaries_needed<Register>(), 0); + DCHECK_EQ(node->num_temporaries_needed<DoubleRegister>(), 0); DCHECK_EQ(node->input_count(), 0); DCHECK_EQ(node->properties(), OpProperties(0)); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(node, ProcessingState(block_it_)); } } else if (node->Is<Deopt>()) { - // No fixed temporaries. - DCHECK(node->temporaries().is_empty()); - DCHECK_EQ(node->num_temporaries_needed(), 0); + // No temporaries. + DCHECK(node->general_temporaries().is_empty()); + DCHECK(node->double_temporaries().is_empty()); + DCHECK_EQ(node->num_temporaries_needed<Register>(), 0); + DCHECK_EQ(node->num_temporaries_needed<DoubleRegister>(), 0); DCHECK_EQ(node->input_count(), 0); DCHECK_EQ(node->properties(), OpProperties::EagerDeopt()); UpdateUse(*node->eager_deopt_info()); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(node, ProcessingState(block_it_)); } } else if (auto unconditional = node->TryCast<UnconditionalControlNode>()) { - // No fixed temporaries. - DCHECK(node->temporaries().is_empty()); - DCHECK_EQ(node->num_temporaries_needed(), 0); + // No temporaries. + DCHECK(node->general_temporaries().is_empty()); + DCHECK(node->double_temporaries().is_empty()); + DCHECK_EQ(node->num_temporaries_needed<Register>(), 0); + DCHECK_EQ(node->num_temporaries_needed<DoubleRegister>(), 0); DCHECK_EQ(node->input_count(), 0); DCHECK(!node->properties().can_eager_deopt()); DCHECK(!node->properties().can_lazy_deopt()); @@ -915,19 +858,17 @@ void StraightForwardRegisterAllocator::AllocateControlNode(ControlNode* node, // extended lifetime nodes are dead. if (auto jump_loop = node->TryCast<JumpLoop>()) { for (Input& input : jump_loop->used_nodes()) { - // Since the value is used by the loop, it must be live somewhere ( - // either in a register or loadable). The exception is when this value - // is created in a generator resume, and the use of it cannot reach the - // JumpLoop (e.g. because it returns or deopts on resume). - DCHECK_IMPLIES( - !input.node()->has_register() && !input.node()->is_loadable(), - IsValueFromGeneratorResumeThatDoesNotReachJumpLoop( - graph_, input.node(), block)); + if (!input.node()->has_register() && !input.node()->is_loadable()) { + // If the value isn't loadable by the end of a loop (this can happen + // e.g. 
when a deferred throw doesn't spill it, and an exception + // handler drops the value), spill it now. + Spill(input.node()); + } UpdateUse(&input); } } - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(node, ProcessingState(block_it_)); } } else { @@ -943,14 +884,16 @@ void StraightForwardRegisterAllocator::AllocateControlNode(ControlNode* node, DCHECK(!node->properties().needs_register_snapshot()); - DCHECK_EQ(general_registers_.free() | node->temporaries(), + DCHECK_EQ(general_registers_.free() | node->general_temporaries(), general_registers_.free()); + DCHECK_EQ(double_registers_.free() | node->double_temporaries(), + double_registers_.free()); general_registers_.clear_blocked(); double_registers_.clear_blocked(); VerifyRegisterState(); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(node, ProcessingState(block_it_)); } @@ -984,7 +927,7 @@ void StraightForwardRegisterAllocator::TryAllocateToInput(Phi* phi) { if (general_registers_.unblocked_free().has(reg)) { phi->result().SetAllocated(ForceAllocate(reg, phi)); DCHECK_EQ(general_registers_.GetValue(reg), phi); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->Process(phi, ProcessingState(block_it_)); printing_visitor_->os() << "phi (reuse) " << input.operand() << std::endl; @@ -1001,7 +944,7 @@ void StraightForwardRegisterAllocator::AddMoveBeforeCurrentNode( Node* gap_move; if (source.IsConstant()) { DCHECK(IsConstantNode(node->opcode())); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " constant gap move: " << target << " ← " << PrintNodeLabel(graph_labeller(), node) << std::endl; @@ -1009,7 +952,7 @@ void StraightForwardRegisterAllocator::AddMoveBeforeCurrentNode( gap_move = Node::New<ConstantGapMove>(compilation_info_->zone(), {}, node, target); } else { - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " gap move: " << target << " ← " << PrintNodeLabel(graph_labeller(), node) << ":" << source << std::endl; @@ -1037,7 +980,7 @@ void StraightForwardRegisterAllocator::AddMoveBeforeCurrentNode( void StraightForwardRegisterAllocator::Spill(ValueNode* node) { if (node->is_loadable()) return; AllocateSpillSlot(node); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " spill: " << node->spill_slot() << " ← " << PrintNodeLabel(graph_labeller(), node) << std::endl; @@ -1053,7 +996,7 @@ void StraightForwardRegisterAllocator::AssignFixedInput(Input& input) { switch (operand.extended_policy()) { case compiler::UnallocatedOperand::MUST_HAVE_REGISTER: // Allocated in AssignArbitraryRegisterInput. - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "- " << PrintNodeLabel(graph_labeller(), input.node()) << " has arbitrary register\n"; @@ -1062,7 +1005,7 @@ void StraightForwardRegisterAllocator::AssignFixedInput(Input& input) { case compiler::UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT: // Allocated in AssignAnyInput.
- if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "- " << PrintNodeLabel(graph_labeller(), input.node()) << " has arbitrary location\n"; @@ -1088,7 +1031,7 @@ void StraightForwardRegisterAllocator::AssignFixedInput(Input& input) { case compiler::UnallocatedOperand::MUST_HAVE_SLOT: UNREACHABLE(); } - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "- " << PrintNodeLabel(graph_labeller(), input.node()) << " in forced " << input.operand() << "\n"; @@ -1120,7 +1063,7 @@ void StraightForwardRegisterAllocator::AssignArbitraryRegisterInput( ValueNode* node = input.node(); compiler::InstructionOperand location = node->allocation(); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "- " << PrintNodeLabel(graph_labeller(), input.node()) << " in " << location << "\n"; @@ -1152,7 +1095,7 @@ void StraightForwardRegisterAllocator::AssignAnyInput(Input& input) { compiler::InstructionOperand location = node->allocation(); input.InjectLocation(location); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "- " << PrintNodeLabel(graph_labeller(), input.node()) << " in original " << location << "\n"; @@ -1291,7 +1234,7 @@ void StraightForwardRegisterAllocator::SpillAndClearRegisters( while (registers.used() != registers.empty()) { RegisterT reg = registers.used().first(); ValueNode* node = registers.GetValue(reg); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " clearing registers with " << PrintNodeLabel(graph_labeller(), node) << "\n"; } @@ -1328,7 +1271,7 @@ void StraightForwardRegisterAllocator::AllocateSpillSlot(ValueNode* node) { // architectures. SpillSlots& slots = is_tagged ? tagged_ : untagged_; MachineRepresentation representation = node->GetMachineRepresentation(); - if (!FLAG_maglev_reuse_stack_slots || slots.free_slots.empty()) { + if (!v8_flags.maglev_reuse_stack_slots || slots.free_slots.empty()) { free_slot = slots.top++; } else { NodeIdT start = node->live_range().start; @@ -1352,7 +1295,7 @@ template <typename RegisterT> RegisterT StraightForwardRegisterAllocator::PickRegisterToFree( RegListBase<RegisterT> reserved) { RegisterFrameState<RegisterT>& registers = GetRegisterFrameState<RegisterT>(); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " need to free a register... "; } int furthest_use = 0; @@ -1373,7 +1316,7 @@ RegisterT StraightForwardRegisterAllocator::PickRegisterToFree( best = reg; } } - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " chose " << best << " with next use " << furthest_use << "\n"; } @@ -1448,7 +1391,7 @@ template <typename RegisterT> compiler::AllocatedOperand StraightForwardRegisterAllocator::ForceAllocate( RegisterFrameState<RegisterT>& registers, RegisterT reg, ValueNode* node) { DCHECK(!registers.is_blocked(reg)); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " forcing " << reg << " to " << PrintNodeLabel(graph_labeller(), node) << "...\n"; @@ -1533,57 +1476,81 @@ compiler::AllocatedOperand RegisterFrameState<RegisterT>::AllocateRegister( reg.code()); } -void StraightForwardRegisterAllocator::AssignFixedTemporaries(NodeBase* node) { - // TODO(victorgomes): Support double registers as temporaries. 
- RegList fixed_temporaries = node->temporaries(); +template <typename RegisterT> +void StraightForwardRegisterAllocator::AssignFixedTemporaries( + RegisterFrameState<RegisterT>& registers, NodeBase* node) { + RegListBase<RegisterT> fixed_temporaries = node->temporaries<RegisterT>(); // Make sure that any initially set temporaries are definitely free. - for (Register reg : fixed_temporaries) { - DCHECK(!general_registers_.is_blocked(reg)); - if (!general_registers_.free().has(reg)) { - DropRegisterValue(general_registers_, reg); - general_registers_.AddToFree(reg); + for (RegisterT reg : fixed_temporaries) { + DCHECK(!registers.is_blocked(reg)); + if (!registers.free().has(reg)) { + DropRegisterValue(registers, reg); + registers.AddToFree(reg); } - general_registers_.block(reg); + registers.block(reg); } - if (FLAG_trace_maglev_regalloc) { - printing_visitor_->os() - << "Fixed temporaries: " << fixed_temporaries << "\n"; + if (v8_flags.trace_maglev_regalloc && !fixed_temporaries.is_empty()) { + if constexpr (std::is_same_v<RegisterT, Register>) { + printing_visitor_->os() + << "Fixed Temporaries: " << fixed_temporaries << "\n"; + } else { + printing_visitor_->os() + << "Fixed Double Temporaries: " << fixed_temporaries << "\n"; + } } } +void StraightForwardRegisterAllocator::AssignFixedTemporaries(NodeBase* node) { + AssignFixedTemporaries(general_registers_, node); + AssignFixedTemporaries(double_registers_, node); +} + +template <typename RegisterT> void StraightForwardRegisterAllocator::AssignArbitraryTemporaries( - NodeBase* node) { - int num_temporaries_needed = node->num_temporaries_needed(); + RegisterFrameState<RegisterT>& registers, NodeBase* node) { + int num_temporaries_needed = node->num_temporaries_needed<RegisterT>(); if (num_temporaries_needed == 0) return; - RegList temporaries = node->temporaries(); + DCHECK_GT(num_temporaries_needed, 0); + RegListBase<RegisterT> temporaries = node->temporaries<RegisterT>(); + int remaining_temporaries_needed = num_temporaries_needed; - // TODO(victorgomes): Support double registers as temporaries. - for (Register reg : general_registers_.unblocked_free()) { - general_registers_.block(reg); + for (RegisterT reg : registers.unblocked_free()) { + registers.block(reg); DCHECK(!temporaries.has(reg)); temporaries.set(reg); - if (--num_temporaries_needed == 0) break; + if (--remaining_temporaries_needed == 0) break; } // Free extra registers if necessary. 
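// (FreeUnblockedRegister relies on PickRegisterToFree above: it evicts the
// register whose value has the furthest next use, spilling it if still needed.)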
- for (int i = 0; i < num_temporaries_needed; ++i) { - DCHECK(general_registers_.UnblockedFreeIsEmpty()); - Register reg = FreeUnblockedRegister<Register>(); - general_registers_.block(reg); + for (int i = 0; i < remaining_temporaries_needed; ++i) { + DCHECK(registers.UnblockedFreeIsEmpty()); + RegisterT reg = FreeUnblockedRegister<RegisterT>(); + registers.block(reg); DCHECK(!temporaries.has(reg)); temporaries.set(reg); } - DCHECK_GE(temporaries.Count(), node->num_temporaries_needed()); + DCHECK_GE(temporaries.Count(), num_temporaries_needed); + node->assign_temporaries(temporaries); - if (FLAG_trace_maglev_regalloc) { - printing_visitor_->os() << "Temporaries: " << temporaries << "\n"; + if (v8_flags.trace_maglev_regalloc) { + if constexpr (std::is_same_v<RegisterT, Register>) { + printing_visitor_->os() << "Temporaries: " << temporaries << "\n"; + } else { + printing_visitor_->os() << "Double Temporaries: " << temporaries << "\n"; + } } } +void StraightForwardRegisterAllocator::AssignArbitraryTemporaries( + NodeBase* node) { + AssignArbitraryTemporaries(general_registers_, node); + AssignArbitraryTemporaries(double_registers_, node); +} + namespace { template <typename RegisterT> void ClearRegisterState(RegisterFrameState<RegisterT>& registers) { @@ -1711,7 +1678,7 @@ void StraightForwardRegisterAllocator::MergeRegisterValues(ControlNode* control, return InitializeBranchTargetRegisterValues(control, target); } - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << "Merging registers...\n"; } @@ -1735,7 +1702,7 @@ void StraightForwardRegisterAllocator::MergeRegisterValues(ControlNode* control, if (!registers.free().has(reg)) { incoming = registers.GetValue(reg); if (!IsLiveAtTarget(incoming, control, target)) { - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " " << reg << " - incoming node " << PrintNodeLabel(graph_labeller(), incoming) << " dead at target\n"; @@ -1747,7 +1714,7 @@ void StraightForwardRegisterAllocator::MergeRegisterValues(ControlNode* control, if (incoming == node) { // We're using the same register as the target already has. If registers // are merged, add input information. - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { if (node) { printing_visitor_->os() << " " << reg << " - incoming node same as node: " @@ -1762,7 +1729,7 @@ void StraightForwardRegisterAllocator::MergeRegisterValues(ControlNode* control, // The register is already occupied with a different node. Figure out // where that node is allocated on the incoming branch. merge->operand(predecessor_id) = node->allocation(); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " " << reg << " - merge: loading " << PrintNodeLabel(graph_labeller(), node) << " from " << node->allocation() << " \n"; @@ -1787,7 +1754,7 @@ void StraightForwardRegisterAllocator::MergeRegisterValues(ControlNode* control, // containing conversion nodes. // DCHECK_IMPLIES(!IsInRegister(target_state, incoming), // incoming->properties().is_conversion()); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " " << reg << " - can't load incoming " << PrintNodeLabel(graph_labeller(), node) << ", bailing out\n"; @@ -1802,7 +1769,7 @@ void StraightForwardRegisterAllocator::MergeRegisterValues(ControlNode* control, // over the liveness of the node they are converting. // TODO(v8:7700): Overeager DCHECK. 
// DCHECK(node->properties().is_conversion()); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " " << reg << " - can't load " << PrintNodeLabel(graph_labeller(), node) << ", dropping the merge\n"; @@ -1834,14 +1801,14 @@ void StraightForwardRegisterAllocator::MergeRegisterValues(ControlNode* control, // state. if (node == nullptr) { merge->operand(predecessor_id) = register_info; - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " " << reg << " - new merge: loading new " << PrintNodeLabel(graph_labeller(), incoming) << " from " << register_info << " \n"; } } else { merge->operand(predecessor_id) = node->allocation(); - if (FLAG_trace_maglev_regalloc) { + if (v8_flags.trace_maglev_regalloc) { printing_visitor_->os() << " " << reg << " - new merge: loading " << PrintNodeLabel(graph_labeller(), node) << " from " << node->allocation() << " \n"; diff --git a/deps/v8/src/maglev/maglev-regalloc.h b/deps/v8/src/maglev/maglev-regalloc.h index 081383dd82602f..4d6d4cf5210606 100644 --- a/deps/v8/src/maglev/maglev-regalloc.h +++ b/deps/v8/src/maglev/maglev-regalloc.h @@ -142,7 +142,13 @@ class StraightForwardRegisterAllocator { void AssignArbitraryRegisterInput(Input& input); void AssignAnyInput(Input& input); void AssignInputs(NodeBase* node); + template <typename RegisterT> + void AssignFixedTemporaries(RegisterFrameState<RegisterT>& registers, + NodeBase* node); void AssignFixedTemporaries(NodeBase* node); + template <typename RegisterT> + void AssignArbitraryTemporaries(RegisterFrameState<RegisterT>& registers, + NodeBase* node); void AssignArbitraryTemporaries(NodeBase* node); void TryAllocateToInput(Phi* phi); diff --git a/deps/v8/src/maglev/maglev.cc b/deps/v8/src/maglev/maglev.cc index f4e2275945af48..024175c8408808 100644 --- a/deps/v8/src/maglev/maglev.cc +++ b/deps/v8/src/maglev/maglev.cc @@ -13,12 +13,12 @@ namespace internal { MaybeHandle<CodeT> Maglev::Compile(Isolate* isolate, Handle<JSFunction> function) { - DCHECK(FLAG_maglev); + DCHECK(v8_flags.maglev); std::unique_ptr<maglev::MaglevCompilationInfo> info = maglev::MaglevCompilationInfo::New(isolate, function); maglev::MaglevCompiler::Compile(isolate->main_thread_local_isolate(), info.get()); - return maglev::MaglevCompiler::GenerateCode(info.get()); + return maglev::MaglevCompiler::GenerateCode(isolate, info.get()); } } // namespace internal diff --git a/deps/v8/src/maglev/maglev.h b/deps/v8/src/maglev/maglev.h index e55df23b156ed9..7207fdec5eabe7 100644 --- a/deps/v8/src/maglev/maglev.h +++ b/deps/v8/src/maglev/maglev.h @@ -17,6 +17,8 @@ class JSFunction; class Maglev : public AllStatic { public: + // TODO(v8:7700): This entry point is only used for testing. Consider + // removing it once BenchMaglev runtime functions are no longer useful. 
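+ // (Production compilations do not come through here; they are triggered by
+ // the tiering machinery instead.)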
static MaybeHandle<CodeT> Compile(Isolate* isolate, Handle<JSFunction> function); }; diff --git a/deps/v8/src/numbers/conversions.cc b/deps/v8/src/numbers/conversions.cc index 902fb53bdcdc5b..76f072e4f948c0 100644 --- a/deps/v8/src/numbers/conversions.cc +++ b/deps/v8/src/numbers/conversions.cc @@ -1516,6 +1516,11 @@ bool IsSpecialIndex(String string) { } return true; } + +float DoubleToFloat32_NoInline(double x) { return DoubleToFloat32(x); } + +int32_t DoubleToInt32_NoInline(double x) { return DoubleToInt32(x); } + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/numbers/conversions.h b/deps/v8/src/numbers/conversions.h index b8d2f63adf5542..cf4ac196909d13 100644 --- a/deps/v8/src/numbers/conversions.h +++ b/deps/v8/src/numbers/conversions.h @@ -61,12 +61,14 @@ inline double FastUI2D(unsigned x) { // This function should match the exact semantics of ECMA-262 20.2.2.17. inline float DoubleToFloat32(double x); +float DoubleToFloat32_NoInline(double x); // This function should match the exact semantics of ECMA-262 9.4. inline double DoubleToInteger(double x); // This function should match the exact semantics of ECMA-262 9.5. inline int32_t DoubleToInt32(double x); +int32_t DoubleToInt32_NoInline(double x); // This function should match the exact semantics of ECMA-262 9.6. inline uint32_t DoubleToUint32(double x); diff --git a/deps/v8/src/objects/all-objects-inl.h b/deps/v8/src/objects/all-objects-inl.h index 568f731f0cf656..b400f843395ec4 100644 --- a/deps/v8/src/objects/all-objects-inl.h +++ b/deps/v8/src/objects/all-objects-inl.h @@ -46,6 +46,7 @@ #include "src/objects/js-objects-inl.h" #include "src/objects/js-promise-inl.h" #include "src/objects/js-proxy-inl.h" +#include "src/objects/js-raw-json-inl.h" #include "src/objects/js-regexp-inl.h" #include "src/objects/js-regexp-string-iterator-inl.h" #include "src/objects/js-shadow-realm-inl.h" @@ -97,6 +98,7 @@ #include "src/objects/js-collator-inl.h" #include "src/objects/js-date-time-format-inl.h" #include "src/objects/js-display-names-inl.h" +#include "src/objects/js-duration-format-inl.h" #include "src/objects/js-list-format-inl.h" #include "src/objects/js-locale-inl.h" #include "src/objects/js-number-format-inl.h" diff --git a/deps/v8/src/objects/backing-store.cc b/deps/v8/src/objects/backing-store.cc index 7a9baa0f3662d6..1265accb248d78 100644 --- a/deps/v8/src/objects/backing-store.cc +++ b/deps/v8/src/objects/backing-store.cc @@ -135,6 +135,21 @@ void BackingStore::Clear() { type_specific_data_.v8_api_array_buffer_allocator = nullptr; } +void BackingStore::FreeResizableMemory() { + DCHECK(free_on_destruct_); + DCHECK(!custom_deleter_); + DCHECK(is_resizable_by_js_ || is_wasm_memory_); + auto region = + GetReservedRegion(has_guard_regions_, buffer_start_, byte_capacity_); + + PageAllocator* page_allocator = GetArrayBufferPageAllocator(); + if (!region.is_empty()) { + FreePages(page_allocator, reinterpret_cast<void*>(region.begin()), + region.size()); + } + Clear(); +} + BackingStore::BackingStore(void* buffer_start, size_t byte_length, size_t max_byte_length, size_t byte_capacity, SharedFlag shared, ResizableFlag resizable, @@ -147,7 +162,7 @@ BackingStore::BackingStore(void* buffer_start, size_t byte_length, byte_capacity_(byte_capacity), id_(next_backing_store_id_.fetch_add(1)), is_shared_(shared == SharedFlag::kShared), - is_resizable_(resizable == ResizableFlag::kResizable), + is_resizable_by_js_(resizable == ResizableFlag::kResizable), is_wasm_memory_(is_wasm_memory), holds_shared_ptr_to_allocator_(false), 
free_on_destruct_(free_on_destruct), @@ -156,10 +171,10 @@ BackingStore::BackingStore(void* buffer_start, size_t byte_length, custom_deleter_(custom_deleter), empty_deleter_(empty_deleter) { // TODO(v8:11111): RAB / GSAB - Wasm integration. - DCHECK_IMPLIES(is_wasm_memory_, !is_resizable_); - DCHECK_IMPLIES(is_resizable_, !custom_deleter_); - DCHECK_IMPLIES(is_resizable_, free_on_destruct_); - DCHECK_IMPLIES(!is_wasm_memory && !is_resizable_, + DCHECK_IMPLIES(is_wasm_memory_, !is_resizable_by_js_); + DCHECK_IMPLIES(is_resizable_by_js_, !custom_deleter_); + DCHECK_IMPLIES(is_resizable_by_js_, free_on_destruct_); + DCHECK_IMPLIES(!is_wasm_memory && !is_resizable_by_js_, byte_length_ == max_byte_length_); DCHECK_GE(max_byte_length_, byte_length_); DCHECK_GE(byte_capacity_, max_byte_length_); @@ -173,14 +188,10 @@ BackingStore::~BackingStore() { return; } - PageAllocator* page_allocator = GetArrayBufferPageAllocator(); - #if V8_ENABLE_WEBASSEMBLY if (is_wasm_memory_) { // TODO(v8:11111): RAB / GSAB - Wasm integration. - DCHECK(!is_resizable_); - DCHECK(free_on_destruct_); - DCHECK(!custom_deleter_); + DCHECK(!is_resizable_by_js_); size_t reservation_size = GetReservationSize(has_guard_regions_, byte_capacity_); TRACE_BS( @@ -192,31 +203,14 @@ BackingStore::~BackingStore() { delete shared_data; type_specific_data_.shared_wasm_memory_data = nullptr; } - // Wasm memories are always allocated through the page allocator. - auto region = - GetReservedRegion(has_guard_regions_, buffer_start_, byte_capacity_); - - if (!region.is_empty()) { - FreePages(page_allocator, reinterpret_cast<void*>(region.begin()), - region.size()); - } - Clear(); + FreeResizableMemory(); return; } #endif // V8_ENABLE_WEBASSEMBLY - if (is_resizable_) { - DCHECK(free_on_destruct_); - DCHECK(!custom_deleter_); - auto region = - GetReservedRegion(has_guard_regions_, buffer_start_, byte_capacity_); - - if (!region.is_empty()) { - FreePages(page_allocator, reinterpret_cast<void*>(region.begin()), - region.size()); - } - Clear(); + if (is_resizable_by_js_) { + FreeResizableMemory(); return; } if (custom_deleter_) { @@ -280,6 +274,14 @@ std::unique_ptr<BackingStore> BackingStore::Allocate( counters->array_buffer_new_size_failures()->AddSample(mb_length); return {}; } +#ifdef V8_ENABLE_SANDBOX + // Check to catch use of a non-sandbox-compatible ArrayBufferAllocator. + CHECK_WITH_MSG(GetProcessWideSandbox()->Contains(buffer_start), + "When the V8 Sandbox is enabled, ArrayBuffer backing stores " + "must be allocated inside the sandbox address space. 
Please " + "use an appropriate ArrayBuffer::Allocator to allocate " + "these buffers, or disable the sandbox."); +#endif } auto result = new BackingStore(buffer_start, // start diff --git a/deps/v8/src/objects/backing-store.h b/deps/v8/src/objects/backing-store.h index e91ef178bfbb0e..699e2c1f314857 100644 --- a/deps/v8/src/objects/backing-store.h +++ b/deps/v8/src/objects/backing-store.h @@ -95,7 +95,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase { size_t max_byte_length() const { return max_byte_length_; } size_t byte_capacity() const { return byte_capacity_; } bool is_shared() const { return is_shared_; } - bool is_resizable() const { return is_resizable_; } + bool is_resizable_by_js() const { return is_resizable_by_js_; } bool is_wasm_memory() const { return is_wasm_memory_; } bool has_guard_regions() const { return has_guard_regions_; } bool free_on_destruct() const { return free_on_destruct_; } @@ -112,7 +112,8 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase { bool CanReallocate() const { return !is_wasm_memory_ && !custom_deleter_ && !globally_registered_ && - free_on_destruct_ && !is_resizable_ && buffer_start_ != nullptr; + free_on_destruct_ && !is_resizable_by_js_ && + buffer_start_ != nullptr; } // Wrapper around ArrayBuffer::Allocator::Reallocate. @@ -223,7 +224,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase { bool is_shared_ : 1; // Backing stores for (Resizable|GrowableShared)ArrayBuffer - bool is_resizable_ : 1; + bool is_resizable_by_js_ : 1; bool is_wasm_memory_ : 1; bool holds_shared_ptr_to_allocator_ : 1; bool free_on_destruct_ : 1; @@ -236,6 +237,7 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase { v8::ArrayBuffer::Allocator* get_v8_api_array_buffer_allocator(); SharedWasmMemoryData* get_shared_wasm_memory_data(); + void FreeResizableMemory(); // Free the reserved region for resizable memory void Clear(); // Internally clears fields after deallocation. }; diff --git a/deps/v8/src/objects/bigint.cc b/deps/v8/src/objects/bigint.cc index 191603d08518ff..ffc4e2a6ee84ee 100644 --- a/deps/v8/src/objects/bigint.cc +++ b/deps/v8/src/objects/bigint.cc @@ -319,14 +319,19 @@ void MutableBigInt::Canonicalize(MutableBigInt result) { while (new_length > 0 && result.digit(new_length - 1) == 0) new_length--; int to_trim = old_length - new_length; if (to_trim != 0) { - int size_delta = to_trim * MutableBigInt::kDigitSize; - Address new_end = result.address() + BigInt::SizeFor(new_length); Heap* heap = result.GetHeap(); if (!heap->IsLargeObject(result)) { // We do not create a filler for objects in large object space. // TODO(hpayer): We should shrink the large object page if the size // of the object changed significantly. - heap->CreateFillerObjectAt(new_end, size_delta); + int old_size = ALIGN_TO_ALLOCATION_ALIGNMENT(BigInt::SizeFor(old_length)); + int new_size = ALIGN_TO_ALLOCATION_ALIGNMENT(BigInt::SizeFor(new_length)); + if (!V8_COMPRESS_POINTERS_8GB_BOOL || new_size < old_size) { + // A non-zero to_trim already guarantees that the sizes are different, + // but their aligned values can be equal. + Address new_end = result.address() + new_size; + heap->CreateFillerObjectAt(new_end, old_size - new_size); + } } result.set_length(new_length, kReleaseStore); @@ -996,8 +1001,10 @@ MaybeHandle<String> BigInt::ToString(Isolate* isolate, Handle<BigInt> bigint, // estimates). 
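// Trim the over-allocated string: shrink its length and turn the unused tail
// into a filler object, with both sizes rounded up to the allocation alignment.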
if (chars_written < chars_allocated) { result->set_length(chars_written, kReleaseStore); - int string_size = SeqOneByteString::SizeFor(chars_allocated); - int needed_size = SeqOneByteString::SizeFor(chars_written); + int string_size = ALIGN_TO_ALLOCATION_ALIGNMENT( + SeqOneByteString::SizeFor(chars_allocated)); + int needed_size = + ALIGN_TO_ALLOCATION_ALIGNMENT(SeqOneByteString::SizeFor(chars_written)); if (needed_size < string_size && !isolate->heap()->IsLargeObject(*result)) { Address new_end = result->address() + needed_size; isolate->heap()->CreateFillerObjectAt(new_end, @@ -1626,10 +1633,11 @@ void MutableBigInt_AbsoluteSubAndCanonicalize(Address result_addr, MutableBigInt::Canonicalize(result); } -// Returns true if it succeeded to obtain the result of multiplication. -// Returns false if the computation is interrupted. -bool MutableBigInt_AbsoluteMulAndCanonicalize(Address result_addr, - Address x_addr, Address y_addr) { +// Returns 0 if it succeeded to obtain the result of multiplication. +// Returns 1 if the computation is interrupted. +int32_t MutableBigInt_AbsoluteMulAndCanonicalize(Address result_addr, + Address x_addr, + Address y_addr) { BigInt x = BigInt::cast(Object(x_addr)); BigInt y = BigInt::cast(Object(y_addr)); MutableBigInt result = MutableBigInt::cast(Object(result_addr)); @@ -1643,15 +1651,16 @@ bool MutableBigInt_AbsoluteMulAndCanonicalize(Address result_addr, bigint::Status status = isolate->bigint_processor()->Multiply( GetRWDigits(result), GetDigits(x), GetDigits(y)); if (status == bigint::Status::kInterrupted) { - return false; + return 1; } MutableBigInt::Canonicalize(result); - return true; + return 0; } -bool MutableBigInt_AbsoluteDivAndCanonicalize(Address result_addr, - Address x_addr, Address y_addr) { +int32_t MutableBigInt_AbsoluteDivAndCanonicalize(Address result_addr, + Address x_addr, + Address y_addr) { BigInt x = BigInt::cast(Object(x_addr)); BigInt y = BigInt::cast(Object(y_addr)); MutableBigInt result = MutableBigInt::cast(Object(result_addr)); @@ -1667,11 +1676,11 @@ bool MutableBigInt_AbsoluteDivAndCanonicalize(Address result_addr, bigint::Status status = isolate->bigint_processor()->Divide( GetRWDigits(result), GetDigits(x), GetDigits(y)); if (status == bigint::Status::kInterrupted) { - return false; + return 1; } MutableBigInt::Canonicalize(result); - return true; + return 0; } void MutableBigInt_BitwiseAndPosPosAndCanonicalize(Address result_addr, diff --git a/deps/v8/src/objects/bigint.h b/deps/v8/src/objects/bigint.h index 04de48fbf95f95..5ebd2a423a51d6 100644 --- a/deps/v8/src/objects/bigint.h +++ b/deps/v8/src/objects/bigint.h @@ -26,10 +26,12 @@ void MutableBigInt_AbsoluteAddAndCanonicalize(Address result_addr, int32_t MutableBigInt_AbsoluteCompare(Address x_addr, Address y_addr); void MutableBigInt_AbsoluteSubAndCanonicalize(Address result_addr, Address x_addr, Address y_addr); -bool MutableBigInt_AbsoluteMulAndCanonicalize(Address result_addr, - Address x_addr, Address y_addr); -bool MutableBigInt_AbsoluteDivAndCanonicalize(Address result_addr, - Address x_addr, Address y_addr); +int32_t MutableBigInt_AbsoluteMulAndCanonicalize(Address result_addr, + Address x_addr, + Address y_addr); +int32_t MutableBigInt_AbsoluteDivAndCanonicalize(Address result_addr, + Address x_addr, + Address y_addr); void MutableBigInt_BitwiseAndPosPosAndCanonicalize(Address result_addr, Address x_addr, Address y_addr); diff --git a/deps/v8/src/objects/code-inl.h b/deps/v8/src/objects/code-inl.h index 2677c203beda56..1c631363d4aba8 100644 --- 
a/deps/v8/src/objects/code-inl.h +++ b/deps/v8/src/objects/code-inl.h @@ -787,7 +787,7 @@ bool CodeDataContainer::has_handler_table() const { int Code::constant_pool_size() const { const int size = code_comments_offset() - constant_pool_offset(); - if (!v8_flags.enable_embedded_constant_pool) { + if (!V8_EMBEDDED_CONSTANT_POOL_BOOL) { DCHECK_EQ(size, 0); return 0; } @@ -1232,7 +1232,7 @@ bool CodeDataContainer::is_wasm_code() const { #endif int Code::constant_pool_offset() const { - if (!v8_flags.enable_embedded_constant_pool) { + if (!V8_EMBEDDED_CONSTANT_POOL_BOOL) { // Redirection needed since the field doesn't exist in this case. return code_comments_offset(); } @@ -1240,7 +1240,7 @@ int Code::constant_pool_offset() const { } void Code::set_constant_pool_offset(int value) { - if (!v8_flags.enable_embedded_constant_pool) { + if (!V8_EMBEDDED_CONSTANT_POOL_BOOL) { // Redirection needed since the field doesn't exist in this case. return; } @@ -1436,15 +1436,21 @@ Object CodeDataContainer::raw_code() const { } Object CodeDataContainer::raw_code(PtrComprCageBase cage_base) const { - CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); - Object value = TaggedField<Object, kCodeOffset>::load(cage_base, *this); +#ifdef V8_EXTERNAL_CODE_SPACE + Object value = ExternalCodeField::load(cage_base, *this); return value; +#else + UNREACHABLE(); +#endif // V8_EXTERNAL_CODE_SPACE } void CodeDataContainer::set_raw_code(Object value, WriteBarrierMode mode) { - CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); - TaggedField<Object, kCodeOffset>::Release_Store(*this, value); +#ifdef V8_EXTERNAL_CODE_SPACE + ExternalCodeField::Release_Store(*this, value); CONDITIONAL_WRITE_BARRIER(*this, kCodeOffset, value, mode); +#else + UNREACHABLE(); +#endif // V8_EXTERNAL_CODE_SPACE } Object CodeDataContainer::raw_code(RelaxedLoadTag tag) const { @@ -1454,10 +1460,12 @@ Object CodeDataContainer::raw_code(RelaxedLoadTag tag) const { Object CodeDataContainer::raw_code(PtrComprCageBase cage_base, RelaxedLoadTag) const { - Object value = - TaggedField<Object, kCodeOffset>::Relaxed_Load(cage_base, *this); - CHECK(V8_EXTERNAL_CODE_SPACE_BOOL); +#ifdef V8_EXTERNAL_CODE_SPACE + Object value = ExternalCodeField::Relaxed_Load(cage_base, *this); return value; +#else + UNREACHABLE(); +#endif // V8_EXTERNAL_CODE_SPACE } ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset) diff --git a/deps/v8/src/objects/code.cc b/deps/v8/src/objects/code.cc index 37092fd10b2cbc..46d9edb47127b2 100644 --- a/deps/v8/src/objects/code.cc +++ b/deps/v8/src/objects/code.cc @@ -215,10 +215,6 @@ void Code::RelocateFromDesc(ByteArray reloc_info, Heap* heap, it.rinfo()->set_target_address(p, UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH); DCHECK_EQ(p, it.rinfo()->target_address()); - } else if (RelocInfo::IsRuntimeEntry(mode)) { - Address p = it.rinfo()->target_runtime_entry(origin); - it.rinfo()->set_target_runtime_entry(p, UPDATE_WRITE_BARRIER, - SKIP_ICACHE_FLUSH); } else { intptr_t delta = raw_instruction_start() - reinterpret_cast<Address>(desc.buffer); @@ -369,12 +365,10 @@ bool Code::IsIsolateIndependent(Isolate* isolate) { RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET) | RelocInfo::ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) | RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) | - RelocInfo::ModeMask(RelocInfo::DATA_EMBEDDED_OBJECT) | RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) | RelocInfo::ModeMask(RelocInfo::NEAR_BUILTIN_ENTRY) | - 
RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | RelocInfo::ModeMask(RelocInfo::WASM_CALL) | RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL))); diff --git a/deps/v8/src/objects/code.h b/deps/v8/src/objects/code.h index 83fbf06369ddf3..65720ab7c68162 100644 --- a/deps/v8/src/objects/code.h +++ b/deps/v8/src/objects/code.h @@ -266,6 +266,11 @@ class CodeDataContainer : public HeapObject { DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS) #undef CODE_DATA_FIELDS +#ifdef V8_EXTERNAL_CODE_SPACE + using ExternalCodeField = + TaggedField<Object, kCodeOffset, ExternalCodeCompressionScheme>; +#endif + class BodyDescriptor; // Flags layout. @@ -728,8 +733,7 @@ class Code : public HeapObject { V(kOsrOffsetOffset, kInt32Size) \ /* Offsets describing inline metadata tables, relative to MetadataStart. */ \ V(kHandlerTableOffsetOffset, kIntSize) \ - V(kConstantPoolOffsetOffset, \ - v8_flags.enable_embedded_constant_pool.value() ? kIntSize : 0) \ + V(kConstantPoolOffsetOffset, V8_EMBEDDED_CONSTANT_POOL_BOOL ? kIntSize : 0) \ V(kCodeCommentsOffsetOffset, kIntSize) \ V(kUnwindingInfoOffsetOffset, kInt32Size) \ V(kUnalignedHeaderSize, 0) \ @@ -761,9 +765,8 @@ class Code : public HeapObject { static constexpr int kHeaderPaddingSize = 8; #elif V8_TARGET_ARCH_PPC64 static constexpr int kHeaderPaddingSize = - v8_flags.enable_embedded_constant_pool.value() - ? (COMPRESS_POINTERS_BOOL ? 4 : 48) - : (COMPRESS_POINTERS_BOOL ? 8 : 52); + V8_EMBEDDED_CONSTANT_POOL_BOOL ? (COMPRESS_POINTERS_BOOL ? 4 : 48) + : (COMPRESS_POINTERS_BOOL ? 8 : 52); #elif V8_TARGET_ARCH_S390X static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 8 : 20; #elif V8_TARGET_ARCH_RISCV64 diff --git a/deps/v8/src/objects/compressed-slots-inl.h b/deps/v8/src/objects/compressed-slots-inl.h index 0f99810219e0f9..1f1f0530bee9bf 100644 --- a/deps/v8/src/objects/compressed-slots-inl.h +++ b/deps/v8/src/objects/compressed-slots-inl.h @@ -11,8 +11,7 @@ #include "src/objects/compressed-slots.h" #include "src/objects/maybe-object-inl.h" -namespace v8 { -namespace internal { +namespace v8::internal { // // CompressedObjectSlot implementation. 
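// (Compression and decompression below go through TCompressionScheme, which
// binds the slot to a particular pointer-compression cage; see e.g. the
// ExternalCodeCompressionScheme used by CodeDataContainer in code.h. As a
// rough sketch, assuming a scheme with a single cage base and with the names
// here being illustrative only:
//   Tagged_t compressed = static_cast<Tagged_t>(full_address);  // keep low 32 bits
//   Address decompressed = cage_base + uint64_t{compressed};    // re-add the base
// )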
@@ -36,16 +35,16 @@ bool CompressedObjectSlot::contains_map_value(Address raw_value) const { Object CompressedObjectSlot::operator*() const { Tagged_t value = *location(); - return Object(DecompressTaggedAny(address(), value)); + return Object(TCompressionScheme::DecompressTaggedAny(address(), value)); } Object CompressedObjectSlot::load(PtrComprCageBase cage_base) const { Tagged_t value = *location(); - return Object(DecompressTaggedAny(cage_base, value)); + return Object(TCompressionScheme::DecompressTaggedAny(cage_base, value)); } void CompressedObjectSlot::store(Object value) const { - *location() = CompressTagged(value.ptr()); + *location() = TCompressionScheme::CompressTagged(value.ptr()); } void CompressedObjectSlot::store_map(Map map) const { @@ -64,36 +63,36 @@ Map CompressedObjectSlot::load_map() const { Object CompressedObjectSlot::Acquire_Load() const { AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location()); - return Object(DecompressTaggedAny(address(), value)); + return Object(TCompressionScheme::DecompressTaggedAny(address(), value)); } Object CompressedObjectSlot::Relaxed_Load() const { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location()); - return Object(DecompressTaggedAny(address(), value)); + return Object(TCompressionScheme::DecompressTaggedAny(address(), value)); } Object CompressedObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location()); - return Object(DecompressTaggedAny(cage_base, value)); + return Object(TCompressionScheme::DecompressTaggedAny(cage_base, value)); } void CompressedObjectSlot::Relaxed_Store(Object value) const { - Tagged_t ptr = CompressTagged(value.ptr()); + Tagged_t ptr = TCompressionScheme::CompressTagged(value.ptr()); AsAtomicTagged::Relaxed_Store(location(), ptr); } void CompressedObjectSlot::Release_Store(Object value) const { - Tagged_t ptr = CompressTagged(value.ptr()); + Tagged_t ptr = TCompressionScheme::CompressTagged(value.ptr()); AsAtomicTagged::Release_Store(location(), ptr); } Object CompressedObjectSlot::Release_CompareAndSwap(Object old, Object target) const { - Tagged_t old_ptr = CompressTagged(old.ptr()); - Tagged_t target_ptr = CompressTagged(target.ptr()); + Tagged_t old_ptr = TCompressionScheme::CompressTagged(old.ptr()); + Tagged_t target_ptr = TCompressionScheme::CompressTagged(target.ptr()); Tagged_t result = AsAtomicTagged::Release_CompareAndSwap(location(), old_ptr, target_ptr); - return Object(DecompressTaggedAny(address(), result)); + return Object(TCompressionScheme::DecompressTaggedAny(address(), result)); } // @@ -102,38 +101,38 @@ Object CompressedObjectSlot::Release_CompareAndSwap(Object old, MaybeObject CompressedMaybeObjectSlot::operator*() const { Tagged_t value = *location(); - return MaybeObject(DecompressTaggedAny(address(), value)); + return MaybeObject(TCompressionScheme::DecompressTaggedAny(address(), value)); } MaybeObject CompressedMaybeObjectSlot::load(PtrComprCageBase cage_base) const { Tagged_t value = *location(); - return MaybeObject(DecompressTaggedAny(cage_base, value)); + return MaybeObject(TCompressionScheme::DecompressTaggedAny(cage_base, value)); } void CompressedMaybeObjectSlot::store(MaybeObject value) const { - *location() = CompressTagged(value.ptr()); + *location() = TCompressionScheme::CompressTagged(value.ptr()); } MaybeObject CompressedMaybeObjectSlot::Relaxed_Load() const { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location()); - return MaybeObject(DecompressTaggedAny(address(), value)); 
+ return MaybeObject(TCompressionScheme::DecompressTaggedAny(address(), value)); } MaybeObject CompressedMaybeObjectSlot::Relaxed_Load( PtrComprCageBase cage_base) const { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location()); - return MaybeObject(DecompressTaggedAny(cage_base, value)); + return MaybeObject(TCompressionScheme::DecompressTaggedAny(cage_base, value)); } void CompressedMaybeObjectSlot::Relaxed_Store(MaybeObject value) const { - Tagged_t ptr = CompressTagged(value.ptr()); + Tagged_t ptr = TCompressionScheme::CompressTagged(value.ptr()); AsAtomicTagged::Relaxed_Store(location(), ptr); } void CompressedMaybeObjectSlot::Release_CompareAndSwap( MaybeObject old, MaybeObject target) const { - Tagged_t old_ptr = CompressTagged(old.ptr()); - Tagged_t target_ptr = CompressTagged(target.ptr()); + Tagged_t old_ptr = TCompressionScheme::CompressTagged(old.ptr()); + Tagged_t target_ptr = TCompressionScheme::CompressTagged(target.ptr()); AsAtomicTagged::Release_CompareAndSwap(location(), old_ptr, target_ptr); } @@ -143,73 +142,86 @@ void CompressedMaybeObjectSlot::Release_CompareAndSwap( HeapObjectReference CompressedHeapObjectSlot::operator*() const { Tagged_t value = *location(); - return HeapObjectReference(DecompressTaggedPointer(address(), value)); + return HeapObjectReference( + TCompressionScheme::DecompressTaggedPointer(address(), value)); } HeapObjectReference CompressedHeapObjectSlot::load( PtrComprCageBase cage_base) const { Tagged_t value = *location(); - return HeapObjectReference(DecompressTaggedPointer(cage_base, value)); + return HeapObjectReference( + TCompressionScheme::DecompressTaggedPointer(cage_base, value)); } void CompressedHeapObjectSlot::store(HeapObjectReference value) const { - *location() = CompressTagged(value.ptr()); + *location() = TCompressionScheme::CompressTagged(value.ptr()); } HeapObject CompressedHeapObjectSlot::ToHeapObject() const { Tagged_t value = *location(); DCHECK(HAS_STRONG_HEAP_OBJECT_TAG(value)); - return HeapObject::cast(Object(DecompressTaggedPointer(address(), value))); + return HeapObject::cast( + Object(TCompressionScheme::DecompressTaggedPointer(address(), value))); } void CompressedHeapObjectSlot::StoreHeapObject(HeapObject value) const { - *location() = CompressTagged(value.ptr()); + *location() = TCompressionScheme::CompressTagged(value.ptr()); } // // OffHeapCompressedObjectSlot implementation. 
// -Object OffHeapCompressedObjectSlot::load(PtrComprCageBase cage_base) const { - Tagged_t value = *location(); - return Object(DecompressTaggedAny(cage_base, value)); +template <typename CompressionScheme> +Object OffHeapCompressedObjectSlot<CompressionScheme>::load( + PtrComprCageBase cage_base) const { + Tagged_t value = *TSlotBase::location(); + return Object(CompressionScheme::DecompressTaggedAny(cage_base, value)); } -void OffHeapCompressedObjectSlot::store(Object value) const { - *location() = CompressTagged(value.ptr()); +template <typename CompressionScheme> +void OffHeapCompressedObjectSlot<CompressionScheme>::store(Object value) const { + *TSlotBase::location() = CompressionScheme::CompressTagged(value.ptr()); } -Object OffHeapCompressedObjectSlot::Relaxed_Load( +template <typename CompressionScheme> +Object OffHeapCompressedObjectSlot<CompressionScheme>::Relaxed_Load( PtrComprCageBase cage_base) const { - AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location()); - return Object(DecompressTaggedAny(cage_base, value)); + AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(TSlotBase::location()); + return Object(CompressionScheme::DecompressTaggedAny(cage_base, value)); } -Object OffHeapCompressedObjectSlot::Acquire_Load( +template <typename CompressionScheme> +Object OffHeapCompressedObjectSlot<CompressionScheme>::Acquire_Load( PtrComprCageBase cage_base) const { - AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location()); - return Object(DecompressTaggedAny(cage_base, value)); + AtomicTagged_t value = AsAtomicTagged::Acquire_Load(TSlotBase::location()); + return Object(CompressionScheme::DecompressTaggedAny(cage_base, value)); } -void OffHeapCompressedObjectSlot::Relaxed_Store(Object value) const { - Tagged_t ptr = CompressTagged(value.ptr()); - AsAtomicTagged::Relaxed_Store(location(), ptr); +template <typename CompressionScheme> +void OffHeapCompressedObjectSlot<CompressionScheme>::Relaxed_Store( + Object value) const { + Tagged_t ptr = CompressionScheme::CompressTagged(value.ptr()); + AsAtomicTagged::Relaxed_Store(TSlotBase::location(), ptr); } -void OffHeapCompressedObjectSlot::Release_Store(Object value) const { - Tagged_t ptr = CompressTagged(value.ptr()); - AsAtomicTagged::Release_Store(location(), ptr); +template <typename CompressionScheme> +void OffHeapCompressedObjectSlot<CompressionScheme>::Release_Store( + Object value) const { + Tagged_t ptr = CompressionScheme::CompressTagged(value.ptr()); + AsAtomicTagged::Release_Store(TSlotBase::location(), ptr); } -void OffHeapCompressedObjectSlot::Release_CompareAndSwap(Object old, - Object target) const { - Tagged_t old_ptr = CompressTagged(old.ptr()); - Tagged_t target_ptr = CompressTagged(target.ptr()); - AsAtomicTagged::Release_CompareAndSwap(location(), old_ptr, target_ptr); +template <typename CompressionScheme> +void OffHeapCompressedObjectSlot<CompressionScheme>::Release_CompareAndSwap( + Object old, Object target) const { + Tagged_t old_ptr = CompressionScheme::CompressTagged(old.ptr()); + Tagged_t target_ptr = CompressionScheme::CompressTagged(target.ptr()); + AsAtomicTagged::Release_CompareAndSwap(TSlotBase::location(), old_ptr, + target_ptr); } -} // namespace internal -} // namespace v8 +} // namespace v8::internal #endif // V8_COMPRESS_POINTERS diff --git a/deps/v8/src/objects/compressed-slots.h b/deps/v8/src/objects/compressed-slots.h index c31856d0a571c6..fd6d9acf484633 100644 --- a/deps/v8/src/objects/compressed-slots.h +++ b/deps/v8/src/objects/compressed-slots.h @@ -8,16 +8,19 @@ #include 
"include/v8config.h" #include "src/objects/slots.h" -namespace v8 { -namespace internal { +namespace v8::internal { #ifdef V8_COMPRESS_POINTERS + +class V8HeapCompressionScheme; + // A CompressedObjectSlot instance describes a kTaggedSize-sized field ("slot") // holding a compressed tagged pointer (smi or heap object). // Its address() is the address of the slot. // The slot's contents can be read and written using operator* and store(). class CompressedObjectSlot : public SlotBase<CompressedObjectSlot, Tagged_t> { public: + using TCompressionScheme = V8HeapCompressionScheme; using TObject = Object; using THeapObjectSlot = CompressedHeapObjectSlot; @@ -64,6 +67,7 @@ class CompressedObjectSlot : public SlotBase<CompressedObjectSlot, Tagged_t> { class CompressedMaybeObjectSlot : public SlotBase<CompressedMaybeObjectSlot, Tagged_t> { public: + using TCompressionScheme = V8HeapCompressionScheme; using TObject = MaybeObject; using THeapObjectSlot = CompressedHeapObjectSlot; @@ -100,6 +104,8 @@ class CompressedMaybeObjectSlot class CompressedHeapObjectSlot : public SlotBase<CompressedHeapObjectSlot, Tagged_t> { public: + using TCompressionScheme = V8HeapCompressionScheme; + CompressedHeapObjectSlot() : SlotBase(kNullAddress) {} explicit CompressedHeapObjectSlot(Address ptr) : SlotBase(ptr) {} explicit CompressedHeapObjectSlot(Object* ptr) @@ -123,18 +129,23 @@ class CompressedHeapObjectSlot // and so does not provide an operator* with implicit Isolate* calculation. // Its address() is the address of the slot. // The slot's contents can be read and written using load() and store(). +template <typename CompressionScheme> class OffHeapCompressedObjectSlot - : public SlotBase<OffHeapCompressedObjectSlot, Tagged_t> { + : public SlotBase<OffHeapCompressedObjectSlot<CompressionScheme>, + Tagged_t> { public: + using TSlotBase = + SlotBase<OffHeapCompressedObjectSlot<CompressionScheme>, Tagged_t>; + using TCompressionScheme = CompressionScheme; using TObject = Object; - using THeapObjectSlot = OffHeapCompressedObjectSlot; + using THeapObjectSlot = OffHeapCompressedObjectSlot<CompressionScheme>; static constexpr bool kCanBeWeak = false; - OffHeapCompressedObjectSlot() : SlotBase(kNullAddress) {} - explicit OffHeapCompressedObjectSlot(Address ptr) : SlotBase(ptr) {} + OffHeapCompressedObjectSlot() : TSlotBase(kNullAddress) {} + explicit OffHeapCompressedObjectSlot(Address ptr) : TSlotBase(ptr) {} explicit OffHeapCompressedObjectSlot(const uint32_t* ptr) - : SlotBase(reinterpret_cast<Address>(ptr)) {} + : TSlotBase(reinterpret_cast<Address>(ptr)) {} inline Object load(PtrComprCageBase cage_base) const; inline void store(Object value) const; @@ -148,7 +159,6 @@ class OffHeapCompressedObjectSlot #endif // V8_COMPRESS_POINTERS -} // namespace internal -} // namespace v8 +} // namespace v8::internal #endif // V8_OBJECTS_COMPRESSED_SLOTS_H_ diff --git a/deps/v8/src/objects/contexts.cc b/deps/v8/src/objects/contexts.cc index 62c747d55a992e..e35e7ee7e40d7d 100644 --- a/deps/v8/src/objects/contexts.cc +++ b/deps/v8/src/objects/contexts.cc @@ -203,6 +203,7 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name, Isolate* isolate = context->GetIsolate(); bool follow_context_chain = (flags & FOLLOW_CONTEXT_CHAIN) != 0; + bool has_seen_debug_evaluate_context = false; *index = kNotFound; *attributes = ABSENT; *init_flag = kCreatedInitialized; @@ -223,6 +224,7 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name, reinterpret_cast<void*>(context->ptr())); if 
(context->IsScriptContext()) PrintF(" (script context)");
     if (context->IsNativeContext()) PrintF(" (native context)");
+    if (context->IsDebugEvaluateContext()) PrintF(" (debug context)");
     PrintF("\n");
   }
@@ -381,6 +383,8 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name,
         }
       }
     } else if (context->IsDebugEvaluateContext()) {
+      has_seen_debug_evaluate_context = true;
+
       // Check materialized locals.
       Object ext = context->get(EXTENSION_INDEX);
       if (ext.IsJSReceiver()) {
@@ -395,6 +399,8 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name,
       // Check blocklist. Names that are listed, cannot be resolved further.
       ScopeInfo scope_info = context->scope_info();
+      CHECK_IMPLIES(v8_flags.experimental_reuse_locals_blocklists,
+                    !scope_info.HasLocalsBlockList());
       if (scope_info.HasLocalsBlockList() &&
           scope_info.LocalsBlockList().Has(isolate, name)) {
         if (v8_flags.trace_contexts) {
@@ -417,6 +423,27 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name,
     // 3. Prepare to continue with the previous (next outermost) context.
     if (context->IsNativeContext()) break;
+    // If we have seen a DebugEvaluateContext, we need to check the block
+    // list before advancing, so that stack-allocated variables are
+    // properly "shadowed".
+    // Note that this implicitly skips the block list check for the
+    // "wrapped" context lookup of DebugEvaluateContexts; in that case
+    // `has_seen_debug_evaluate_context` is always false.
+    if (v8_flags.experimental_reuse_locals_blocklists &&
+        has_seen_debug_evaluate_context &&
+        isolate->heap()->locals_block_list_cache().IsEphemeronHashTable()) {
+      Handle<ScopeInfo> scope_info = handle(context->scope_info(), isolate);
+      Object maybe_outer_block_list =
+          isolate->LocalsBlockListCacheGet(scope_info);
+      if (maybe_outer_block_list.IsStringSet() &&
+          StringSet::cast(maybe_outer_block_list).Has(isolate, name)) {
+        if (v8_flags.trace_contexts) {
+          PrintF(" - name is blocklisted.
Aborting.\n"); + } + break; + } + } + context = Handle<Context>(context->previous(), isolate); } while (follow_context_chain); @@ -426,6 +453,10 @@ Handle<Object> Context::Lookup(Handle<Context> context, Handle<String> name, return Handle<Object>::null(); } +bool NativeContext::HasTemplateLiteralObject(JSArray array) { + return array.map() == js_array_template_literal_object_map(); +} + void NativeContext::AddOptimizedCode(CodeT code) { DCHECK(CodeKindCanDeoptimize(code.kind())); DCHECK(code.next_code_link().IsUndefined()); diff --git a/deps/v8/src/objects/contexts.h b/deps/v8/src/objects/contexts.h index 5183f9063f95c5..7e1e715038ff6c 100644 --- a/deps/v8/src/objects/contexts.h +++ b/deps/v8/src/objects/contexts.h @@ -157,6 +157,8 @@ enum ContextLookupFlags { intl_date_time_format_function) \ V(INTL_DISPLAY_NAMES_FUNCTION_INDEX, JSFunction, \ intl_display_names_function) \ + V(INTL_DURATION_FORMAT_FUNCTION_INDEX, JSFunction, \ + intl_duration_format_function) \ V(INTL_NUMBER_FORMAT_FUNCTION_INDEX, JSFunction, \ intl_number_format_function) \ V(INTL_LOCALE_FUNCTION_INDEX, JSFunction, intl_locale_function) \ @@ -178,11 +180,14 @@ enum ContextLookupFlags { js_array_packed_double_elements_map) \ V(JS_ARRAY_HOLEY_DOUBLE_ELEMENTS_MAP_INDEX, Map, \ js_array_holey_double_elements_map) \ + V(JS_ARRAY_TEMPLATE_LITERAL_OBJECT_MAP, Map, \ + js_array_template_literal_object_map) \ V(JS_ATOMICS_CONDITION_MAP, Map, js_atomics_condition_map) \ V(JS_ATOMICS_MUTEX_MAP, Map, js_atomics_mutex_map) \ V(JS_MAP_FUN_INDEX, JSFunction, js_map_fun) \ V(JS_MAP_MAP_INDEX, Map, js_map_map) \ V(JS_MODULE_NAMESPACE_MAP, Map, js_module_namespace_map) \ + V(JS_RAW_JSON_MAP, Map, js_raw_json_map) \ V(JS_SET_FUN_INDEX, JSFunction, js_set_fun) \ V(JS_SET_MAP_INDEX, Map, js_set_map) \ V(JS_WEAK_MAP_FUN_INDEX, JSFunction, js_weak_map_fun) \ @@ -209,6 +214,7 @@ enum ContextLookupFlags { temporal_time_zone_function) \ V(JS_TEMPORAL_ZONED_DATE_TIME_FUNCTION_INDEX, JSFunction, \ temporal_zoned_date_time_function) \ + V(JSON_OBJECT, JSObject, json_object) \ V(TEMPORAL_INSTANT_FIXED_ARRAY_FROM_ITERABLE_FUNCTION_INDEX, JSFunction, \ temporal_instant_fixed_array_from_iterable) \ V(STRING_FIXED_ARRAY_FROM_ITERABLE_FUNCTION_INDEX, JSFunction, \ @@ -746,6 +752,8 @@ class NativeContext : public Context { inline Map TypedArrayElementsKindToRabGsabCtorMap( ElementsKind element_kind) const; + bool HasTemplateLiteralObject(JSArray array); + // Dispatched behavior. DECL_PRINTER(NativeContext) DECL_VERIFIER(NativeContext) diff --git a/deps/v8/src/objects/dictionary.h b/deps/v8/src/objects/dictionary.h index d0e6648e769aff..3744faa6396e42 100644 --- a/deps/v8/src/objects/dictionary.h +++ b/deps/v8/src/objects/dictionary.h @@ -172,6 +172,10 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) BaseNameDictionary Handle<Object> value, PropertyDetails details, InternalIndex* entry_out = nullptr); + // Exposed for NameDictionaryLookupForwardedString slow path for forwarded + // strings. 
+ using Dictionary<Derived, Shape>::FindInsertionEntry; + OBJECT_CONSTRUCTORS(BaseNameDictionary, Dictionary<Derived, Shape>); }; diff --git a/deps/v8/src/objects/elements.cc b/deps/v8/src/objects/elements.cc index 16167bc19ff2b1..5c0e5e5ecf01e2 100644 --- a/deps/v8/src/objects/elements.cc +++ b/deps/v8/src/objects/elements.cc @@ -479,8 +479,8 @@ void SortIndices(Isolate* isolate, Handle<FixedArray> indices, AtomicSlot end(start + sort_size); std::sort(start, end, [isolate](Tagged_t elementA, Tagged_t elementB) { #ifdef V8_COMPRESS_POINTERS - Object a(DecompressTaggedAny(isolate, elementA)); - Object b(DecompressTaggedAny(isolate, elementB)); + Object a(V8HeapCompressionScheme::DecompressTaggedAny(isolate, elementA)); + Object b(V8HeapCompressionScheme::DecompressTaggedAny(isolate, elementB)); #else Object a(elementA); Object b(elementB); diff --git a/deps/v8/src/objects/feedback-vector-inl.h b/deps/v8/src/objects/feedback-vector-inl.h index d63260be7f0bc1..a8bbc3834130ed 100644 --- a/deps/v8/src/objects/feedback-vector-inl.h +++ b/deps/v8/src/objects/feedback-vector-inl.h @@ -70,7 +70,6 @@ int FeedbackMetadata::GetSlotSize(FeedbackSlotKind kind) { case FeedbackSlotKind::kCompareOp: case FeedbackSlotKind::kBinaryOp: case FeedbackSlotKind::kLiteral: - case FeedbackSlotKind::kTypeProfile: case FeedbackSlotKind::kJumpLoop: return 1; @@ -321,6 +320,8 @@ BinaryOperationHint BinaryOperationHintFromFeedback(int type_feedback) { return BinaryOperationHint::kString; case BinaryOperationFeedback::kBigInt: return BinaryOperationHint::kBigInt; + case BinaryOperationFeedback::kBigInt64: + return BinaryOperationHint::kBigInt64; default: return BinaryOperationHint::kAny; } diff --git a/deps/v8/src/objects/feedback-vector.cc b/deps/v8/src/objects/feedback-vector.cc index 4e5c3d39e1df5e..1f8b1d333c8cbc 100644 --- a/deps/v8/src/objects/feedback-vector.cc +++ b/deps/v8/src/objects/feedback-vector.cc @@ -31,20 +31,6 @@ FeedbackSlot FeedbackVectorSpec::AddSlot(FeedbackSlotKind kind) { return FeedbackSlot(slot); } -FeedbackSlot FeedbackVectorSpec::AddTypeProfileSlot() { - FeedbackSlot slot = AddSlot(FeedbackSlotKind::kTypeProfile); - CHECK_EQ(FeedbackVectorSpec::kTypeProfileSlotIndex, - FeedbackVector::GetIndex(slot)); - return slot; -} - -bool FeedbackVectorSpec::HasTypeProfileSlot() const { - FeedbackSlot slot = - FeedbackVector::ToSlot(FeedbackVectorSpec::kTypeProfileSlotIndex); - if (slot_count() <= slot.ToInt()) return false; - return GetKind(slot) == FeedbackSlotKind::kTypeProfile; -} - static bool IsPropertyNameFeedback(MaybeObject feedback) { HeapObject heap_object; if (!feedback->GetHeapObjectIfStrong(&heap_object)) return false; @@ -184,8 +170,6 @@ const char* FeedbackMetadata::Kind2String(FeedbackSlotKind kind) { return "DefineKeyedOwnPropertyInLiteral"; case FeedbackSlotKind::kLiteral: return "Literal"; - case FeedbackSlotKind::kTypeProfile: - return "TypeProfile"; case FeedbackSlotKind::kForIn: return "ForIn"; case FeedbackSlotKind::kInstanceOf: @@ -197,13 +181,6 @@ const char* FeedbackMetadata::Kind2String(FeedbackSlotKind kind) { } } -bool FeedbackMetadata::HasTypeProfileSlot() const { - FeedbackSlot slot = - FeedbackVector::ToSlot(FeedbackVectorSpec::kTypeProfileSlotIndex); - return slot.ToInt() < slot_count() && - GetKind(slot) == FeedbackSlotKind::kTypeProfile; -} - FeedbackSlotKind FeedbackVector::GetKind(FeedbackSlot slot) const { DCHECK(!is_empty()); return metadata().GetKind(slot); @@ -215,14 +192,6 @@ FeedbackSlotKind FeedbackVector::GetKind(FeedbackSlot slot, return 
metadata(tag).GetKind(slot); } -FeedbackSlot FeedbackVector::GetTypeProfileSlot() const { - DCHECK(metadata().HasTypeProfileSlot()); - FeedbackSlot slot = - FeedbackVector::ToSlot(FeedbackVectorSpec::kTypeProfileSlotIndex); - DCHECK_EQ(FeedbackSlotKind::kTypeProfile, GetKind(slot)); - return slot; -} - // static Handle<ClosureFeedbackCellArray> ClosureFeedbackCellArray::New( Isolate* isolate, Handle<SharedFunctionInfo> shared) { @@ -310,7 +279,6 @@ Handle<FeedbackVector> FeedbackVector::New( case FeedbackSlotKind::kSetKeyedStrict: case FeedbackSlotKind::kStoreInArrayLiteral: case FeedbackSlotKind::kDefineKeyedOwnPropertyInLiteral: - case FeedbackSlotKind::kTypeProfile: case FeedbackSlotKind::kInstanceOf: vector->Set(slot, *uninitialized_sentinel, SKIP_WRITE_BARRIER); break; @@ -325,8 +293,7 @@ Handle<FeedbackVector> FeedbackVector::New( } Handle<FeedbackVector> result = Handle<FeedbackVector>::cast(vector); - if (!isolate->is_best_effort_code_coverage() || - isolate->is_collecting_type_profile()) { + if (!isolate->is_best_effort_code_coverage()) { AddToVectorsForProfilingTools(isolate, result); } return result; @@ -372,8 +339,7 @@ Handle<FeedbackVector> FeedbackVector::NewWithOneCompareSlotForTesting( // static void FeedbackVector::AddToVectorsForProfilingTools( Isolate* isolate, Handle<FeedbackVector> vector) { - DCHECK(!isolate->is_best_effort_code_coverage() || - isolate->is_collecting_type_profile()); + DCHECK(!isolate->is_best_effort_code_coverage()); if (!vector->shared_function_info().IsSubjectToDebugging()) return; Handle<ArrayList> list = Handle<ArrayList>::cast( isolate->factory()->feedback_vectors_for_profiling_tools()); @@ -613,16 +579,6 @@ bool FeedbackNexus::Clear(ClearBehavior behavior) { bool feedback_updated = false; switch (kind()) { - case FeedbackSlotKind::kTypeProfile: - if (V8_LIKELY(behavior == ClearBehavior::kDefault)) { - // We don't clear these kinds ever. 
- } else if (!IsCleared()) { - DCHECK_EQ(behavior, ClearBehavior::kClearAll); - SetFeedback(UninitializedSentinel(), SKIP_WRITE_BARRIER); - feedback_updated = true; - } - break; - case FeedbackSlotKind::kCompareOp: case FeedbackSlotKind::kForIn: case FeedbackSlotKind::kBinaryOp: @@ -849,12 +805,6 @@ InlineCacheState FeedbackNexus::ic_state() const { return InlineCacheState::MEGAMORPHIC; } - case FeedbackSlotKind::kTypeProfile: { - if (feedback == UninitializedSentinel()) { - return InlineCacheState::UNINITIALIZED; - } - return InlineCacheState::MONOMORPHIC; - } case FeedbackSlotKind::kCloneObject: { if (feedback == UninitializedSentinel()) { @@ -1355,120 +1305,6 @@ MaybeHandle<JSObject> FeedbackNexus::GetConstructorFeedback() const { return MaybeHandle<JSObject>(); } -namespace { - -bool InList(Handle<ArrayList> types, Handle<String> type) { - for (int i = 0; i < types->Length(); i++) { - Object obj = types->Get(i); - if (String::cast(obj).Equals(*type)) { - return true; - } - } - return false; -} -} // anonymous namespace - -void FeedbackNexus::Collect(Handle<String> type, int position) { - DCHECK(IsTypeProfileKind(kind())); - DCHECK_GE(position, 0); - DCHECK(config()->can_write()); - Isolate* isolate = GetIsolate(); - - MaybeObject const feedback = GetFeedback(); - - // Map source position to collection of types - Handle<SimpleNumberDictionary> types; - - if (feedback == UninitializedSentinel()) { - types = SimpleNumberDictionary::New(isolate, 1); - } else { - types = handle( - SimpleNumberDictionary::cast(feedback->GetHeapObjectAssumeStrong()), - isolate); - } - - Handle<ArrayList> position_specific_types; - - InternalIndex entry = types->FindEntry(isolate, position); - if (entry.is_not_found()) { - position_specific_types = ArrayList::New(isolate, 1); - types = SimpleNumberDictionary::Set( - isolate, types, position, - ArrayList::Add(isolate, position_specific_types, type)); - } else { - DCHECK(types->ValueAt(entry).IsArrayList()); - position_specific_types = - handle(ArrayList::cast(types->ValueAt(entry)), isolate); - if (!InList(position_specific_types, type)) { // Add type - types = SimpleNumberDictionary::Set( - isolate, types, position, - ArrayList::Add(isolate, position_specific_types, type)); - } - } - SetFeedback(*types); -} - -std::vector<int> FeedbackNexus::GetSourcePositions() const { - DCHECK(IsTypeProfileKind(kind())); - std::vector<int> source_positions; - Isolate* isolate = GetIsolate(); - - MaybeObject const feedback = GetFeedback(); - - if (feedback == UninitializedSentinel()) { - return source_positions; - } - - Handle<SimpleNumberDictionary> types( - SimpleNumberDictionary::cast(feedback->GetHeapObjectAssumeStrong()), - isolate); - - for (int index = SimpleNumberDictionary::kElementsStartIndex; - index < types->length(); index += SimpleNumberDictionary::kEntrySize) { - int key_index = index + SimpleNumberDictionary::kEntryKeyIndex; - Object key = types->get(key_index); - if (key.IsSmi()) { - int position = Smi::cast(key).value(); - source_positions.push_back(position); - } - } - return source_positions; -} - -std::vector<Handle<String>> FeedbackNexus::GetTypesForSourcePositions( - uint32_t position) const { - DCHECK(IsTypeProfileKind(kind())); - Isolate* isolate = GetIsolate(); - - MaybeObject const feedback = GetFeedback(); - std::vector<Handle<String>> types_for_position; - if (feedback == UninitializedSentinel()) { - return types_for_position; - } - - Handle<SimpleNumberDictionary> types( - SimpleNumberDictionary::cast(feedback->GetHeapObjectAssumeStrong()), - 
isolate); - - InternalIndex entry = types->FindEntry(isolate, position); - if (entry.is_not_found()) return types_for_position; - - DCHECK(types->ValueAt(entry).IsArrayList()); - Handle<ArrayList> position_specific_types = - Handle<ArrayList>(ArrayList::cast(types->ValueAt(entry)), isolate); - for (int i = 0; i < position_specific_types->Length(); i++) { - Object t = position_specific_types->Get(i); - types_for_position.push_back(Handle<String>(String::cast(t), isolate)); - } - - return types_for_position; -} - -void FeedbackNexus::ResetTypeProfile() { - DCHECK(IsTypeProfileKind(kind())); - SetFeedback(UninitializedSentinel()); -} - FeedbackIterator::FeedbackIterator(const FeedbackNexus* nexus) : done_(false), index_(-1), state_(kOther) { DCHECK( diff --git a/deps/v8/src/objects/feedback-vector.h b/deps/v8/src/objects/feedback-vector.h index c410ed1e660b72..987505a99cfb5e 100644 --- a/deps/v8/src/objects/feedback-vector.h +++ b/deps/v8/src/objects/feedback-vector.h @@ -62,7 +62,6 @@ enum class FeedbackSlotKind : uint8_t { kBinaryOp, kCompareOp, kDefineKeyedOwnPropertyInLiteral, - kTypeProfile, kLiteral, kForIn, kInstanceOf, @@ -134,10 +133,6 @@ inline bool IsGlobalICKind(FeedbackSlotKind kind) { return IsLoadGlobalICKind(kind) || IsStoreGlobalICKind(kind); } -inline bool IsTypeProfileKind(FeedbackSlotKind kind) { - return kind == FeedbackSlotKind::kTypeProfile; -} - inline bool IsCloneObjectKind(FeedbackSlotKind kind) { return kind == FeedbackSlotKind::kCloneObject; } @@ -321,8 +316,6 @@ class FeedbackVector V8_EXPORT_PRIVATE FeedbackSlotKind GetKind(FeedbackSlot slot, AcquireLoadTag tag) const; - FeedbackSlot GetTypeProfileSlot() const; - V8_EXPORT_PRIVATE static Handle<FeedbackVector> New( Isolate* isolate, Handle<SharedFunctionInfo> shared, Handle<ClosureFeedbackCellArray> closure_feedback_cell_array, @@ -345,7 +338,6 @@ class FeedbackVector DEFINE_SLOT_KIND_PREDICATE(IsDefineNamedOwnIC) DEFINE_SLOT_KIND_PREDICATE(IsStoreGlobalIC) DEFINE_SLOT_KIND_PREDICATE(IsKeyedStoreIC) - DEFINE_SLOT_KIND_PREDICATE(IsTypeProfile) #undef DEFINE_SLOT_KIND_PREDICATE // Returns typeof mode encoded into kind of given slot. @@ -436,13 +428,6 @@ class V8_EXPORT_PRIVATE FeedbackVectorSpec { return slot_kinds_.at(slot.ToInt()); } - bool HasTypeProfileSlot() const; - - // If used, the TypeProfileSlot is always added as the first slot and its - // index is constant. If other slots are added before the TypeProfileSlot, - // this number changes. - static const int kTypeProfileSlotIndex = 0; - FeedbackSlot AddCallICSlot() { return AddSlot(FeedbackSlotKind::kCall); } FeedbackSlot AddLoadICSlot() { @@ -524,8 +509,6 @@ class V8_EXPORT_PRIVATE FeedbackVectorSpec { return AddSlot(FeedbackSlotKind::kDefineKeyedOwnPropertyInLiteral); } - FeedbackSlot AddTypeProfileSlot(); - FeedbackSlot AddCloneObjectSlot() { return AddSlot(FeedbackSlotKind::kCloneObject); } @@ -611,7 +594,6 @@ class FeedbackMetadata : public HeapObject { DECL_VERIFIER(FeedbackMetadata) static const char* Kind2String(FeedbackSlotKind kind); - bool HasTypeProfileSlot() const; // Garbage collection support. // This includes any necessary padding at the end of the object for pointer @@ -902,16 +884,6 @@ class V8_EXPORT_PRIVATE FeedbackNexus final { // Make sure we don't overflow the smi. static_assert(LEXICAL_MODE_BIT_FIELDS_Ranges::kBitsCount <= kSmiValueSize); - // For TypeProfile feedback vector slots. - // ResetTypeProfile will always reset type profile information. 
-  void ResetTypeProfile();
-
-  // Add a type to the list of types for source position <position>.
-  void Collect(Handle<String> type, int position);
-
-  std::vector<int> GetSourcePositions() const;
-  std::vector<Handle<String>> GetTypesForSourcePositions(uint32_t pos) const;
-
  private:
   template <typename FeedbackType>
   inline void SetFeedback(FeedbackType feedback,
diff --git a/deps/v8/src/objects/hash-table.h b/deps/v8/src/objects/hash-table.h
index 2a7d5531bc3e99..a77d7e8aa4b272 100644
--- a/deps/v8/src/objects/hash-table.h
+++ b/deps/v8/src/objects/hash-table.h
@@ -470,6 +470,10 @@ class V8_EXPORT_PRIVATE NameToIndexHashTable
       Handle<NameToIndexHashTable> table, Handle<Name> key, int32_t value);
+  // Exposed for NameDictionaryLookupForwardedString slow path for forwarded
+  // strings.
+  using HashTable<NameToIndexHashTable, NameToIndexShape>::FindInsertionEntry;
+
   DECL_CAST(NameToIndexHashTable)
   DECL_PRINTER(NameToIndexHashTable)
diff --git a/deps/v8/src/objects/heap-number.tq b/deps/v8/src/objects/heap-number.tq
index af2545c1fb3e0b..b1c52a0b000c2d 100644
--- a/deps/v8/src/objects/heap-number.tq
+++ b/deps/v8/src/objects/heap-number.tq
@@ -5,6 +5,8 @@ extern class HeapNumber extends PrimitiveHeapObject {
   // Marked as a relaxed store because of a race with reading on the
   // compiler thread.
+  // TODO(v8:13070): With 8GB+ pointer compression, the number in a HeapNumber
+  // is unaligned. Modify the HeapNumber layout so it remains aligned.
   @cppRelaxedStore value: float64;
 }
diff --git a/deps/v8/src/objects/intl-objects.cc b/deps/v8/src/objects/intl-objects.cc
index 0e73a546a9b814..881842fbdc76f1 100644
--- a/deps/v8/src/objects/intl-objects.cc
+++ b/deps/v8/src/objects/intl-objects.cc
@@ -2962,10 +2962,54 @@ const icu::BasicTimeZone* CreateBasicTimeZoneFromIndex(
           Intl::TimeZoneIdFromIndex(time_zone_index).c_str(), -1, US_INV)));
 }
+// ICU only supports time zone information at millisecond precision, but
+// Temporal requires nanoseconds. In most cases we approximate by flooring
+// to the millisecond at or just before nanosecond_epoch. For negative epoch
+// values, BigInt division truncates toward zero, so we subtract 1 whenever
+// the remainder is non-zero. When searching for the previous transition, we
+// instead ceil to the millisecond at or just after nanosecond_epoch.
+enum class Direction { kPast, kFuture };
+int64_t ApproximateMillisecondEpoch(Isolate* isolate,
+                                    Handle<BigInt> nanosecond_epoch,
+                                    Direction direction = Direction::kPast) {
+  Handle<BigInt> one_million = BigInt::FromUint64(isolate, 1000000);
+  int64_t ms = BigInt::Divide(isolate, nanosecond_epoch, one_million)
+                   .ToHandleChecked()
+                   ->AsInt64();
+  Handle<BigInt> remainder =
+      BigInt::Remainder(isolate, nanosecond_epoch, one_million)
+          .ToHandleChecked();
+  // If nanosecond_epoch is not on an exact millisecond
+  if (remainder->ToBoolean()) {
+    if (direction == Direction::kPast) {
+      if (remainder->IsNegative()) {
+        // The remainder is negative, so the epoch is negative;
+        // move to the previous millisecond.
+        ms -= 1;
+      }
+    } else {
+      if (!remainder->IsNegative()) {
+        // Move to the future millisecond
+        ms += 1;
+      }
+    }
+  }
+  return ms;
+}
+
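As an aside on the rounding rule the comment above describes: plain int64_t arithmetic already truncates toward zero, just like BigInt::Divide, so the floor/ceil adjustment can be sketched without BigInt at all. The following standalone example (function name and sample values are illustrative only, not part of the patch) shows the expected results:

```cpp
#include <cassert>
#include <cstdint>

enum class Direction { kPast, kFuture };

// Mirrors the adjustment in ApproximateMillisecondEpoch above, using plain
// integers: division truncates toward zero, so flooring a negative epoch
// needs -1 and ceiling a positive epoch needs +1.
int64_t ApproximateMsEpoch(int64_t ns, Direction direction) {
  int64_t ms = ns / 1000000;   // truncates toward zero
  int64_t rem = ns % 1000000;  // carries the sign of ns
  if (rem != 0) {
    if (direction == Direction::kPast) {
      if (rem < 0) ms -= 1;  // floor: -1.000001 ms -> -2 ms
    } else {
      if (rem > 0) ms += 1;  // ceil: 1.000001 ms -> 2 ms
    }
  }
  return ms;
}

int main() {
  assert(ApproximateMsEpoch(-1000001, Direction::kPast) == -2);
  assert(ApproximateMsEpoch(1000001, Direction::kFuture) == 2);
  // Exact milliseconds are never adjusted in either direction.
  assert(ApproximateMsEpoch(-3000000, Direction::kPast) == -3);
  assert(ApproximateMsEpoch(3000000, Direction::kFuture) == 3);
  return 0;
}
```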
+// Helper function to convert milliseconds in int64_t
+// to a BigInt in nanoseconds.
+Handle<BigInt> MillisecondToNanosecond(Isolate* isolate, int64_t ms) {
+  return BigInt::Multiply(isolate, BigInt::FromInt64(isolate, ms),
+                          BigInt::FromUint64(isolate, 1000000))
+      .ToHandleChecked();
+}
+
 }  // namespace
-Maybe<int64_t> Intl::GetTimeZoneOffsetTransitionMilliseconds(
-    Isolate* isolate, int32_t time_zone_index, int64_t time_ms,
+Handle<Object> Intl::GetTimeZoneOffsetTransitionNanoseconds(
+    Isolate* isolate, int32_t time_zone_index, Handle<BigInt> nanosecond_epoch,
     Intl::Transition transition) {
   std::unique_ptr<const icu::BasicTimeZone> basic_time_zone(
       CreateBasicTimeZoneFromIndex(time_zone_index));
@@ -2974,56 +3018,77 @@ Maybe<int64_t> Intl::GetTimeZoneOffsetTransitionMilliseconds(
   UBool has_transition;
   switch (transition) {
     case Intl::Transition::kNext:
-      has_transition =
-          basic_time_zone->getNextTransition(time_ms, false, icu_transition);
+      has_transition = basic_time_zone->getNextTransition(
+          ApproximateMillisecondEpoch(isolate, nanosecond_epoch), false,
+          icu_transition);
       break;
     case Intl::Transition::kPrevious:
-      has_transition = basic_time_zone->getPreviousTransition(time_ms, false,
-                                                              icu_transition);
+      has_transition = basic_time_zone->getPreviousTransition(
+          ApproximateMillisecondEpoch(isolate, nanosecond_epoch,
+                                      Direction::kFuture),
+          false, icu_transition);
       break;
   }
   if (!has_transition) {
-    return Nothing<int64_t>();
+    return isolate->factory()->null_value();
   }
-  return Just(static_cast<int64_t>(icu_transition.getTime()));
-}
-
-std::vector<int64_t> Intl::GetTimeZonePossibleOffsetMilliseconds(
-    Isolate* isolate, int32_t time_zone_index, int64_t time_in_millisecond) {
+  // #sec-temporal-getianatimezonenexttransition and
+  // #sec-temporal-getianatimezoneprevioustransition state:
+  // "The operation returns null if no such transition exists for which t ≤
+  // ℤ(nsMaxInstant)." and "The operation returns null if no such transition
+  // exists for which t ≥ ℤ(nsMinInstant)."
+  //
+  // nsMinInstant = -nsMaxInstant = -8.64 × 10^21 => msMinInstant = -8.64 ×
+  // 10^15
+  constexpr int64_t kMsMinInstant = -8.64e15;
+  // nsMaxInstant = 10^8 × nsPerDay = 8.64 × 10^21 => msMaxInstant = 8.64 ×
+  // 10^15
+  constexpr int64_t kMsMaxInstant = 8.64e15;
+  int64_t time_ms = static_cast<int64_t>(icu_transition.getTime());
+  if (time_ms < kMsMinInstant || time_ms > kMsMaxInstant) {
+    return isolate->factory()->null_value();
+  }
+  return MillisecondToNanosecond(isolate, time_ms);
+}
+
+std::vector<Handle<BigInt>> Intl::GetTimeZonePossibleOffsetNanoseconds(
+    Isolate* isolate, int32_t time_zone_index,
+    Handle<BigInt> nanosecond_epoch) {
   std::unique_ptr<const icu::BasicTimeZone> basic_time_zone(
       CreateBasicTimeZoneFromIndex(time_zone_index));
+  int64_t time_ms = ApproximateMillisecondEpoch(isolate, nanosecond_epoch);
   int32_t raw_offset;
   int32_t dst_offset;
   UErrorCode status = U_ZERO_ERROR;
-  basic_time_zone->getOffsetFromLocal(time_in_millisecond, UCAL_TZ_LOCAL_FORMER,
+  basic_time_zone->getOffsetFromLocal(time_ms, UCAL_TZ_LOCAL_FORMER,
                                       UCAL_TZ_LOCAL_FORMER, raw_offset,
                                       dst_offset, status);
   DCHECK(U_SUCCESS(status));
-  // offset for time_in_milliseconds interpretted as before a time zone
+  // offset for time_ms interpreted as before a time zone
   // transition
-  int32_t offset_former = raw_offset + dst_offset;
+  int64_t offset_former = raw_offset + dst_offset;
-  basic_time_zone->getOffsetFromLocal(time_in_millisecond, UCAL_TZ_LOCAL_LATTER,
+  basic_time_zone->getOffsetFromLocal(time_ms, UCAL_TZ_LOCAL_LATTER,
                                       UCAL_TZ_LOCAL_LATTER, raw_offset,
                                       dst_offset, status);
   DCHECK(U_SUCCESS(status));
-  // offset for time_in_milliseconds interpretted as after a time zone
+  // offset for time_ms interpreted as after a time zone
   // transition
-  int32_t offset_latter = raw_offset + dst_offset;
+  int64_t offset_latter = raw_offset + dst_offset;
-  std::vector<int64_t> result;
+  std::vector<Handle<BigInt>> result;
   if (offset_former == offset_latter) {
     // For most of the time, when either interpretation are the same, we are not
     // in a moment of offset transition based on rule changing: Just return that
     // value.
-    result.push_back(offset_former);
+    result.push_back(MillisecondToNanosecond(isolate, offset_former));
   } else if (offset_former > offset_latter) {
     // When the input represents a local time repeating multiple times at a
     // negative time zone transition (e.g. when the daylight saving time ends
     // or the time zone offset is decreased due to a time zone rule change).
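To make the repeated-local-time case above concrete: at a DST fall-back (for example America/New_York on 2021-11-07, where 01:30 local time occurs twice), the former interpretation yields the pre-transition offset and the latter the post-transition one. A minimal sketch of the three result shapes follows; the helper name and sample offsets are hypothetical, and offsets are given in seconds for readability rather than the nanosecond BigInts the patch returns:

```cpp
#include <cstdint>
#include <vector>

// Candidate UTC offsets for one local wall-clock time, mirroring the three
// cases documented above.
std::vector<int64_t> PossibleOffsets(int64_t offset_former,
                                     int64_t offset_latter) {
  std::vector<int64_t> result;
  if (offset_former == offset_latter) {
    result.push_back(offset_former);  // no transition near this local time
  } else if (offset_former > offset_latter) {
    // Fall-back: the local time exists twice, e.g. first as EDT, then as EST.
    result.push_back(offset_former);  // -4 * 3600 (EDT)
    result.push_back(offset_latter);  // -5 * 3600 (EST)
  }
  // offset_former < offset_latter: a spring-forward transition skipped this
  // local time, so the vector stays empty.
  return result;
}
```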
-    result.push_back(offset_former);
-    result.push_back(offset_latter);
+    result.push_back(MillisecondToNanosecond(isolate, offset_former));
+    result.push_back(MillisecondToNanosecond(isolate, offset_latter));
   } else {
     // If the offset after the transition is greater than the offset before the
     // transition, that mean it is in the moment the time "skip" an hour, or two
@@ -3033,18 +3098,19 @@ std::vector<int64_t> Intl::GetTimeZonePossibleOffsetMilliseconds(
   return result;
 }
-Maybe<int64_t> Intl::GetTimeZoneOffsetMilliseconds(
-    Isolate* isolate, int32_t time_zone_index, int64_t time_in_millisecond) {
+int64_t Intl::GetTimeZoneOffsetNanoseconds(Isolate* isolate,
+                                           int32_t time_zone_index,
+                                           Handle<BigInt> nanosecond_epoch) {
   std::unique_ptr<const icu::BasicTimeZone> basic_time_zone(
       CreateBasicTimeZoneFromIndex(time_zone_index));
+  int64_t time_ms = ApproximateMillisecondEpoch(isolate, nanosecond_epoch);
   int32_t raw_offset;
   int32_t dst_offset;
   UErrorCode status = U_ZERO_ERROR;
-  basic_time_zone->getOffsetFromLocal(time_in_millisecond, UCAL_TZ_LOCAL_FORMER,
-                                      UCAL_TZ_LOCAL_FORMER, raw_offset,
-                                      dst_offset, status);
+  basic_time_zone->getOffset(time_ms, false, raw_offset, dst_offset, status);
   DCHECK(U_SUCCESS(status));
-  return Just(static_cast<int64_t>(raw_offset + dst_offset));
+  // Turn ms into ns
+  return static_cast<int64_t>(raw_offset + dst_offset) * 1000000;
 }
 }  // namespace internal
diff --git a/deps/v8/src/objects/intl-objects.h b/deps/v8/src/objects/intl-objects.h
index 0ac4b14096c8a7..4339673d56693a 100644
--- a/deps/v8/src/objects/intl-objects.h
+++ b/deps/v8/src/objects/intl-objects.h
@@ -365,30 +365,35 @@ class Intl {
   // Functions to support Temporal
-  V8_WARN_UNUSED_RESULT static Maybe<int64_t>
-  GetTimeZoneOffsetTransitionMilliseconds(Isolate* isolate,
-                                          int32_t time_zone_index,
-                                          int64_t time_ms,
-                                          Transition transition);
-
-  static Handle<String> DefaultTimeZone(Isolate* isolate);
-
-  V8_WARN_UNUSED_RESULT static Maybe<int64_t> GetTimeZoneOffsetMilliseconds(
-      Isolate* isolate, int32_t time_zone_index, int64_t millisecond);
+  // Returns the epoch of the transition as a BigInt, or null if there is no
+  // such transition.
+  static Handle<Object> GetTimeZoneOffsetTransitionNanoseconds(
+      Isolate* isolate, int32_t time_zone_index,
+      Handle<BigInt> nanosecond_epoch, Transition transition);
+
+  // Returns the time zone offset in effect at nanosecond_epoch, in
+  // nanoseconds, as an int64_t.
+  static int64_t GetTimeZoneOffsetNanoseconds(Isolate* isolate,
+                                              int32_t time_zone_index,
+                                              Handle<BigInt> nanosecond_epoch);
   // This function may return the result, the std::vector<int64_t> in one of
   // the following three condictions:
-  // 1. While time_in_millisecond fall into the daylight saving time change
+  // 1. When nanosecond_epoch falls into the daylight saving time change
   // moment that skipped one (or two or even six, in some Time Zone) hours
   // later in local time:
   // [],
   // 2. In other moment not during daylight saving time change:
   // [offset_former], and
-  // 3. when time_in_millisecond fall into they daylight saving time change hour
+  // 3. When nanosecond_epoch falls into the daylight saving time change hour
   // which the clock time roll back one (or two or six, in some Time Zone) hour:
   // [offset_former, offset_later]
-  static std::vector<int64_t> GetTimeZonePossibleOffsetMilliseconds(
-      Isolate* isolate, int32_t time_zone_index, int64_t time_ms);
+  // The returned BigInt values are in nanoseconds.
+ static std::vector<Handle<BigInt>> GetTimeZonePossibleOffsetNanoseconds( + Isolate* isolate, int32_t time_zone_index, + Handle<BigInt> nanosecond_epoch); + + static Handle<String> DefaultTimeZone(Isolate* isolate); V8_WARN_UNUSED_RESULT static MaybeHandle<String> CanonicalizeTimeZoneName( Isolate* isolate, Handle<String> identifier); diff --git a/deps/v8/src/objects/js-array-buffer-inl.h b/deps/v8/src/objects/js-array-buffer-inl.h index c1a142d1b55b1e..d30fd968400cc3 100644 --- a/deps/v8/src/objects/js-array-buffer-inl.h +++ b/deps/v8/src/objects/js-array-buffer-inl.h @@ -28,11 +28,19 @@ RELEASE_ACQUIRE_ACCESSORS(JSTypedArray, base_pointer, Object, kBasePointerOffset) size_t JSArrayBuffer::byte_length() const { - return ReadField<size_t>(kByteLengthOffset); + return ReadBoundedSizeField(kRawByteLengthOffset); } void JSArrayBuffer::set_byte_length(size_t value) { - WriteField<size_t>(kByteLengthOffset, value); + WriteBoundedSizeField(kRawByteLengthOffset, value); +} + +size_t JSArrayBuffer::max_byte_length() const { + return ReadBoundedSizeField(kRawMaxByteLengthOffset); +} + +void JSArrayBuffer::set_max_byte_length(size_t value) { + WriteBoundedSizeField(kRawMaxByteLengthOffset, value); } DEF_GETTER(JSArrayBuffer, backing_store, void*) { @@ -51,7 +59,7 @@ std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() const { } size_t JSArrayBuffer::GetByteLength() const { - if (V8_UNLIKELY(is_shared() && is_resizable())) { + if (V8_UNLIKELY(is_shared() && is_resizable_by_js())) { // Invariant: byte_length for GSAB is 0 (it needs to be read from the // BackingStore). DCHECK_EQ(0, byte_length()); @@ -158,8 +166,8 @@ BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_asmjs_memory, JSArrayBuffer::IsAsmJsMemoryBit) BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_shared, JSArrayBuffer::IsSharedBit) -BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_resizable, - JSArrayBuffer::IsResizableBit) +BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_resizable_by_js, + JSArrayBuffer::IsResizableByJsBit) bool JSArrayBuffer::IsEmpty() const { auto backing_store = GetBackingStore(); @@ -169,19 +177,19 @@ bool JSArrayBuffer::IsEmpty() const { } size_t JSArrayBufferView::byte_offset() const { - return ReadField<size_t>(kByteOffsetOffset); + return ReadBoundedSizeField(kRawByteOffsetOffset); } void JSArrayBufferView::set_byte_offset(size_t value) { - WriteField<size_t>(kByteOffsetOffset, value); + WriteBoundedSizeField(kRawByteOffsetOffset, value); } size_t JSArrayBufferView::byte_length() const { - return ReadField<size_t>(kByteLengthOffset); + return ReadBoundedSizeField(kRawByteLengthOffset); } void JSArrayBufferView::set_byte_length(size_t value) { - WriteField<size_t>(kByteLengthOffset, value); + WriteBoundedSizeField(kRawByteLengthOffset, value); } bool JSArrayBufferView::WasDetached() const { @@ -250,15 +258,15 @@ inline void JSTypedArray::ForFixedTypedArray(ExternalArrayType array_type, size_t JSTypedArray::length() const { DCHECK(!is_length_tracking()); DCHECK(!is_backed_by_rab()); - return ReadField<size_t>(kLengthOffset); + return ReadBoundedSizeField(kRawLengthOffset); } size_t JSTypedArray::LengthUnchecked() const { - return ReadField<size_t>(kLengthOffset); + return ReadBoundedSizeField(kRawLengthOffset); } void JSTypedArray::set_length(size_t value) { - WriteField<size_t>(kLengthOffset, value); + WriteBoundedSizeField(kRawLengthOffset, value); } DEF_GETTER(JSTypedArray, external_pointer, Address) { diff --git a/deps/v8/src/objects/js-array-buffer.cc b/deps/v8/src/objects/js-array-buffer.cc index 
3622a30f5d9d2c..a1aca2a9dfef5a 100644 --- a/deps/v8/src/objects/js-array-buffer.cc +++ b/deps/v8/src/objects/js-array-buffer.cc @@ -48,7 +48,7 @@ void JSArrayBuffer::Setup(SharedFlag shared, ResizableFlag resizable, clear_padding(); set_bit_field(0); set_is_shared(shared == SharedFlag::kShared); - set_is_resizable(resizable == ResizableFlag::kResizable); + set_is_resizable_by_js(resizable == ResizableFlag::kResizable); set_is_detachable(shared != SharedFlag::kShared); for (int i = 0; i < v8::ArrayBuffer::kEmbedderFieldCount; i++) { SetEmbedderField(i, Smi::zero()); @@ -70,9 +70,9 @@ void JSArrayBuffer::Setup(SharedFlag shared, ResizableFlag resizable, void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) { DCHECK_NOT_NULL(backing_store); DCHECK_EQ(is_shared(), backing_store->is_shared()); - DCHECK_EQ(is_resizable(), backing_store->is_resizable()); + DCHECK_EQ(is_resizable_by_js(), backing_store->is_resizable_by_js()); DCHECK_IMPLIES( - !backing_store->is_wasm_memory() && !backing_store->is_resizable(), + !backing_store->is_wasm_memory() && !backing_store->is_resizable_by_js(), backing_store->byte_length() == backing_store->max_byte_length()); DCHECK(!was_detached()); Isolate* isolate = GetIsolate(); @@ -84,7 +84,7 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) { set_backing_store(isolate, backing_store->buffer_start()); } - if (is_shared() && is_resizable()) { + if (is_shared() && is_resizable_by_js()) { // GSABs need to read their byte_length from the BackingStore. Maintain the // invariant that their byte_length field is always 0. set_byte_length(0); @@ -141,7 +141,7 @@ size_t JSArrayBuffer::GsabByteLength(Isolate* isolate, DisallowGarbageCollection no_gc; DisallowJavascriptExecution no_js(isolate); JSArrayBuffer buffer = JSArrayBuffer::cast(Object(raw_array_buffer)); - CHECK(buffer.is_resizable()); + CHECK(buffer.is_resizable_by_js()); CHECK(buffer.is_shared()); return buffer.GetBackingStore()->byte_length(std::memory_order_seq_cst); } @@ -226,7 +226,7 @@ Handle<JSArrayBuffer> JSTypedArray::GetBuffer() { // Already is off heap, so return the existing buffer. return array_buffer; } - DCHECK(!array_buffer->is_resizable()); + DCHECK(!array_buffer->is_resizable_by_js()); // The existing array buffer should be empty. DCHECK(array_buffer->IsEmpty()); @@ -370,7 +370,7 @@ size_t JSTypedArray::LengthTrackingGsabBackedTypedArrayLength( JSTypedArray array = JSTypedArray::cast(Object(raw_array)); CHECK(array.is_length_tracking()); JSArrayBuffer buffer = array.buffer(); - CHECK(buffer.is_resizable()); + CHECK(buffer.is_resizable_by_js()); CHECK(buffer.is_shared()); size_t backing_byte_length = buffer.GetBackingStore()->byte_length(std::memory_order_seq_cst); diff --git a/deps/v8/src/objects/js-array-buffer.h b/deps/v8/src/objects/js-array-buffer.h index 01efac77412392..ad6bc4fbf66511 100644 --- a/deps/v8/src/objects/js-array-buffer.h +++ b/deps/v8/src/objects/js-array-buffer.h @@ -28,7 +28,9 @@ class JSArrayBuffer // On 32-bit architectures we limit this to 2GiB, so that // we can continue to use CheckBounds with the Unsigned31 // restriction for the length. 
-#if V8_HOST_ARCH_32_BIT +#if V8_ENABLE_SANDBOX + static constexpr size_t kMaxByteLength = kMaxSafeBufferSizeForSandbox; +#elif V8_HOST_ARCH_32_BIT static constexpr size_t kMaxByteLength = kMaxInt; #else static constexpr size_t kMaxByteLength = kMaxSafeInteger; @@ -37,6 +39,9 @@ class JSArrayBuffer // [byte_length]: length in bytes DECL_PRIMITIVE_ACCESSORS(byte_length, size_t) + // [max_byte_length]: maximum length in bytes + DECL_PRIMITIVE_ACCESSORS(max_byte_length, size_t) + // [backing_store]: backing memory for this array // It should not be assumed that this will be nullptr for empty ArrayBuffers. DECL_GETTER(backing_store, void*) @@ -73,9 +78,9 @@ class JSArrayBuffer // GrowableSharedArrayBuffer. DECL_BOOLEAN_ACCESSORS(is_shared) - // [is_resizable]: true if this is a ResizableArrayBuffer or a + // [is_resizable_by_js]: true if this is a ResizableArrayBuffer or a // GrowableSharedArrayBuffer. - DECL_BOOLEAN_ACCESSORS(is_resizable) + DECL_BOOLEAN_ACCESSORS(is_resizable_by_js) // An ArrayBuffer is empty if its BackingStore is empty or if there is none. // An empty ArrayBuffer will have a byte_length of zero but not necessarily a @@ -257,10 +262,10 @@ class JSArrayBufferView DECL_BOOLEAN_ACCESSORS(is_backed_by_rab) inline bool IsVariableLength() const; - static constexpr int kEndOfTaggedFieldsOffset = kByteOffsetOffset; + static constexpr int kEndOfTaggedFieldsOffset = kRawByteOffsetOffset; - static_assert(IsAligned(kByteOffsetOffset, kUIntptrSize)); - static_assert(IsAligned(kByteLengthOffset, kUIntptrSize)); + static_assert(IsAligned(kRawByteOffsetOffset, kUIntptrSize)); + static_assert(IsAligned(kRawByteLengthOffset, kUIntptrSize)); TQ_OBJECT_CONSTRUCTORS(JSArrayBufferView) }; @@ -271,6 +276,7 @@ class JSTypedArray // TODO(v8:4153): This should be equal to JSArrayBuffer::kMaxByteLength // eventually. static constexpr size_t kMaxLength = v8::TypedArray::kMaxLength; + static_assert(kMaxLength <= JSArrayBuffer::kMaxByteLength); // [length]: length of typed array in elements. 
DECL_PRIMITIVE_GETTER(length, size_t) diff --git a/deps/v8/src/objects/js-array-buffer.tq b/deps/v8/src/objects/js-array-buffer.tq index d00febb179f51f..415b15d5c56fbc 100644 --- a/deps/v8/src/objects/js-array-buffer.tq +++ b/deps/v8/src/objects/js-array-buffer.tq @@ -8,12 +8,14 @@ bitfield struct JSArrayBufferFlags extends uint32 { was_detached: bool: 1 bit; is_asm_js_memory: bool: 1 bit; is_shared: bool: 1 bit; - is_resizable: bool: 1 bit; + is_resizable_by_js: bool: 1 bit; } extern class JSArrayBuffer extends JSObjectWithEmbedderSlots { - byte_length: uintptr; - max_byte_length: uintptr; + // A BoundedSize if the sandbox is enabled + raw_byte_length: uintptr; + // A BoundedSize if the sandbox is enabled + raw_max_byte_length: uintptr; // A SandboxedPtr if the sandbox is enabled backing_store: RawPtr; extension: RawPtr; @@ -23,6 +25,11 @@ extern class JSArrayBuffer extends JSObjectWithEmbedderSlots { @ifnot(TAGGED_SIZE_8_BYTES) optional_padding: void; } +extern operator '.byte_length' macro LoadJSArrayBufferByteLength(JSArrayBuffer): + uintptr; +extern operator '.max_byte_length' macro LoadJSArrayBufferMaxByteLength( + JSArrayBuffer): uintptr; + extern operator '.backing_store_ptr' macro LoadJSArrayBufferBackingStorePtr( JSArrayBuffer): RawPtr; @@ -38,7 +45,7 @@ macro IsSharedArrayBuffer(buffer: JSArrayBuffer): bool { @export macro IsResizableArrayBuffer(buffer: JSArrayBuffer): bool { - return buffer.bit_field.is_resizable; + return buffer.bit_field.is_resizable_by_js; } // We have 4 different DataViews & TypedArrays: @@ -55,14 +62,25 @@ bitfield struct JSArrayBufferViewFlags extends uint32 { @abstract extern class JSArrayBufferView extends JSObjectWithEmbedderSlots { buffer: JSArrayBuffer; - byte_offset: uintptr; - byte_length: uintptr; + // A BoundedSize if the sandbox is enabled + raw_byte_offset: uintptr; + // A BoundedSize if the sandbox is enabled + raw_byte_length: uintptr; bit_field: JSArrayBufferViewFlags; // Pads header size to be a multiple of kTaggedSize. 
@if(TAGGED_SIZE_8_BYTES) optional_padding: uint32; @ifnot(TAGGED_SIZE_8_BYTES) optional_padding: void; } +extern operator '.byte_offset' macro LoadJSArrayBufferViewByteOffset( + JSArrayBufferView): uintptr; +extern operator '.byte_offset=' macro StoreJSArrayBufferViewByteOffset( + JSArrayBufferView, uintptr): void; +extern operator '.byte_length' macro LoadJSArrayBufferViewByteLength( + JSArrayBufferView): uintptr; +extern operator '.byte_length=' macro StoreJSArrayBufferViewByteLength( + JSArrayBufferView, uintptr): void; + @export macro IsVariableLengthJSArrayBufferView(array: JSArrayBufferView): bool { return array.bit_field.is_length_tracking || array.bit_field.is_backed_by_rab; @@ -88,12 +106,17 @@ macro LoadJSArrayBufferViewByteLength( } extern class JSTypedArray extends JSArrayBufferView { - length: uintptr; + // A BoundedSize if the sandbox is enabled + raw_length: uintptr; // A SandboxedPtr if the sandbox is enabled external_pointer: RawPtr; base_pointer: ByteArray|Smi; } +extern operator '.length' macro LoadJSTypedArrayLength(JSTypedArray): uintptr; +extern operator '.length=' macro StoreJSTypedArrayLength( + JSTypedArray, uintptr): void; + @export macro IsOnHeapTypedArray(array: JSTypedArray): bool { // See JSTypedArray::is_on_heap() diff --git a/deps/v8/src/objects/js-array-inl.h b/deps/v8/src/objects/js-array-inl.h index 2e5b192c3b3a92..fe1973b0b76974 100644 --- a/deps/v8/src/objects/js-array-inl.h +++ b/deps/v8/src/objects/js-array-inl.h @@ -73,6 +73,14 @@ void JSArrayIterator::set_kind(IterationKind kind) { set_raw_kind(static_cast<int>(kind)); } +// static +void TemplateLiteralObject::SetRaw(Handle<JSArray> template_object, + Handle<JSArray> raw_object) { + TaggedField<Object, kRawFieldOffset>::store(*template_object, *raw_object); + CONDITIONAL_WRITE_BARRIER(*template_object, kRawFieldOffset, *raw_object, + UPDATE_WRITE_BARRIER); +} + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/objects/js-array.h b/deps/v8/src/objects/js-array.h index 2af61eea0c1e83..6bc2bb80cdaf6c 100644 --- a/deps/v8/src/objects/js-array.h +++ b/deps/v8/src/objects/js-array.h @@ -159,6 +159,14 @@ class JSArrayIterator TQ_OBJECT_CONSTRUCTORS(JSArrayIterator) }; +// Helper class for JSArrays that are template literal objects +class TemplateLiteralObject { + public: + static const int kRawFieldOffset = JSArray::kLengthOffset + kTaggedSize; + static inline void SetRaw(Handle<JSArray> template_object, + Handle<JSArray> raw_object); +}; + } // namespace internal } // namespace v8 diff --git a/deps/v8/src/objects/js-duration-format-inl.h b/deps/v8/src/objects/js-duration-format-inl.h new file mode 100644 index 00000000000000..024e1ed7b24faa --- /dev/null +++ b/deps/v8/src/objects/js-duration-format-inl.h @@ -0,0 +1,104 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_INTL_SUPPORT +#error Internationalization is expected to be enabled. 
+#endif  // V8_INTL_SUPPORT
+
+#ifndef V8_OBJECTS_JS_DURATION_FORMAT_INL_H_
+#define V8_OBJECTS_JS_DURATION_FORMAT_INL_H_
+
+#include "src/objects/js-duration-format.h"
+#include "src/objects/objects-inl.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+#include "torque-generated/src/objects/js-duration-format-tq-inl.inc"
+
+TQ_OBJECT_CONSTRUCTORS_IMPL(JSDurationFormat)
+
+ACCESSORS(JSDurationFormat, icu_locale, Managed<icu::Locale>, kIcuLocaleOffset)
+
+#define IMPL_INLINE_SETTER_GETTER(T, n, B, f, M) \
+  inline void JSDurationFormat::set_##n(T value) { \
+    DCHECK_GE(B::kMax, value); \
+    DCHECK_GE(T::M, value); \
+    set_##f(B::update(f(), value)); \
+  } \
+  inline JSDurationFormat::T JSDurationFormat::n() const { \
+    return B::decode(f()); \
+  }
+
+#define IMPL_INLINE_DISPLAY_SETTER_GETTER(f, R) \
+  IMPL_INLINE_SETTER_GETTER(Display, f##_display, R##DisplayBit, \
+                            display_flags, kAlways)
+
+#define IMPL_INLINE_FIELD_STYLE3_SETTER_GETTER(f, R) \
+  IMPL_INLINE_SETTER_GETTER(FieldStyle, f##_style, R##StyleBits, style_flags, \
+                            kNarrow)
+
+#define IMPL_INLINE_FIELD_STYLE4_SETTER_GETTER(f, R) \
+  IMPL_INLINE_SETTER_GETTER(FieldStyle, f##_style, R##StyleBits, style_flags, \
+                            kNumeric)
+
+#define IMPL_INLINE_FIELD_STYLE5_SETTER_GETTER(f, R) \
+  IMPL_INLINE_SETTER_GETTER(FieldStyle, f##_style, R##StyleBits, style_flags, \
+                            k2Digit)
+
+IMPL_INLINE_DISPLAY_SETTER_GETTER(years, Years)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(months, Months)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(weeks, Weeks)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(days, Days)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(hours, Hours)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(minutes, Minutes)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(seconds, Seconds)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(milliseconds, Milliseconds)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(microseconds, Microseconds)
+IMPL_INLINE_DISPLAY_SETTER_GETTER(nanoseconds, Nanoseconds)
+
+IMPL_INLINE_SETTER_GETTER(Style, style, StyleBits, style_flags, kDigital)
+
+IMPL_INLINE_FIELD_STYLE3_SETTER_GETTER(years, Years)
+IMPL_INLINE_FIELD_STYLE3_SETTER_GETTER(months, Months)
+IMPL_INLINE_FIELD_STYLE3_SETTER_GETTER(weeks, Weeks)
+IMPL_INLINE_FIELD_STYLE3_SETTER_GETTER(days, Days)
+IMPL_INLINE_FIELD_STYLE5_SETTER_GETTER(hours, Hours)
+IMPL_INLINE_FIELD_STYLE5_SETTER_GETTER(minutes, Minutes)
+IMPL_INLINE_FIELD_STYLE5_SETTER_GETTER(seconds, Seconds)
+IMPL_INLINE_FIELD_STYLE4_SETTER_GETTER(milliseconds, Milliseconds)
+IMPL_INLINE_FIELD_STYLE4_SETTER_GETTER(microseconds, Microseconds)
+IMPL_INLINE_FIELD_STYLE4_SETTER_GETTER(nanoseconds, Nanoseconds)
+
+#undef IMPL_INLINE_SETTER_GETTER
+#undef IMPL_INLINE_DISPLAY_SETTER_GETTER
+#undef IMPL_INLINE_FIELD_STYLE3_SETTER_GETTER
+#undef IMPL_INLINE_FIELD_STYLE4_SETTER_GETTER
+#undef IMPL_INLINE_FIELD_STYLE5_SETTER_GETTER
+
+inline void JSDurationFormat::set_fractional_digits(int32_t digits) {
+  DCHECK_GE(9, digits);
+  DCHECK_LE(0, digits);
+  int hints = display_flags();
+  hints = FractionalDigitsBits::update(hints, digits);
+  set_display_flags(hints);
+}
+inline int32_t JSDurationFormat::fractional_digits() const {
+  int32_t v = FractionalDigitsBits::decode(display_flags());
+  DCHECK_GE(9, v);
+  DCHECK_LE(0, v);
+  return v;
+}
+
+ACCESSORS(JSDurationFormat, icu_number_formatter,
+          Managed<icu::number::LocalizedNumberFormatter>,
+          kIcuNumberFormatterOffset)
+}  // namespace internal
+}  // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif  // V8_OBJECTS_JS_DURATION_FORMAT_INL_H_
diff --git
a/deps/v8/src/objects/js-duration-format.cc b/deps/v8/src/objects/js-duration-format.cc new file mode 100644 index 00000000000000..fa7fcf3a91fb28 --- /dev/null +++ b/deps/v8/src/objects/js-duration-format.cc @@ -0,0 +1,774 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_INTL_SUPPORT +#error Internationalization is expected to be enabled. +#endif // V8_INTL_SUPPORT + +#include "src/objects/js-duration-format.h" + +#include <map> +#include <memory> +#include <string> +#include <string_view> + +#include "src/execution/isolate.h" +#include "src/heap/factory.h" +#include "src/objects/intl-objects.h" +#include "src/objects/js-duration-format-inl.h" +#include "src/objects/js-number-format.h" +#include "src/objects/js-temporal-objects.h" +#include "src/objects/managed-inl.h" +#include "src/objects/objects-inl.h" +#include "src/objects/option-utils.h" +#include "unicode/listformatter.h" +#include "unicode/locid.h" +#include "unicode/numberformatter.h" +#include "unicode/ulistformatter.h" +#include "unicode/unumberformatter.h" + +namespace v8 { +namespace internal { + +using temporal::DurationRecord; + +namespace { + +// #sec-getdurationunitoptions +enum class StylesList { k3Styles, k4Styles, k5Styles }; +enum class UnitKind { kMinutesOrSeconds, kOthers }; +struct DurationUnitOptions { + JSDurationFormat::FieldStyle style; + JSDurationFormat::Display display; +}; +Maybe<DurationUnitOptions> GetDurationUnitOptions( + Isolate* isolate, const char* unit, const char* display_field, + Handle<JSReceiver> options, JSDurationFormat::Style base_style, + StylesList styles_list, JSDurationFormat::FieldStyle prev_style, + UnitKind unit_kind, const char* method_name) { + JSDurationFormat::FieldStyle style; + JSDurationFormat::FieldStyle digital_base; + // 1. Let style be ? GetOption(options, unit, "string", stylesList, + // undefined). 
+ switch (styles_list) { + case StylesList::k3Styles: + // For years, months, weeks, days + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, style, + GetStringOption<JSDurationFormat::FieldStyle>( + isolate, options, unit, method_name, {"long", "short", "narrow"}, + {JSDurationFormat::FieldStyle::kLong, + JSDurationFormat::FieldStyle::kShort, + JSDurationFormat::FieldStyle::kNarrow}, + JSDurationFormat::FieldStyle::kUndefined), + Nothing<DurationUnitOptions>()); + digital_base = JSDurationFormat::FieldStyle::kShort; + break; + case StylesList::k4Styles: + // For milliseconds, microseconds, nanoseconds + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, style, + GetStringOption<JSDurationFormat::FieldStyle>( + isolate, options, unit, method_name, + {"long", "short", "narrow", "numeric"}, + {JSDurationFormat::FieldStyle::kLong, + JSDurationFormat::FieldStyle::kShort, + JSDurationFormat::FieldStyle::kNarrow, + JSDurationFormat::FieldStyle::kNumeric}, + JSDurationFormat::FieldStyle::kUndefined), + Nothing<DurationUnitOptions>()); + digital_base = JSDurationFormat::FieldStyle::kNumeric; + break; + case StylesList::k5Styles: + // For hours, minutes, seconds + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, style, + GetStringOption<JSDurationFormat::FieldStyle>( + isolate, options, unit, method_name, + {"long", "short", "narrow", "numeric", "2-digit"}, + {JSDurationFormat::FieldStyle::kLong, + JSDurationFormat::FieldStyle::kShort, + JSDurationFormat::FieldStyle::kNarrow, + JSDurationFormat::FieldStyle::kNumeric, + JSDurationFormat::FieldStyle::k2Digit}, + JSDurationFormat::FieldStyle::kUndefined), + Nothing<DurationUnitOptions>()); + digital_base = JSDurationFormat::FieldStyle::kNumeric; + break; + } + + // 2. Let displayDefault be "always". + JSDurationFormat::Display display_default = + JSDurationFormat::Display::kAlways; + // 3. If style is undefined, then + if (style == JSDurationFormat::FieldStyle::kUndefined) { + // a. If baseStyle is "digital", then + if (base_style == JSDurationFormat::Style::kDigital) { + // i. If unit is not one of "hours", "minutes", or "seconds", then + if (styles_list != StylesList::k5Styles) { + DCHECK_NE(0, strcmp(unit, "hours")); + DCHECK_NE(0, strcmp(unit, "minutes")); + DCHECK_NE(0, strcmp(unit, "seconds")); + // a. Set displayDefault to "auto". + display_default = JSDurationFormat::Display::kAuto; + } + // ii. Set style to digitalBase. + style = digital_base; + // b. Else + } else { + // i. Set displayDefault to "auto". + display_default = JSDurationFormat::Display::kAuto; + // ii. if prevStyle is "numeric" or "2-digit", then + if (prev_style == JSDurationFormat::FieldStyle::kNumeric || + prev_style == JSDurationFormat::FieldStyle::k2Digit) { + // 1. Set style to "numeric". + style = JSDurationFormat::FieldStyle::kNumeric; + // iii. Else, + } else { + // 1. Set style to baseStyle. + switch (base_style) { + case JSDurationFormat::Style::kLong: + style = JSDurationFormat::FieldStyle::kLong; + break; + case JSDurationFormat::Style::kShort: + style = JSDurationFormat::FieldStyle::kShort; + break; + case JSDurationFormat::Style::kNarrow: + style = JSDurationFormat::FieldStyle::kNarrow; + break; + default: + UNREACHABLE(); + } + } + } + } + // 4. Let displayField be the string-concatenation of unit and "Display". + // 5. Let display be ? GetOption(options, displayField, "string", « "auto", + // "always" », displayDefault). 
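The switch above only performs step 1, reading the per-unit option against the right allowed-value list; steps 2-3 then fill in a default when the option was omitted, and that default depends on both the base style and the previously resolved unit. The following standalone sketch (simplified types mirroring the names in js-duration-format.h, not V8's internal API) condenses those fallback rules:

```cpp
#include <cassert>

// Illustrative stand-ins for the enums in js-duration-format.h.
enum class Style { kLong, kShort, kNarrow, kDigital };
enum class FieldStyle { kLong, kShort, kNarrow, kNumeric, k2Digit, kUndefined };

// Condensed form of step 3 of GetDurationUnitOptions: choose a field style
// when the per-unit option was left undefined.
FieldStyle DefaultFieldStyle(Style base_style, FieldStyle prev_style,
                             FieldStyle digital_base) {
  // Base style "digital" hands every unit its digital default ("short" for
  // date units, "numeric" for time units).
  if (base_style == Style::kDigital) return digital_base;
  // Once an earlier unit resolved to "numeric" or "2-digit", later units
  // stay numeric so the clock-style reading remains contiguous.
  if (prev_style == FieldStyle::kNumeric ||
      prev_style == FieldStyle::k2Digit) {
    return FieldStyle::kNumeric;
  }
  // Otherwise the unit simply inherits the base style.
  switch (base_style) {
    case Style::kLong:
      return FieldStyle::kLong;
    case Style::kShort:
      return FieldStyle::kShort;
    default:
      return FieldStyle::kNarrow;
  }
}

int main() {
  // { style: "digital" }: hours/minutes/seconds default to "numeric".
  assert(DefaultFieldStyle(Style::kDigital, FieldStyle::kUndefined,
                           FieldStyle::kNumeric) == FieldStyle::kNumeric);
  // { style: "short", hours: "numeric" }: minutes then default to numeric.
  assert(DefaultFieldStyle(Style::kShort, FieldStyle::kNumeric,
                           FieldStyle::kNumeric) == FieldStyle::kNumeric);
  return 0;
}
```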
+ JSDurationFormat::Display display; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, display, + GetStringOption<JSDurationFormat::Display>( + isolate, options, display_field, method_name, {"auto", "always"}, + {JSDurationFormat::Display::kAuto, + JSDurationFormat::Display::kAlways}, + display_default), + Nothing<DurationUnitOptions>()); + // 6. If prevStyle is "numeric" or "2-digit", then + if (prev_style == JSDurationFormat::FieldStyle::kNumeric || + prev_style == JSDurationFormat::FieldStyle::k2Digit) { + // a. If style is not "numeric" or "2-digit", then + if (style != JSDurationFormat::FieldStyle::kNumeric && + style != JSDurationFormat::FieldStyle::k2Digit) { + // i. Throw a RangeError exception. + // b. Else if unit is "minutes" or "seconds", then + } else if (unit_kind == UnitKind::kMinutesOrSeconds) { + CHECK(strcmp(unit, "minutes") == 0 || strcmp(unit, "seconds") == 0); + // i. Set style to "2-digit". + style = JSDurationFormat::FieldStyle::k2Digit; + } + } + // 7. Return the Record { [[Style]]: style, [[Display]]: display }. + return Just(DurationUnitOptions({style, display})); +} + +} // namespace +MaybeHandle<JSDurationFormat> JSDurationFormat::New( + Isolate* isolate, Handle<Map> map, Handle<Object> locales, + Handle<Object> input_options) { + Factory* factory = isolate->factory(); + const char* method_name = "Intl.DurationFormat"; + + // 3. Let requestedLocales be ? CanonicalizeLocaleList(locales). + std::vector<std::string> requested_locales; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, requested_locales, + Intl::CanonicalizeLocaleList(isolate, locales), + Handle<JSDurationFormat>()); + + // 4. Let options be ? GetOptionsObject(options). + Handle<JSReceiver> options; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, options, GetOptionsObject(isolate, input_options, method_name), + JSDurationFormat); + + // 5. Let matcher be ? GetOption(options, "localeMatcher", "string", « + // "lookup", "best fit" », "best fit"). + Intl::MatcherOption matcher; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, matcher, Intl::GetLocaleMatcher(isolate, options, method_name), + Handle<JSDurationFormat>()); + + // 6. Let numberingSystem be ? GetOption(options, "numberingSystem", "string", + // undefined, undefined). + // + // 7. If numberingSystem is not undefined, then + // + // a. If numberingSystem does not match the Unicode Locale Identifier type + // nonterminal, throw a RangeError exception. + // Note: The matching test and throw in Step 7-a is throw inside + // Intl::GetNumberingSystem. + std::unique_ptr<char[]> numbering_system_str = nullptr; + bool get; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, get, + Intl::GetNumberingSystem(isolate, options, method_name, + &numbering_system_str), + Handle<JSDurationFormat>()); + + // 8. Let opt be the Record { [[localeMatcher]]: matcher, [[nu]]: + // numberingSystem }. + // 9. Let r be ResolveLocale(%DurationFormat%.[[AvailableLocales]], + // requestedLocales, opt, %DurationFormat%.[[RelevantExtensionKeys]], + // %DurationFormat%.[[LocaleData]]). + std::set<std::string> relevant_extension_keys{"nu"}; + Intl::ResolvedLocale r; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, r, + Intl::ResolveLocale(isolate, JSDurationFormat::GetAvailableLocales(), + requested_locales, matcher, relevant_extension_keys), + Handle<JSDurationFormat>()); + + // 10. Let locale be r.[[locale]]. + icu::Locale r_locale = r.icu_locale; + UErrorCode status = U_ZERO_ERROR; + // 11. Set durationFormat.[[Locale]] to locale. + // 12. 
Set durationFormat.[[NumberingSystem]] to r.[[nu]].
+  if (numbering_system_str != nullptr) {
+    auto nu_extension_it = r.extensions.find("nu");
+    if (nu_extension_it != r.extensions.end() &&
+        nu_extension_it->second != numbering_system_str.get()) {
+      r_locale.setUnicodeKeywordValue("nu", nullptr, status);
+      DCHECK(U_SUCCESS(status));
+    }
+  }
+  icu::Locale icu_locale = r_locale;
+  if (numbering_system_str != nullptr &&
+      Intl::IsValidNumberingSystem(numbering_system_str.get())) {
+    r_locale.setUnicodeKeywordValue("nu", numbering_system_str.get(), status);
+    DCHECK(U_SUCCESS(status));
+  }
+  std::string numbering_system = Intl::GetNumberingSystem(r_locale);
+
+  // 13. Let style be ? GetOption(options, "style", "string", « "long", "short",
+  // "narrow", "digital" », "short").
+  Style style;
+  MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+      isolate, style,
+      GetStringOption<Style>(
+          isolate, options, "style", method_name,
+          {"long", "short", "narrow", "digital"},
+          {Style::kLong, Style::kShort, Style::kNarrow, Style::kDigital},
+          Style::kShort),
+      Handle<JSDurationFormat>());
+
+  // 14. Set durationFormat.[[Style]] to style.
+  // 15. Set durationFormat.[[DataLocale]] to r.[[dataLocale]].
+  Handle<Managed<icu::Locale>> managed_locale =
+      Managed<icu::Locale>::FromRawPtr(isolate, 0, icu_locale.clone());
+  // 16. Let prevStyle be the empty String.
+  FieldStyle prev_style = FieldStyle::kUndefined;
+  // 17. For each row of Table 1, except the header row, in table order, do
+  //  a. Let styleSlot be the Style Slot value of the current row.
+  //  b. Let displaySlot be the Display Slot value of the current row.
+  //  c. Let unit be the Unit value.
+  //  d. Let valueList be the Values value.
+  //  e. Let digitalBase be the Digital Default value.
+  //  f. Let unitOptions be ? GetDurationUnitOptions(unit, options, style,
+  //  valueList, digitalBase, prevStyle).
+  //  g. Set the value of the styleSlot slot of durationFormat to
+  //  unitOptions.[[Style]].
+  //  h. Set the value of the displaySlot slot of durationFormat to
+  //  unitOptions.[[Display]].
+  //  i. If unit is one of "hours", "minutes", "seconds", "milliseconds",
+  //  or "microseconds", then
+  //    i. Set prevStyle to unitOptions.[[Style]].
+  DurationUnitOptions years_option;
+  DurationUnitOptions months_option;
+  DurationUnitOptions weeks_option;
+  DurationUnitOptions days_option;
+  DurationUnitOptions hours_option;
+  DurationUnitOptions minutes_option;
+  DurationUnitOptions seconds_option;
+  DurationUnitOptions milliseconds_option;
+  DurationUnitOptions microseconds_option;
+  DurationUnitOptions nanoseconds_option;
+
+#define CALL_GET_DURATION_UNIT_OPTIONS(u, sl, uk)                           \
+  MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE(                                   \
+      isolate, u##_option,                                                  \
+      GetDurationUnitOptions(isolate, #u, #u "Display", options, style, sl, \
+                             prev_style, uk, method_name),                  \
+      Handle<JSDurationFormat>());
+  CALL_GET_DURATION_UNIT_OPTIONS(years, StylesList::k3Styles, UnitKind::kOthers)
+  CALL_GET_DURATION_UNIT_OPTIONS(months, StylesList::k3Styles,
+                                 UnitKind::kOthers)
+  CALL_GET_DURATION_UNIT_OPTIONS(weeks, StylesList::k3Styles, UnitKind::kOthers)
+  CALL_GET_DURATION_UNIT_OPTIONS(days, StylesList::k3Styles, UnitKind::kOthers)
+  CALL_GET_DURATION_UNIT_OPTIONS(hours, StylesList::k5Styles, UnitKind::kOthers)
+  prev_style = hours_option.style;
+  CALL_GET_DURATION_UNIT_OPTIONS(minutes, StylesList::k5Styles,
+                                 UnitKind::kMinutesOrSeconds)
+  prev_style = minutes_option.style;
+  CALL_GET_DURATION_UNIT_OPTIONS(seconds, StylesList::k5Styles,
+                                 UnitKind::kMinutesOrSeconds)
+  prev_style = seconds_option.style;
+  CALL_GET_DURATION_UNIT_OPTIONS(milliseconds, StylesList::k4Styles,
+                                 UnitKind::kOthers)
+  prev_style = milliseconds_option.style;
+  CALL_GET_DURATION_UNIT_OPTIONS(microseconds, StylesList::k4Styles,
+                                 UnitKind::kOthers)
+  prev_style = microseconds_option.style;
+  CALL_GET_DURATION_UNIT_OPTIONS(nanoseconds, StylesList::k4Styles,
+                                 UnitKind::kOthers)
+#undef CALL_GET_DURATION_UNIT_OPTIONS
+  // 18. Set durationFormat.[[FractionalDigits]] to ? GetNumberOption(options,
+  // "fractionalDigits", 0, 9, undefined).
+  int fractional_digits;
+  MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE(
+      isolate, fractional_digits,
+      GetNumberOption(isolate, options, factory->fractionalDigits_string(), 0,
+                      9, 0),
+      Handle<JSDurationFormat>());
+
+  icu::number::LocalizedNumberFormatter fmt =
+      icu::number::UnlocalizedNumberFormatter()
+          .roundingMode(UNUM_ROUND_HALFUP)
+          .locale(icu_locale);
+  if (!numbering_system.empty() && numbering_system != "latn") {
+    fmt = fmt.adoptSymbols(icu::NumberingSystem::createInstanceByName(
+        numbering_system.c_str(), status));
+    DCHECK(U_SUCCESS(status));
+  }
+  Handle<Managed<icu::number::LocalizedNumberFormatter>>
+      managed_number_formatter =
+          Managed<icu::number::LocalizedNumberFormatter>::FromRawPtr(
+              isolate, 0, new icu::number::LocalizedNumberFormatter(fmt));
+
+  // 19. Return durationFormat.
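Before durationFormat is populated and returned below, it is worth unpacking the CALL_GET_DURATION_UNIT_OPTIONS macro above: it derives both option keys for a unit from a single token, since #u stringifies the unit name, #u "Display" pastes its companion display key, and u##_option names the destination variable. A minimal, self-contained illustration of the same preprocessor trick (hypothetical names, not the V8 macro):

```cpp
#include <cstdio>

void ReadUnit(const char* style_key, const char* display_key) {
  std::printf("style option: %s, display option: %s\n", style_key,
              display_key);
}

// #u stringifies the token, and adjacent string literals concatenate, so one
// macro argument yields both "hours" and "hoursDisplay".
#define UNIT_KEYS(u) ReadUnit(#u, #u "Display")

int main() {
  UNIT_KEYS(hours);    // style option: hours, display option: hoursDisplay
  UNIT_KEYS(minutes);  // style option: minutes, display option: minutesDisplay
  return 0;
}
```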
+ Handle<JSDurationFormat> duration_format = Handle<JSDurationFormat>::cast( + factory->NewFastOrSlowJSObjectFromMap(map)); + duration_format->set_style_flags(0); + duration_format->set_display_flags(0); + duration_format->set_style(style); + duration_format->set_years_style(years_option.style); + duration_format->set_months_style(months_option.style); + duration_format->set_weeks_style(weeks_option.style); + duration_format->set_days_style(days_option.style); + duration_format->set_hours_style(hours_option.style); + duration_format->set_minutes_style(minutes_option.style); + duration_format->set_seconds_style(seconds_option.style); + duration_format->set_milliseconds_style(milliseconds_option.style); + duration_format->set_microseconds_style(microseconds_option.style); + duration_format->set_nanoseconds_style(nanoseconds_option.style); + + duration_format->set_years_display(years_option.display); + duration_format->set_months_display(months_option.display); + duration_format->set_weeks_display(weeks_option.display); + duration_format->set_days_display(days_option.display); + duration_format->set_hours_display(hours_option.display); + duration_format->set_minutes_display(minutes_option.display); + duration_format->set_seconds_display(seconds_option.display); + duration_format->set_milliseconds_display(milliseconds_option.display); + duration_format->set_microseconds_display(microseconds_option.display); + duration_format->set_nanoseconds_display(nanoseconds_option.display); + + duration_format->set_fractional_digits(fractional_digits); + + duration_format->set_icu_locale(*managed_locale); + duration_format->set_icu_number_formatter(*managed_number_formatter); + + return duration_format; +} + +namespace { + +Handle<String> StyleToString(Isolate* isolate, JSDurationFormat::Style style) { + switch (style) { + case JSDurationFormat::Style::kLong: + return ReadOnlyRoots(isolate).long_string_handle(); + case JSDurationFormat::Style::kShort: + return ReadOnlyRoots(isolate).short_string_handle(); + case JSDurationFormat::Style::kNarrow: + return ReadOnlyRoots(isolate).narrow_string_handle(); + case JSDurationFormat::Style::kDigital: + return ReadOnlyRoots(isolate).digital_string_handle(); + } +} + +Handle<String> StyleToString(Isolate* isolate, + JSDurationFormat::FieldStyle style) { + switch (style) { + case JSDurationFormat::FieldStyle::kLong: + return ReadOnlyRoots(isolate).long_string_handle(); + case JSDurationFormat::FieldStyle::kShort: + return ReadOnlyRoots(isolate).short_string_handle(); + case JSDurationFormat::FieldStyle::kNarrow: + return ReadOnlyRoots(isolate).narrow_string_handle(); + case JSDurationFormat::FieldStyle::kNumeric: + return ReadOnlyRoots(isolate).numeric_string_handle(); + case JSDurationFormat::FieldStyle::k2Digit: + return ReadOnlyRoots(isolate).two_digit_string_handle(); + case JSDurationFormat::FieldStyle::kUndefined: + UNREACHABLE(); + } +} + +Handle<String> DisplayToString(Isolate* isolate, + JSDurationFormat::Display display) { + switch (display) { + case JSDurationFormat::Display::kAuto: + return ReadOnlyRoots(isolate).auto_string_handle(); + case JSDurationFormat::Display::kAlways: + return ReadOnlyRoots(isolate).always_string_handle(); + } +} + +} // namespace + +Handle<JSObject> JSDurationFormat::ResolvedOptions( + Isolate* isolate, Handle<JSDurationFormat> format) { + Factory* factory = isolate->factory(); + Handle<JSObject> options = factory->NewJSObject(isolate->object_function()); + + Handle<String> locale = factory->NewStringFromAsciiChecked( + 
Intl::ToLanguageTag(*format->icu_locale().raw()).FromJust().c_str()); + UErrorCode status = U_ZERO_ERROR; + icu::UnicodeString skeleton = + format->icu_number_formatter().raw()->toSkeleton(status); + DCHECK(U_SUCCESS(status)); + + Handle<String> numbering_system; + CHECK(Intl::ToString(isolate, + JSNumberFormat::NumberingSystemFromSkeleton(skeleton)) + .ToHandle(&numbering_system)); + + Handle<Smi> fractional_digits = + handle(Smi::FromInt(format->fractional_digits()), isolate); + + bool created; + +#define OUTPUT_PROPERTY(s, f) \ + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( \ + isolate, created, \ + JSReceiver::CreateDataProperty(isolate, options, factory->s(), f, \ + Just(kDontThrow)), \ + Handle<JSObject>()); \ + CHECK(created); +#define OUTPUT_STYLE_PROPERTY(p) \ + OUTPUT_PROPERTY(p##_string, StyleToString(isolate, format->p##_style())) +#define OUTPUT_DISPLAY_PROPERTY(p) \ + OUTPUT_PROPERTY(p##Display_string, \ + DisplayToString(isolate, format->p##_display())) +#define OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(p) \ + OUTPUT_STYLE_PROPERTY(p); \ + OUTPUT_DISPLAY_PROPERTY(p); + + OUTPUT_PROPERTY(locale_string, locale); + OUTPUT_PROPERTY(style_string, StyleToString(isolate, format->style())); + + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(years); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(months); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(weeks); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(days); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(hours); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(minutes); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(seconds); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(milliseconds); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(microseconds); + OUTPUT_STYLE_AND_DISPLAY_PROPERTIES(nanoseconds); + + OUTPUT_PROPERTY(fractionalDigits_string, fractional_digits); + OUTPUT_PROPERTY(numberingSystem_string, numbering_system); +#undef OUTPUT_PROPERTY +#undef OUTPUT_STYLE_PROPERTY +#undef OUTPUT_DISPLAY_PROPERTY +#undef OUTPUT_STYLE_AND_DISPLAY_PROPERTIES + + return options; +} + +namespace { + +UNumberUnitWidth ToUNumberUnitWidth(JSDurationFormat::FieldStyle style) { + switch (style) { + case JSDurationFormat::FieldStyle::kShort: + return UNumberUnitWidth::UNUM_UNIT_WIDTH_SHORT; + case JSDurationFormat::FieldStyle::kLong: + return UNumberUnitWidth::UNUM_UNIT_WIDTH_FULL_NAME; + case JSDurationFormat::FieldStyle::kNarrow: + return UNumberUnitWidth::UNUM_UNIT_WIDTH_NARROW; + default: + UNREACHABLE(); + } +} + +void Output(std::vector<icu::UnicodeString>* out, double value, + const icu::number::LocalizedNumberFormatter& fmt) { + UErrorCode status = U_ZERO_ERROR; + out->push_back(fmt.formatDouble(value, status).toString(status)); + CHECK(U_SUCCESS(status)); +} + +void Output3Styles(std::vector<icu::UnicodeString>* out, + std::vector<std::string>* types, const char* type, + double value, JSDurationFormat::Display display, + const icu::number::LocalizedNumberFormatter& fmt) { + if (value == 0 && display == JSDurationFormat::Display::kAuto) return; + types->push_back(type); + Output(out, value, fmt); +} + +void Output4Styles(std::vector<icu::UnicodeString>* out, + std::vector<std::string>* types, const char* type, + double value, JSDurationFormat::Display display, + JSDurationFormat::FieldStyle style, + const icu::number::LocalizedNumberFormatter& fmt, + icu::MeasureUnit unit) { + if (value == 0 && display == JSDurationFormat::Display::kAuto) return; + if (style == JSDurationFormat::FieldStyle::kNumeric) { + types->push_back(type); + return Output(out, value, fmt); + } + Output3Styles(out, types, type, value, display, + 
fmt.unit(unit).unitWidth(ToUNumberUnitWidth(style))); +} +void Output5Styles(std::vector<icu::UnicodeString>* out, + std::vector<std::string>* types, const char* type, + double value, JSDurationFormat::Display display, + JSDurationFormat::FieldStyle style, + const icu::number::LocalizedNumberFormatter& fmt, + icu::MeasureUnit unit) { + if (value == 0 && display == JSDurationFormat::Display::kAuto) return; + if (style == JSDurationFormat::FieldStyle::k2Digit) { + types->push_back(type); + return Output(out, value, + fmt.integerWidth(icu::number::IntegerWidth::zeroFillTo(2))); + } + Output4Styles(out, types, type, value, display, style, fmt, unit); +} + +void DurationRecordToListOfStrings( + std::vector<icu::UnicodeString>* out, std::vector<std::string>* types, + Handle<JSDurationFormat> df, + const icu::number::LocalizedNumberFormatter& fmt, + const DurationRecord& record) { + // The handling of "2-digit" or "numeric" style of + // step l.i.6.c.i-ii "Let separator be + // dataLocaleData.[[digitalFormat]].[[separator]]." and + // "Append the new Record { [[Type]]: "literal", [[Value]]: separator} to the + // end of result." are not implemented following the spec due to unresolved + // issues in + // https://github.com/tc39/proposal-intl-duration-format/issues/55 + Output3Styles(out, types, "years", record.years, df->years_display(), + fmt.unit(icu::MeasureUnit::getYear()) + .unitWidth(ToUNumberUnitWidth(df->years_style()))); + Output3Styles(out, types, "months", record.months, df->months_display(), + fmt.unit(icu::MeasureUnit::getMonth()) + .unitWidth(ToUNumberUnitWidth(df->months_style()))); + Output3Styles(out, types, "weeks", record.weeks, df->weeks_display(), + fmt.unit(icu::MeasureUnit::getWeek()) + .unitWidth(ToUNumberUnitWidth(df->weeks_style()))); + Output3Styles(out, types, "days", record.time_duration.days, + df->days_display(), + fmt.unit(icu::MeasureUnit::getDay()) + .unitWidth(ToUNumberUnitWidth(df->days_style()))); + Output5Styles(out, types, "hours", record.time_duration.hours, + df->hours_display(), df->hours_style(), fmt, + icu::MeasureUnit::getHour()); + Output5Styles(out, types, "minutes", record.time_duration.minutes, + df->minutes_display(), df->minutes_style(), fmt, + icu::MeasureUnit::getMinute()); + int32_t fractional_digits = df->fractional_digits(); + if (df->milliseconds_style() == JSDurationFormat::FieldStyle::kNumeric) { + // a. Set value to value + duration.[[Milliseconds]] / 10^3 + + // duration.[[Microseconds]] / 10^6 + duration.[[Nanoseconds]] / 10^9. + double value = record.time_duration.seconds + + record.time_duration.milliseconds / 1e3 + + record.time_duration.microseconds / 1e6 + + record.time_duration.nanoseconds / 1e9; + Output5Styles(out, types, "seconds", value, df->seconds_display(), + df->seconds_style(), + fmt.precision(icu::number::Precision::minMaxFraction( + fractional_digits, fractional_digits)), + icu::MeasureUnit::getSecond()); + return; + } + Output5Styles(out, types, "seconds", record.time_duration.seconds, + df->seconds_display(), df->seconds_style(), fmt, + icu::MeasureUnit::getSecond()); + + if (df->microseconds_style() == JSDurationFormat::FieldStyle::kNumeric) { + // a. Set value to value + duration.[[Microseconds]] / 10^3 + + // duration.[[Nanoseconds]] / 10^6. 
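When the seconds style resolves to "numeric", the branch above folds milliseconds, microseconds, and nanoseconds into a single fractional seconds value and formats it with exactly fractionalDigits digits; the branches that follow repeat the same pattern one level down for milliseconds and then microseconds. A standalone sketch of that folding arithmetic (assuming only the spec's 0-9 digit range):

```cpp
#include <cstdio>

// Fold the sub-second fields into fractional seconds, as the "numeric"
// seconds path in DurationRecordToListOfStrings does.
double FoldIntoSeconds(double s, double ms, double us, double ns) {
  return s + ms / 1e3 + us / 1e6 + ns / 1e9;
}

int main() {
  // 5 s, 123 ms, 450 us, 700 ns -> one "seconds" part instead of four parts.
  double value = FoldIntoSeconds(5, 123, 450, 700);
  int fractional_digits = 6;  // fractionalDigits is clamped to 0..9
  std::printf("%.*f\n", fractional_digits, value);  // prints 5.123451
  return 0;
}
```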
+ double value = record.time_duration.milliseconds + + record.time_duration.microseconds / 1e3 + + record.time_duration.nanoseconds / 1e6; + Output4Styles(out, types, "milliseconds", value, df->milliseconds_display(), + df->milliseconds_style(), + fmt.precision(icu::number::Precision::minMaxFraction( + fractional_digits, fractional_digits)), + icu::MeasureUnit::getMillisecond()); + return; + } + Output4Styles(out, types, "milliseconds", record.time_duration.milliseconds, + df->milliseconds_display(), df->milliseconds_style(), fmt, + icu::MeasureUnit::getMillisecond()); + + if (df->nanoseconds_style() == JSDurationFormat::FieldStyle::kNumeric) { + // a. Set value to value + duration.[[Nanoseconds]] / 10^3. + double value = record.time_duration.microseconds + + record.time_duration.nanoseconds / 1e3; + Output4Styles(out, types, "microseconds", value, df->microseconds_display(), + df->microseconds_style(), + fmt.precision(icu::number::Precision::minMaxFraction( + fractional_digits, fractional_digits)), + icu::MeasureUnit::getMicrosecond()); + return; + } + Output4Styles(out, types, "microseconds", record.time_duration.microseconds, + df->microseconds_display(), df->microseconds_style(), fmt, + icu::MeasureUnit::getMicrosecond()); + + Output4Styles(out, types, "nanoseconds", record.time_duration.nanoseconds, + df->nanoseconds_display(), df->nanoseconds_style(), fmt, + icu::MeasureUnit::getNanosecond()); +} + +UListFormatterWidth StyleToWidth(JSDurationFormat::Style style) { + switch (style) { + case JSDurationFormat::Style::kLong: + return ULISTFMT_WIDTH_WIDE; + case JSDurationFormat::Style::kShort: + return ULISTFMT_WIDTH_SHORT; + case JSDurationFormat::Style::kNarrow: + case JSDurationFormat::Style::kDigital: + return ULISTFMT_WIDTH_NARROW; + } + UNREACHABLE(); +} + +template <typename T, + MaybeHandle<T> (*Format)(Isolate*, const icu::FormattedValue&, + const std::vector<std::string>&)> +MaybeHandle<T> PartitionDurationFormatPattern(Isolate* isolate, + Handle<JSDurationFormat> df, + const DurationRecord& record, + const char* method_name) { + // 4. Let lfOpts be ! OrdinaryObjectCreate(null). + // 5. Perform ! CreateDataPropertyOrThrow(lfOpts, "type", "unit"). + UListFormatterType type = ULISTFMT_TYPE_UNITS; + // 6. Let listStyle be durationFormat.[[Style]]. + // 7. If listStyle is "digital", then + // a. Set listStyle to "narrow". + // 8. Perform ! CreateDataPropertyOrThrow(lfOpts, "style", listStyle). + UListFormatterWidth list_style = StyleToWidth(df->style()); + // 9. Let lf be ! Construct(%ListFormat%, « durationFormat.[[Locale]], lfOpts + // »). + UErrorCode status = U_ZERO_ERROR; + icu::Locale icu_locale = *df->icu_locale().raw(); + std::unique_ptr<icu::ListFormatter> formatter( + icu::ListFormatter::createInstance(icu_locale, type, list_style, status)); + CHECK(U_SUCCESS(status)); + + std::vector<icu::UnicodeString> list; + std::vector<std::string> types; + + DurationRecordToListOfStrings(&list, &types, df, + *(df->icu_number_formatter().raw()), record); + + icu::FormattedList formatted = formatter->formatStringsToValue( + list.data(), static_cast<int32_t>(list.size()), status); + CHECK(U_SUCCESS(status)); + return Format(isolate, formatted, types); +} + +template <typename T, + MaybeHandle<T> (*Format)(Isolate*, const icu::FormattedValue&, + const std::vector<std::string>&)> +MaybeHandle<T> FormatCommon(Isolate* isolate, Handle<JSDurationFormat> df, + Handle<Object> duration, const char* method_name) { + // 1. Let df be this value. + // 2. Perform ? 
RequireInternalSlot(df, [[InitializedDurationFormat]]). + // 3. Let record be ? ToDurationRecord(duration). + DurationRecord record; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, record, + temporal::ToPartialDuration(isolate, duration, + {0, 0, 0, {0, 0, 0, 0, 0, 0, 0}}), + Handle<T>()); + // 4. If IsValidDurationRecord(record) is false, throw a RangeError exception. + if (!temporal::IsValidDuration(isolate, record)) { + THROW_NEW_ERROR( + isolate, + NewRangeError(MessageTemplate::kInvalid, + isolate->factory()->object_string(), duration), + T); + } + // 5. Let parts be ! PartitionDurationFormatPattern(df, record). + return PartitionDurationFormatPattern<T, Format>(isolate, df, record, + method_name); +} + +} // namespace + +MaybeHandle<String> FormattedToString(Isolate* isolate, + const icu::FormattedValue& formatted, + const std::vector<std::string>&) { + return Intl::FormattedToString(isolate, formatted); +} + +MaybeHandle<JSArray> FormattedListToJSArray( + Isolate* isolate, const icu::FormattedValue& formatted, + const std::vector<std::string>& types) { + Factory* factory = isolate->factory(); + Handle<JSArray> array = factory->NewJSArray(0); + icu::ConstrainedFieldPosition cfpos; + cfpos.constrainCategory(UFIELD_CATEGORY_LIST); + int index = 0; + int type_index = 0; + UErrorCode status = U_ZERO_ERROR; + icu::UnicodeString string = formatted.toString(status); + Handle<String> substring; + while (formatted.nextPosition(cfpos, status) && U_SUCCESS(status)) { + ASSIGN_RETURN_ON_EXCEPTION( + isolate, substring, + Intl::ToString(isolate, string, cfpos.getStart(), cfpos.getLimit()), + JSArray); + Handle<String> type_string = factory->literal_string(); + if (cfpos.getField() == ULISTFMT_ELEMENT_FIELD) { + type_string = + factory->NewStringFromAsciiChecked(types[type_index].c_str()); + type_index++; + } + Intl::AddElement(isolate, array, index++, type_string, substring); + } + if (U_FAILURE(status)) { + THROW_NEW_ERROR(isolate, NewTypeError(MessageTemplate::kIcuError), JSArray); + } + JSObject::ValidateElements(*array); + return array; +} + +MaybeHandle<String> JSDurationFormat::Format(Isolate* isolate, + Handle<JSDurationFormat> df, + Handle<Object> duration) { + const char* method_name = "Intl.DurationFormat.prototype.format"; + return FormatCommon<String, FormattedToString>(isolate, df, duration, + method_name); +} + +MaybeHandle<JSArray> JSDurationFormat::FormatToParts( + Isolate* isolate, Handle<JSDurationFormat> df, Handle<Object> duration) { + const char* method_name = "Intl.DurationFormat.prototype.formatToParts"; + return FormatCommon<JSArray, FormattedListToJSArray>(isolate, df, duration, + method_name); +} + +const std::set<std::string>& JSDurationFormat::GetAvailableLocales() { + return JSNumberFormat::GetAvailableLocales(); +} + +} // namespace internal +} // namespace v8 diff --git a/deps/v8/src/objects/js-duration-format.h b/deps/v8/src/objects/js-duration-format.h new file mode 100644 index 00000000000000..b91ee47e157bc2 --- /dev/null +++ b/deps/v8/src/objects/js-duration-format.h @@ -0,0 +1,166 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_INTL_SUPPORT +#error Internationalization is expected to be enabled. 
+#endif  // V8_INTL_SUPPORT
+
+#ifndef V8_OBJECTS_JS_DURATION_FORMAT_H_
+#define V8_OBJECTS_JS_DURATION_FORMAT_H_
+
+#include "src/execution/isolate.h"
+#include "src/heap/factory.h"
+#include "src/objects/managed.h"
+#include "src/objects/objects.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace U_ICU_NAMESPACE {
+class Locale;
+namespace number {
+class LocalizedNumberFormatter;
+}  // namespace number
+}  // namespace U_ICU_NAMESPACE
+
+namespace v8 {
+namespace internal {
+
+#include "torque-generated/src/objects/js-duration-format-tq.inc"
+
+class JSDurationFormat
+    : public TorqueGeneratedJSDurationFormat<JSDurationFormat, JSObject> {
+ public:
+  // Creates duration format object with properties derived from input
+  // locales and options.
+  V8_WARN_UNUSED_RESULT static MaybeHandle<JSDurationFormat> New(
+      Isolate* isolate, Handle<Map> map, Handle<Object> locales,
+      Handle<Object> options);
+
+  V8_WARN_UNUSED_RESULT static Handle<JSObject> ResolvedOptions(
+      Isolate* isolate, Handle<JSDurationFormat> format_holder);
+
+  V8_WARN_UNUSED_RESULT static MaybeHandle<String> Format(
+      Isolate* isolate, Handle<JSDurationFormat> df, Handle<Object> duration);
+
+  V8_WARN_UNUSED_RESULT static MaybeHandle<JSArray> FormatToParts(
+      Isolate* isolate, Handle<JSDurationFormat> df, Handle<Object> duration);
+
+  V8_EXPORT_PRIVATE static const std::set<std::string>& GetAvailableLocales();
+
+  enum class Display {
+    kAuto,
+    kAlways,
+
+    kMax = kAlways
+  };
+
+  enum class Style {
+    kLong,
+    kShort,
+    kNarrow,
+    kDigital,
+
+    kMax = kDigital
+  };
+
+  // The ordering of these values is significant, because sub-ranges are
+  // encoded using bitfields.
+  enum class FieldStyle {
+    kLong,
+    kShort,
+    kNarrow,
+    kNumeric,
+    k2Digit,
+    kUndefined,
+
+    kStyle3Max = kNarrow,
+    kStyle4Max = kNumeric,
+    kStyle5Max = k2Digit,
+  };
+
+#define DECLARE_INLINE_SETTER_GETTER(T, n) \
+  inline void set_##n(T display);          \
+  inline T n() const;
+
+#define DECLARE_INLINE_DISPLAY_SETTER_GETTER(f) \
+  DECLARE_INLINE_SETTER_GETTER(Display, f##_display)
+
+#define DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(f) \
+  DECLARE_INLINE_SETTER_GETTER(FieldStyle, f##_style)
+
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(years)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(months)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(weeks)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(days)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(hours)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(minutes)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(seconds)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(milliseconds)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(microseconds)
+  DECLARE_INLINE_DISPLAY_SETTER_GETTER(nanoseconds)
+
+  DECLARE_INLINE_SETTER_GETTER(Style, style)
+
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(years)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(months)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(weeks)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(days)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(hours)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(minutes)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(seconds)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(milliseconds)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(microseconds)
+  DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER(nanoseconds)
+
+#undef DECLARE_INLINE_SETTER_GETTER
+#undef DECLARE_INLINE_DISPLAY_SETTER_GETTER
+#undef DECLARE_INLINE_FIELD_STYLE_SETTER_GETTER
+
+  inline void set_fractional_digits(int32_t digits);
+  inline int32_t fractional_digits() const;
+
+  // Bit positions in |flags|.
+ DEFINE_TORQUE_GENERATED_JS_DURATION_FORMAT_DISPLAY_FLAGS() + DEFINE_TORQUE_GENERATED_JS_DURATION_FORMAT_STYLE_FLAGS() + + static_assert(YearsDisplayBit::is_valid(Display::kMax)); + static_assert(MonthsDisplayBit::is_valid(Display::kMax)); + static_assert(WeeksDisplayBit::is_valid(Display::kMax)); + static_assert(DaysDisplayBit::is_valid(Display::kMax)); + static_assert(HoursDisplayBit::is_valid(Display::kMax)); + static_assert(MinutesDisplayBit::is_valid(Display::kMax)); + static_assert(SecondsDisplayBit::is_valid(Display::kMax)); + static_assert(MillisecondsDisplayBit::is_valid(Display::kMax)); + static_assert(MicrosecondsDisplayBit::is_valid(Display::kMax)); + static_assert(NanosecondsDisplayBit::is_valid(Display::kMax)); + + static_assert(StyleBits::is_valid(Style::kMax)); + + static_assert(YearsStyleBits::is_valid(FieldStyle::kStyle3Max)); + static_assert(MonthsStyleBits::is_valid(FieldStyle::kStyle3Max)); + static_assert(WeeksStyleBits::is_valid(FieldStyle::kStyle3Max)); + static_assert(DaysStyleBits::is_valid(FieldStyle::kStyle3Max)); + static_assert(HoursStyleBits::is_valid(FieldStyle::kStyle5Max)); + static_assert(MinutesStyleBits::is_valid(FieldStyle::kStyle5Max)); + static_assert(SecondsStyleBits::is_valid(FieldStyle::kStyle5Max)); + static_assert(MillisecondsStyleBits::is_valid(FieldStyle::kStyle4Max)); + static_assert(MicrosecondsStyleBits::is_valid(FieldStyle::kStyle4Max)); + static_assert(NanosecondsStyleBits::is_valid(FieldStyle::kStyle4Max)); + + DECL_ACCESSORS(icu_locale, Managed<icu::Locale>) + DECL_ACCESSORS(icu_number_formatter, + Managed<icu::number::LocalizedNumberFormatter>) + + DECL_PRINTER(JSDurationFormat) + + TQ_OBJECT_CONSTRUCTORS(JSDurationFormat) +}; + +} // namespace internal +} // namespace v8 + +#include "src/objects/object-macros-undef.h" + +#endif // V8_OBJECTS_JS_DURATION_FORMAT_H_ diff --git a/deps/v8/src/objects/js-duration-format.tq b/deps/v8/src/objects/js-duration-format.tq new file mode 100644 index 00000000000000..55fe354105e3cd --- /dev/null +++ b/deps/v8/src/objects/js-duration-format.tq @@ -0,0 +1,45 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
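The static_asserts above are what make the "ordering is significant" comment on FieldStyle enforceable: each Torque-generated bitfield must be wide enough for the largest enumerator its unit can legally store, which is why the three-style units fit in two bits while hours/minutes/seconds need three (they can hold k2Digit). A hand-rolled equivalent of two of those bitfields, with illustrative shift positions following the declaration order in the .tq file below:

```cpp
#include <cassert>
#include <cstdint>

enum FieldStyle : uint32_t { kLong, kShort, kNarrow, kNumeric, k2Digit };

// Minimal update/decode pair in the style of V8's BitField template.
template <uint32_t Shift, uint32_t Bits>
struct BitField {
  static constexpr uint32_t kMask = ((1u << Bits) - 1) << Shift;
  static uint32_t update(uint32_t flags, uint32_t value) {
    assert(value < (1u << Bits));  // mirrors the is_valid() static_asserts
    return (flags & ~kMask) | (value << Shift);
  }
  static uint32_t decode(uint32_t flags) { return (flags & kMask) >> Shift; }
};

using YearsStyleBits = BitField<2, 2>;   // 3-style unit: kNarrow fits 2 bits
using HoursStyleBits = BitField<10, 3>;  // 5-style unit: k2Digit needs 3 bits

int main() {
  uint32_t flags = 0;
  flags = YearsStyleBits::update(flags, kNarrow);
  flags = HoursStyleBits::update(flags, k2Digit);
  assert(YearsStyleBits::decode(flags) == kNarrow);
  assert(HoursStyleBits::decode(flags) == k2Digit);
  return 0;
}
```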
+ +#include 'src/objects/js-duration-format.h' + +type JSDurationFormatStyle extends int32 constexpr 'JSDurationFormat::Style'; +type JSDurationFormatFieldStyle extends int32 +constexpr 'JSDurationFormat::FieldStyle'; +type JSDurationFormatDisplay extends int32 +constexpr 'JSDurationFormat::Display'; +bitfield struct JSDurationFormatStyleFlags extends uint31 { + style: JSDurationFormatStyle: 2 bit; + years_style: JSDurationFormatFieldStyle: 2 bit; + months_style: JSDurationFormatFieldStyle: 2 bit; + weeks_style: JSDurationFormatFieldStyle: 2 bit; + days_style: JSDurationFormatFieldStyle: 2 bit; + hours_style: JSDurationFormatFieldStyle: 3 bit; + minutes_style: JSDurationFormatFieldStyle: 3 bit; + seconds_style: JSDurationFormatFieldStyle: 3 bit; + milliseconds_style: JSDurationFormatFieldStyle: 2 bit; + microseconds_style: JSDurationFormatFieldStyle: 2 bit; + nanoseconds_style: JSDurationFormatFieldStyle: 2 bit; +} +bitfield struct JSDurationFormatDisplayFlags extends uint31 { + years_display: JSDurationFormatDisplay: 1 bit; + months_display: JSDurationFormatDisplay: 1 bit; + weeks_display: JSDurationFormatDisplay: 1 bit; + days_display: JSDurationFormatDisplay: 1 bit; + hours_display: JSDurationFormatDisplay: 1 bit; + minutes_display: JSDurationFormatDisplay: 1 bit; + seconds_display: JSDurationFormatDisplay: 1 bit; + milliseconds_display: JSDurationFormatDisplay: 1 bit; + microseconds_display: JSDurationFormatDisplay: 1 bit; + nanoseconds_display: JSDurationFormatDisplay: 1 bit; + fractional_digits: int32: 4 bit; +} + +extern class JSDurationFormat extends JSObject { + style_flags: SmiTagged<JSDurationFormatStyleFlags>; + display_flags: SmiTagged<JSDurationFormatDisplayFlags>; + icu_locale: Foreign; // Managed<icu::Locale> + icu_number_formatter: + Foreign; // Managed<icu::number::LocalizedNumberFormatter> +} diff --git a/deps/v8/src/objects/js-function.cc b/deps/v8/src/objects/js-function.cc index a7b71455964696..62fe309a471b32 100644 --- a/deps/v8/src/objects/js-function.cc +++ b/deps/v8/src/objects/js-function.cc @@ -628,7 +628,6 @@ void JSFunction::InitializeFeedbackCell( // profile and more precise code coverage. v8_flags.log_function_events || !isolate->is_best_effort_code_coverage() || - isolate->is_collecting_type_profile() || function->shared().sparkplug_compiled(); if (needs_feedback_vector) { @@ -844,6 +843,7 @@ bool CanSubclassHaveInobjectProperties(InstanceType instance_type) { case JS_COLLATOR_TYPE: case JS_DATE_TIME_FORMAT_TYPE: case JS_DISPLAY_NAMES_TYPE: + case JS_DURATION_FORMAT_TYPE: case JS_LIST_FORMAT_TYPE: case JS_LOCALE_TYPE: case JS_NUMBER_FORMAT_TYPE: diff --git a/deps/v8/src/objects/js-number-format.cc b/deps/v8/src/objects/js-number-format.cc index af7bef632a557f..606fd3caf09dc4 100644 --- a/deps/v8/src/objects/js-number-format.cc +++ b/deps/v8/src/objects/js-number-format.cc @@ -458,7 +458,8 @@ const icu::UnicodeString CurrencyFromSkeleton( return skeleton.tempSubString(index, 3); } -const icu::UnicodeString NumberingSystemFromSkeleton( +} // namespace +const icu::UnicodeString JSNumberFormat::NumberingSystemFromSkeleton( const icu::UnicodeString& skeleton) { const char numbering_system[] = "numbering-system/"; int32_t index = skeleton.indexOf(numbering_system); @@ -470,6 +471,8 @@ const icu::UnicodeString NumberingSystemFromSkeleton( return res.tempSubString(0, index); } +namespace { + // Return CurrencySign as string based on skeleton. 
Handle<String> CurrencySignString(Isolate* isolate, const icu::UnicodeString& skeleton) { @@ -949,7 +952,7 @@ Handle<JSObject> JSNumberFormat::ResolvedOptions( Handle<String> locale = Handle<String>(number_format->locale(), isolate); const icu::UnicodeString numberingSystem_ustr = - NumberingSystemFromSkeleton(skeleton); + JSNumberFormat::NumberingSystemFromSkeleton(skeleton); // 5. For each row of Table 4, except the header row, in table order, do // Table 4: Resolved Options of NumberFormat Instances // Internal Slot Property diff --git a/deps/v8/src/objects/js-number-format.h b/deps/v8/src/objects/js-number-format.h index 9122fdaf432ab3..1b2a0041a9b3e7 100644 --- a/deps/v8/src/objects/js-number-format.h +++ b/deps/v8/src/objects/js-number-format.h @@ -96,6 +96,9 @@ class JSNumberFormat const Intl::NumberFormatDigitOptions& digit_options, int rounding_increment, ShowTrailingZeros show); + static const icu::UnicodeString NumberingSystemFromSkeleton( + const icu::UnicodeString& skeleton); + V8_WARN_UNUSED_RESULT static Maybe<icu::number::LocalizedNumberRangeFormatter> GetRangeFormatter( Isolate* isolate, String locale, diff --git a/deps/v8/src/objects/js-objects-inl.h b/deps/v8/src/objects/js-objects-inl.h index ddb5e54c16611d..660f8c7a2ee737 100644 --- a/deps/v8/src/objects/js-objects-inl.h +++ b/deps/v8/src/objects/js-objects-inl.h @@ -99,6 +99,13 @@ MaybeHandle<HeapObject> JSReceiver::GetPrototype(Isolate* isolate, Handle<JSReceiver> receiver) { // We don't expect access checks to be needed on JSProxy objects. DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject()); + + if (receiver->IsWasmObject()) { + THROW_NEW_ERROR(isolate, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque), + HeapObject); + } + PrototypeIterator iter(isolate, receiver, kStartAtReceiver, PrototypeIterator::END_AT_NON_HIDDEN); do { diff --git a/deps/v8/src/objects/js-objects.cc b/deps/v8/src/objects/js-objects.cc index 02c5128ea9c749..15356b6c58d2f7 100644 --- a/deps/v8/src/objects/js-objects.cc +++ b/deps/v8/src/objects/js-objects.cc @@ -16,6 +16,7 @@ #include "src/heap/factory-inl.h" #include "src/heap/heap-inl.h" #include "src/heap/memory-chunk.h" +#include "src/heap/pretenuring-handler-inl.h" #include "src/init/bootstrapper.h" #include "src/logging/counters.h" #include "src/logging/log.h" @@ -42,6 +43,7 @@ #ifdef V8_INTL_SUPPORT #include "src/objects/js-date-time-format.h" #include "src/objects/js-display-names.h" +#include "src/objects/js-duration-format.h" #endif // V8_INTL_SUPPORT #include "src/objects/js-generator-inl.h" #ifdef V8_INTL_SUPPORT @@ -60,6 +62,7 @@ #include "src/objects/js-segmenter.h" #include "src/objects/js-segments.h" #endif // V8_INTL_SUPPORT +#include "src/objects/js-raw-json-inl.h" #include "src/objects/js-shared-array-inl.h" #include "src/objects/js-struct-inl.h" #include "src/objects/js-temporal-objects-inl.h" @@ -97,6 +100,8 @@ Maybe<bool> JSReceiver::HasProperty(LookupIterator* it) { case LookupIterator::JSPROXY: return JSProxy::HasProperty(it->isolate(), it->GetHolder<JSProxy>(), it->GetName()); + case LookupIterator::WASM_OBJECT: + return Just(false); case LookupIterator::INTERCEPTOR: { Maybe<PropertyAttributes> result = JSObject::GetPropertyAttributesWithInterceptor(it); @@ -157,6 +162,7 @@ Handle<Object> JSReceiver::GetDataProperty(LookupIterator* it, if (!it->isolate()->context().is_null() && it->HasAccess()) continue; V8_FALLTHROUGH; case LookupIterator::JSPROXY: + case LookupIterator::WASM_OBJECT: it->NotFound(); return it->isolate()->factory()->undefined_value(); 
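The new LookupIterator::WASM_OBJECT state, together with the early IsWasmObject() checks threaded through these lookup switches, encodes one policy: Wasm GC objects are opaque to JavaScript reflection. Read-only queries answer as if nothing exists, while mutating or prototype-walking operations throw kWasmObjectsAreOpaque. A toy model of that dispatch, with illustrative types rather than the V8 classes:

```cpp
#include <cassert>
#include <optional>

enum class Kind { kOrdinary, kWasmObject };

struct Object {
  Kind kind;
  bool has_property;
};

// HasProperty-style query: opaque objects simply report "not found"
// (Just(false) in V8) instead of throwing.
bool HasProperty(const Object& o) {
  if (o.kind == Kind::kWasmObject) return false;
  return o.has_property;
}

// GetPrototypeOf-style operation: opaque objects refuse to answer; nullopt
// stands in for the thrown TypeError.
std::optional<const Object*> GetPrototype(const Object& o,
                                          const Object* proto) {
  if (o.kind == Kind::kWasmObject) return std::nullopt;
  return proto;
}

int main() {
  Object plain{Kind::kOrdinary, true};
  Object wasm{Kind::kWasmObject, true};
  assert(HasProperty(plain) && !HasProperty(wasm));
  assert(GetPrototype(plain, &plain).has_value());
  assert(!GetPrototype(wasm, &plain).has_value());
  return 0;
}
```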
case LookupIterator::ACCESSOR: @@ -223,6 +229,9 @@ Maybe<bool> JSReceiver::CheckPrivateNameStore(LookupIterator* it, NewTypeError(message, name_string, it->GetReceiver())); } return Just(true); + case LookupIterator::WASM_OBJECT: + RETURN_FAILURE(isolate, kThrowOnError, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); } } DCHECK(!it->IsFound()); @@ -734,6 +743,8 @@ Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes( UNREACHABLE(); case LookupIterator::JSPROXY: return JSProxy::GetPropertyAttributes(it); + case LookupIterator::WASM_OBJECT: + return Just(ABSENT); case LookupIterator::INTERCEPTOR: { Maybe<PropertyAttributes> result = JSObject::GetPropertyAttributesWithInterceptor(it); @@ -948,7 +959,6 @@ Maybe<bool> JSReceiver::DeleteProperty(LookupIterator* it, } return Just(true); } - Handle<JSObject> receiver = Handle<JSObject>::cast(it->GetReceiver()); for (; it->IsFound(); it->Next()) { switch (it->state()) { @@ -956,6 +966,9 @@ Maybe<bool> JSReceiver::DeleteProperty(LookupIterator* it, case LookupIterator::NOT_FOUND: case LookupIterator::TRANSITION: UNREACHABLE(); + case LookupIterator::WASM_OBJECT: + RETURN_FAILURE(isolate, kThrowOnError, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); case LookupIterator::ACCESS_CHECK: if (it->HasAccess()) break; isolate->ReportFailedAccessCheck(it->GetHolder<JSObject>()); @@ -986,7 +999,7 @@ Maybe<bool> JSReceiver::DeleteProperty(LookupIterator* it, if (is_strict(language_mode)) { isolate->Throw(*isolate->factory()->NewTypeError( MessageTemplate::kStrictDeleteProperty, it->GetName(), - receiver)); + it->GetReceiver())); return Nothing<bool>(); } return Just(false); @@ -1158,6 +1171,10 @@ Maybe<bool> JSReceiver::DefineOwnProperty(Isolate* isolate, isolate, Handle<JSModuleNamespace>::cast(object), key, desc, should_throw); } + if (object->IsWasmObject()) { + RETURN_FAILURE(isolate, kThrowOnError, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); + } // OrdinaryDefineOwnProperty, by virtue of calling // DefineOwnPropertyIgnoreAttributes, can handle arguments @@ -1745,6 +1762,9 @@ Maybe<bool> JSReceiver::AddPrivateField(LookupIterator* it, return JSProxy::SetPrivateSymbol(isolate, Handle<JSProxy>::cast(receiver), symbol, &new_desc, should_throw); } + case LookupIterator::WASM_OBJECT: + RETURN_FAILURE(isolate, kThrowOnError, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); case LookupIterator::DATA: case LookupIterator::INTERCEPTOR: case LookupIterator::ACCESSOR: @@ -2033,6 +2053,10 @@ Maybe<bool> JSReceiver::PreventExtensions(Handle<JSReceiver> object, return JSProxy::PreventExtensions(Handle<JSProxy>::cast(object), should_throw); } + if (object->IsWasmObject()) { + RETURN_FAILURE(object->GetIsolate(), kThrowOnError, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); + } DCHECK(object->IsJSObject()); return JSObject::PreventExtensions(Handle<JSObject>::cast(object), should_throw); @@ -2042,6 +2066,9 @@ Maybe<bool> JSReceiver::IsExtensible(Handle<JSReceiver> object) { if (object->IsJSProxy()) { return JSProxy::IsExtensible(Handle<JSProxy>::cast(object)); } + if (object->IsWasmObject()) { + return Just(false); + } return Just(JSObject::IsExtensible(Handle<JSObject>::cast(object))); } @@ -2282,6 +2309,11 @@ Maybe<bool> JSReceiver::SetPrototype(Isolate* isolate, Handle<JSReceiver> object, Handle<Object> value, bool from_javascript, ShouldThrow should_throw) { + if (object->IsWasmObject()) { + RETURN_FAILURE(isolate, should_throw, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); + } + if 
(object->IsJSProxy()) {
     return JSProxy::SetPrototype(isolate, Handle<JSProxy>::cast(object),
                                  value, from_javascript, should_throw);
@@ -2506,6 +2538,8 @@ int JSObject::GetHeaderSize(InstanceType type,
       return JSTemporalZonedDateTime::kHeaderSize;
     case JS_WRAPPED_FUNCTION_TYPE:
       return JSWrappedFunction::kHeaderSize;
+    case JS_RAW_JSON_TYPE:
+      return JSRawJson::kHeaderSize;
 #ifdef V8_INTL_SUPPORT
     case JS_V8_BREAK_ITERATOR_TYPE:
       return JSV8BreakIterator::kHeaderSize;
@@ -2515,6 +2549,8 @@
       return JSDateTimeFormat::kHeaderSize;
     case JS_DISPLAY_NAMES_TYPE:
       return JSDisplayNames::kHeaderSize;
+    case JS_DURATION_FORMAT_TYPE:
+      return JSDurationFormat::kHeaderSize;
     case JS_LIST_FORMAT_TYPE:
       return JSListFormat::kHeaderSize;
     case JS_LOCALE_TYPE:
@@ -3561,6 +3597,7 @@ Maybe<bool> JSObject::DefineOwnPropertyIgnoreAttributes(
   for (; it->IsFound(); it->Next()) {
     switch (it->state()) {
       case LookupIterator::JSPROXY:
+      case LookupIterator::WASM_OBJECT:
       case LookupIterator::TRANSITION:
       case LookupIterator::NOT_FOUND:
         UNREACHABLE();
@@ -5296,8 +5333,11 @@ bool JSObject::UpdateAllocationSite(Handle<JSObject> object,
   DisallowGarbageCollection no_gc;
   Heap* heap = object->GetHeap();
+  PretenuringHandler* pretenuring_handler = heap->pretenuring_handler();
   AllocationMemento memento =
-      heap->FindAllocationMemento<Heap::kForRuntime>(object->map(), *object);
+      pretenuring_handler
+          ->FindAllocationMemento<PretenuringHandler::kForRuntime>(
+              object->map(), *object);
   if (memento.is_null()) return false;

   // Walk through to the Allocation Site
diff --git a/deps/v8/src/objects/js-raw-json-inl.h b/deps/v8/src/objects/js-raw-json-inl.h
new file mode 100644
index 00000000000000..b20a6ad1170b87
--- /dev/null
+++ b/deps/v8/src/objects/js-raw-json-inl.h
@@ -0,0 +1,26 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_OBJECTS_JS_RAW_JSON_INL_H_
+#define V8_OBJECTS_JS_RAW_JSON_INL_H_
+
+#include "src/objects/js-raw-json.h"
+#include "src/objects/objects-inl.h"
+
+// Has to be the last include (doesn't have include guards):
+#include "src/objects/object-macros.h"
+
+namespace v8 {
+namespace internal {
+
+#include "torque-generated/src/objects/js-raw-json-tq-inl.inc"
+
+TQ_OBJECT_CONSTRUCTORS_IMPL(JSRawJson)
+
+}  // namespace internal
+}  // namespace v8
+
+#include "src/objects/object-macros-undef.h"
+
+#endif  // V8_OBJECTS_JS_RAW_JSON_INL_H_
diff --git a/deps/v8/src/objects/js-raw-json.cc b/deps/v8/src/objects/js-raw-json.cc
new file mode 100644
index 00000000000000..1c820660760459
--- /dev/null
+++ b/deps/v8/src/objects/js-raw-json.cc
@@ -0,0 +1,42 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+ +#include "src/objects/js-raw-json.h" + +#include "src/execution/isolate.h" +#include "src/heap/factory.h" +#include "src/json/json-parser.h" +#include "src/objects/js-raw-json-inl.h" +#include "src/objects/string-inl.h" + +namespace v8 { +namespace internal { + +// https://tc39.es/proposal-json-parse-with-source/#sec-json.rawjson +MaybeHandle<JSRawJson> JSRawJson::Create(Isolate* isolate, + Handle<Object> text) { + DCHECK(v8_flags.harmony_json_parse_with_source); + Handle<String> json_string; + ASSIGN_RETURN_ON_EXCEPTION(isolate, json_string, + Object::ToString(isolate, text), JSRawJson); + if (String::IsOneByteRepresentationUnderneath(*json_string)) { + if (!JsonParser<uint8_t>::CheckRawJson(isolate, json_string)) { + DCHECK(isolate->has_pending_exception()); + return MaybeHandle<JSRawJson>(); + } + } else { + if (!JsonParser<uint16_t>::CheckRawJson(isolate, json_string)) { + DCHECK(isolate->has_pending_exception()); + return MaybeHandle<JSRawJson>(); + } + } + Handle<JSObject> result = + isolate->factory()->NewJSObjectFromMap(isolate->js_raw_json_map()); + result->InObjectPropertyAtPut(JSRawJson::kRawJsonIndex, *json_string); + JSObject::SetIntegrityLevel(result, FROZEN, kThrowOnError).Check(); + return Handle<JSRawJson>::cast(result); +} + +} // namespace internal +} // namespace v8 diff --git a/deps/v8/src/objects/js-raw-json.h b/deps/v8/src/objects/js-raw-json.h new file mode 100644 index 00000000000000..bb9f90cbc54d9d --- /dev/null +++ b/deps/v8/src/objects/js-raw-json.h @@ -0,0 +1,43 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_OBJECTS_JS_RAW_JSON_H_ +#define V8_OBJECTS_JS_RAW_JSON_H_ + +#include "src/execution/isolate.h" + +// Has to be the last include (doesn't have include guards): +#include "src/objects/object-macros.h" + +namespace v8 { +namespace internal { + +#include "torque-generated/src/objects/js-raw-json-tq.inc" + +class JSRawJson : public TorqueGeneratedJSRawJson<JSRawJson, JSObject> { + public: + // Layout description. +#define JS_RAW_JSON_FIELDS(V) \ + V(kRawJsonOffset, kTaggedSize) \ + /* Total size. */ \ + V(kSize, 0) + DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize, JS_RAW_JSON_FIELDS) +#undef JS_RAW_JSON_FIELDS + + static const int kRawJsonIndex = 0; + + V8_WARN_UNUSED_RESULT static MaybeHandle<JSRawJson> Create( + Isolate* isolate, Handle<Object> text); + + DECL_PRINTER(JSRawJson) + + TQ_OBJECT_CONSTRUCTORS(JSRawJson) +}; + +} // namespace internal +} // namespace v8 + +#include "src/objects/object-macros-undef.h" + +#endif // V8_OBJECTS_JS_RAW_JSON_H_ diff --git a/deps/v8/src/objects/js-raw-json.tq b/deps/v8/src/objects/js-raw-json.tq new file mode 100644 index 00000000000000..03eb6af35307f3 --- /dev/null +++ b/deps/v8/src/objects/js-raw-json.tq @@ -0,0 +1,7 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
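JSRawJson::Create validates the stringified input with the JSON parser (picking the one-byte or two-byte instantiation to match the string's representation), stores it in the object's single in-object slot, and then freezes the result so the raw text can never be replaced. A condensed standalone model of that create-validate-freeze shape; the whitespace rule shown is only part of what the real CheckRawJson enforces:

```cpp
#include <cassert>
#include <stdexcept>
#include <string>

struct RawJson {
  const std::string text;  // const plays the role of the FROZEN level
};

// Stand-in for JsonParser<Char>::CheckRawJson: among other checks, the real
// validator rejects empty input and leading/trailing JSON whitespace.
bool CheckRawJson(const std::string& s) {
  static const std::string kWs = " \t\n\r";
  if (s.empty()) return false;
  return kWs.find(s.front()) == std::string::npos &&
         kWs.find(s.back()) == std::string::npos;
}

RawJson Create(const std::string& text) {
  if (!CheckRawJson(text)) throw std::invalid_argument("invalid raw JSON");
  return RawJson{text};
}

int main() {
  assert(Create("123").text == "123");
  bool threw = false;
  try {
    Create(" 123");  // leading whitespace is rejected
  } catch (const std::invalid_argument&) {
    threw = true;
  }
  assert(threw);
  return 0;
}
```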
+ +#include 'src/objects/js-raw-json.h' + +extern class JSRawJson extends JSObject {} diff --git a/deps/v8/src/objects/js-regexp.cc b/deps/v8/src/objects/js-regexp.cc index e76ee87b894b3e..d90e88dc1ec57e 100644 --- a/deps/v8/src/objects/js-regexp.cc +++ b/deps/v8/src/objects/js-regexp.cc @@ -258,7 +258,7 @@ int CountAdditionalEscapeChars(Handle<String> source, bool* needs_escapes_out) { DisallowGarbageCollection no_gc; int escapes = 0; bool needs_escapes = false; - bool in_char_class = false; + bool in_character_class = false; base::Vector<const Char> src = source->GetCharVector<Char>(no_gc); for (int i = 0; i < src.length(); i++) { const Char c = src[i]; @@ -270,14 +270,14 @@ int CountAdditionalEscapeChars(Handle<String> source, bool* needs_escapes_out) { // Escape. Skip next character, which will be copied verbatim; i++; } - } else if (c == '/' && !in_char_class) { + } else if (c == '/' && !in_character_class) { // Not escaped forward-slash needs escape. needs_escapes = true; escapes++; } else if (c == '[') { - in_char_class = true; + in_character_class = true; } else if (c == ']') { - in_char_class = false; + in_character_class = false; } else if (c == '\n') { needs_escapes = true; escapes++; @@ -294,7 +294,7 @@ int CountAdditionalEscapeChars(Handle<String> source, bool* needs_escapes_out) { DCHECK(!IsLineTerminator(c)); } } - DCHECK(!in_char_class); + DCHECK(!in_character_class); DCHECK_GE(escapes, 0); DCHECK_IMPLIES(escapes != 0, needs_escapes); *needs_escapes_out = needs_escapes; @@ -315,7 +315,7 @@ Handle<StringType> WriteEscapedRegExpSource(Handle<String> source, base::Vector<Char> dst(result->GetChars(no_gc), result->length()); int s = 0; int d = 0; - bool in_char_class = false; + bool in_character_class = false; while (s < src.length()) { const Char c = src[s]; if (c == '\\') { @@ -328,13 +328,13 @@ Handle<StringType> WriteEscapedRegExpSource(Handle<String> source, dst[d++] = src[s++]; } if (s == src.length()) break; - } else if (c == '/' && !in_char_class) { + } else if (c == '/' && !in_character_class) { // Not escaped forward-slash needs escape. 
dst[d++] = '\\'; } else if (c == '[') { - in_char_class = true; + in_character_class = true; } else if (c == ']') { - in_char_class = false; + in_character_class = false; } else if (c == '\n') { WriteStringToCharVector(dst, &d, "\\n"); s++; @@ -357,7 +357,7 @@ Handle<StringType> WriteEscapedRegExpSource(Handle<String> source, dst[d++] = src[s++]; } DCHECK_EQ(result->length(), d); - DCHECK(!in_char_class); + DCHECK(!in_character_class); return result; } diff --git a/deps/v8/src/objects/js-regexp.tq b/deps/v8/src/objects/js-regexp.tq index d2aa4f9a3e7c59..3888767ecbb462 100644 --- a/deps/v8/src/objects/js-regexp.tq +++ b/deps/v8/src/objects/js-regexp.tq @@ -30,6 +30,8 @@ extern operator '.global' macro RegExpBuiltinsAssembler::FastFlagGetterGlobal(FastJSRegExp): bool; extern operator '.unicode' macro RegExpBuiltinsAssembler::FastFlagGetterUnicode(FastJSRegExp): bool; +extern operator '.unicodeSets' macro +RegExpBuiltinsAssembler::FastFlagGetterUnicodeSets(FastJSRegExp): bool; extern operator '.lastIndex' macro RegExpBuiltinsAssembler::FastLoadLastIndex(FastJSRegExp): Smi; extern operator '.lastIndex=' macro diff --git a/deps/v8/src/objects/js-temporal-objects.cc b/deps/v8/src/objects/js-temporal-objects.cc index 0251da5c7298ce..015b68f60013d2 100644 --- a/deps/v8/src/objects/js-temporal-objects.cc +++ b/deps/v8/src/objects/js-temporal-objects.cc @@ -61,7 +61,7 @@ enum class Unit { // Struct // only for BalanceTime -struct UnbalancedTimeRecordCommon { +struct UnbalancedTimeRecord { double hour; double minute; double second; @@ -70,73 +70,48 @@ struct UnbalancedTimeRecordCommon { double nanosecond; }; -using temporal::DateRecordCommon; -using temporal::DateTimeRecordCommon; -using temporal::TimeRecordCommon; +using temporal::DateRecord; +using temporal::DateTimeRecord; +using temporal::TimeRecord; -struct DateRecord { - DateRecordCommon date; +struct DateRecordWithCalendar { + DateRecord date; Handle<Object> calendar; // String or Undefined }; -struct TimeRecord { - TimeRecordCommon time; +struct TimeRecordWithCalendar { + TimeRecord time; Handle<Object> calendar; // String or Undefined }; -struct DateTimeRecord { - DateRecordCommon date; - TimeRecordCommon time; +struct TimeZoneRecord { + bool z; + Handle<Object> offset_string; // String or Undefined + Handle<Object> name; // String or Undefined +}; + +struct DateTimeRecordWithCalendar { + DateRecord date; + TimeRecord time; + TimeZoneRecord time_zone; Handle<Object> calendar; // String or Undefined }; struct InstantRecord { - DateRecordCommon date; - TimeRecordCommon time; + DateRecord date; + TimeRecord time; Handle<Object> offset_string; // String or Undefined }; -// #sec-temporal-time-duration-records -struct TimeDurationRecord { - double days; - double hours; - double minutes; - double seconds; - double milliseconds; - double microseconds; - double nanoseconds; - - // #sec-temporal-createtimedurationrecord - static Maybe<TimeDurationRecord> Create(Isolate* isolate, double days, - double hours, double minutes, - double seconds, double milliseconds, - double microseconds, - double nanoseconds); -}; - -// #sec-temporal-duration-records -// Cannot reuse DateDurationRecord here due to duplicate days. 
-struct DurationRecord { - double years; - double months; - double weeks; - TimeDurationRecord time_duration; - // #sec-temporal-createdurationrecord - static Maybe<DurationRecord> Create(Isolate* isolate, double years, - double months, double weeks, double days, - double hours, double minutes, - double seconds, double milliseconds, - double microseconds, double nanoseconds); -}; +using temporal::DurationRecord; +using temporal::IsValidDuration; +using temporal::TimeDurationRecord; struct DurationRecordWithRemainder { DurationRecord record; double remainder; }; -// #sec-temporal-isvalidduration -bool IsValidDuration(Isolate* isolate, const DurationRecord& dur); - // #sec-temporal-date-duration-records struct DateDurationRecord { double years; @@ -149,16 +124,6 @@ struct DateDurationRecord { double days); }; -struct TimeZoneRecord { - bool z; - Handle<Object> offset_string; // String or Undefined - Handle<Object> name; // String or Undefined -}; - -struct ZonedDateTimeRecord { - DateTimeRecord date_time; - TimeZoneRecord time_zone; -}; // Options V8_WARN_UNUSED_RESULT Handle<String> UnitToString(Isolate* isolate, Unit unit); @@ -264,16 +229,16 @@ V8_WARN_UNUSED_RESULT MaybeHandle<String> ParseTemporalCalendarString( Isolate* isolate, Handle<String> iso_string); // #sec-temporal-parsetemporaldatetimestring -V8_WARN_UNUSED_RESULT Maybe<DateTimeRecord> ParseTemporalDateTimeString( - Isolate* isolate, Handle<String> iso_string); +V8_WARN_UNUSED_RESULT Maybe<DateTimeRecordWithCalendar> +ParseTemporalDateTimeString(Isolate* isolate, Handle<String> iso_string); // #sec-temporal-parsetemporaldatestring -V8_WARN_UNUSED_RESULT Maybe<DateRecord> ParseTemporalDateString( +V8_WARN_UNUSED_RESULT Maybe<DateRecordWithCalendar> ParseTemporalDateString( Isolate* isolate, Handle<String> iso_string); // #sec-temporal-parsetemporaltimestring -Maybe<TimeRecord> ParseTemporalTimeString(Isolate* isolate, - Handle<String> iso_string); +Maybe<TimeRecordWithCalendar> ParseTemporalTimeString( + Isolate* isolate, Handle<String> iso_string); // #sec-temporal-parsetemporaldurationstring V8_WARN_UNUSED_RESULT Maybe<DurationRecord> ParseTemporalDurationString( @@ -293,7 +258,7 @@ V8_WARN_UNUSED_RESULT MaybeHandle<BigInt> ParseTemporalInstant( V8_WARN_UNUSED_RESULT MaybeHandle<BigInt> ParseTemporalInstant( Isolate* isolate, Handle<String> iso_string); -DateRecordCommon BalanceISODate(Isolate* isolate, const DateRecordCommon& date); +DateRecord BalanceISODate(Isolate* isolate, const DateRecord& date); // Math and Misc @@ -350,13 +315,13 @@ BalancePossiblyInfiniteDuration(Isolate* isolate, Unit largest_unit, } V8_WARN_UNUSED_RESULT Maybe<DurationRecord> DifferenceISODateTime( - Isolate* isolate, const DateTimeRecordCommon& date_time1, - const DateTimeRecordCommon& date_time2, Handle<JSReceiver> calendar, + Isolate* isolate, const DateTimeRecord& date_time1, + const DateTimeRecord& date_time2, Handle<JSReceiver> calendar, Unit largest_unit, Handle<JSReceiver> relative_to, const char* method_name); // #sec-temporal-adddatetime -V8_WARN_UNUSED_RESULT Maybe<DateTimeRecordCommon> AddDateTime( - Isolate* isolate, const DateTimeRecordCommon& date_time, +V8_WARN_UNUSED_RESULT Maybe<DateTimeRecord> AddDateTime( + Isolate* isolate, const DateTimeRecord& date_time, Handle<JSReceiver> calendar, const DurationRecord& addend, Handle<Object> options); @@ -395,17 +360,24 @@ Maybe<RoundingMode> ToTemporalRoundingMode(Isolate* isolate, Handle<JSReceiver> options, RoundingMode fallback, const char* method_name) { + // 1. Return ? 
GetOption(normalizedOptions, "roundingMode", "string", « + // "ceil", "floor", "expand", "trunc", "halfCeil", "halfFloor", "halfExpand", + // "halfTrunc", "halfEven" », fallback). + return GetStringOption<RoundingMode>( isolate, options, "roundingMode", method_name, - {"ceil", "floor", "trunc", "halfExpand"}, - {RoundingMode::kCeil, RoundingMode::kFloor, RoundingMode::kTrunc, - RoundingMode::kHalfExpand}, + {"ceil", "floor", "expand", "trunc", "halfCeil", "halfFloor", + "halfExpand", "halfTrunc", "halfEven"}, + {RoundingMode::kCeil, RoundingMode::kFloor, RoundingMode::kExpand, + RoundingMode::kTrunc, RoundingMode::kHalfCeil, RoundingMode::kHalfFloor, + RoundingMode::kHalfExpand, RoundingMode::kHalfTrunc, + RoundingMode::kHalfEven}, fallback); } V8_WARN_UNUSED_RESULT Handle<BigInt> GetEpochFromISOParts(Isolate* isolate, - const DateTimeRecordCommon& date_time); + const DateTimeRecord& date_time); int32_t DurationSign(Isolate* isolaet, const DurationRecord& dur); @@ -415,31 +387,29 @@ int32_t ISODaysInMonth(Isolate* isolate, int32_t year, int32_t month); // #sec-temporal-isodaysinyear int32_t ISODaysInYear(Isolate* isolate, int32_t year); -bool IsValidTime(Isolate* isolate, const TimeRecordCommon& time); +bool IsValidTime(Isolate* isolate, const TimeRecord& time); // #sec-temporal-isvalidisodate -bool IsValidISODate(Isolate* isolate, const DateRecordCommon& date); +bool IsValidISODate(Isolate* isolate, const DateRecord& date); // #sec-temporal-compareisodate -int32_t CompareISODate(const DateRecordCommon& date1, - const DateRecordCommon& date2); +int32_t CompareISODate(const DateRecord& date1, const DateRecord& date2); // #sec-temporal-balanceisoyearmonth void BalanceISOYearMonth(Isolate* isolate, int32_t* year, int32_t* month); // #sec-temporal-balancetime -V8_WARN_UNUSED_RESULT DateTimeRecordCommon -BalanceTime(const UnbalancedTimeRecordCommon& time); +V8_WARN_UNUSED_RESULT DateTimeRecord +BalanceTime(const UnbalancedTimeRecord& time); // #sec-temporal-differencetime V8_WARN_UNUSED_RESULT Maybe<TimeDurationRecord> DifferenceTime( - Isolate* isolate, const TimeRecordCommon& time1, - const TimeRecordCommon& time2); + Isolate* isolate, const TimeRecord& time1, const TimeRecord& time2); // #sec-temporal-addtime -V8_WARN_UNUSED_RESULT DateTimeRecordCommon -AddTime(Isolate* isolate, const TimeRecordCommon& time, - const TimeDurationRecord& addend); +V8_WARN_UNUSED_RESULT DateTimeRecord AddTime(Isolate* isolate, + const TimeRecord& time, + const TimeDurationRecord& addend); // #sec-temporal-totaldurationnanoseconds Handle<BigInt> TotalDurationNanoseconds(Isolate* isolate, @@ -447,9 +417,9 @@ Handle<BigInt> TotalDurationNanoseconds(Isolate* isolate, double offset_shift); // #sec-temporal-totemporaltimerecord -Maybe<TimeRecordCommon> ToTemporalTimeRecord( - Isolate* isolate, Handle<JSReceiver> temporal_time_like, - const char* method_name); +Maybe<TimeRecord> ToTemporalTimeRecord(Isolate* isolate, + Handle<JSReceiver> temporal_time_like, + const char* method_name); // Calendar Operations // #sec-temporal-calendardateadd @@ -548,7 +518,7 @@ Handle<String> DefaultTimeZone(Isolate* isolate) { // #sec-temporal-isodatetimewithinlimits bool ISODateTimeWithinLimits(Isolate* isolate, - const DateTimeRecordCommon& date_time) { + const DateTimeRecord& date_time) { TEMPORAL_ENTER_FUNC(); /** * Note: It is really overkill to decide within the limit by following the @@ -681,7 +651,7 @@ MaybeHandle<JSTemporalCalendar> CreateTemporalCalendar( // #sec-temporal-createtemporaldate MaybeHandle<JSTemporalPlainDate> 
CreateTemporalDate( Isolate* isolate, Handle<JSFunction> target, Handle<HeapObject> new_target, - const DateRecordCommon& date, Handle<JSReceiver> calendar) { + const DateRecord& date, Handle<JSReceiver> calendar) { TEMPORAL_ENTER_FUNC(); // 1. Assert: isoYear is an integer. // 2. Assert: isoMonth is an integer. @@ -718,8 +688,7 @@ MaybeHandle<JSTemporalPlainDate> CreateTemporalDate( } MaybeHandle<JSTemporalPlainDate> CreateTemporalDate( - Isolate* isolate, const DateRecordCommon& date, - Handle<JSReceiver> calendar) { + Isolate* isolate, const DateRecord& date, Handle<JSReceiver> calendar) { TEMPORAL_ENTER_FUNC(); return CreateTemporalDate(isolate, CONSTRUCTOR(plain_date), CONSTRUCTOR(plain_date), date, calendar); @@ -728,7 +697,7 @@ MaybeHandle<JSTemporalPlainDate> CreateTemporalDate( // #sec-temporal-createtemporaldatetime MaybeHandle<JSTemporalPlainDateTime> CreateTemporalDateTime( Isolate* isolate, Handle<JSFunction> target, Handle<HeapObject> new_target, - const DateTimeRecordCommon& date_time, Handle<JSReceiver> calendar) { + const DateTimeRecord& date_time, Handle<JSReceiver> calendar) { TEMPORAL_ENTER_FUNC(); // 1. Assert: isoYear, isoMonth, isoDay, hour, minute, second, millisecond, // microsecond, and nanosecond are integers. @@ -786,7 +755,7 @@ MaybeHandle<JSTemporalPlainDateTime> CreateTemporalDateTime( } MaybeHandle<JSTemporalPlainDateTime> CreateTemporalDateTimeDefaultTarget( - Isolate* isolate, const DateTimeRecordCommon& date_time, + Isolate* isolate, const DateTimeRecord& date_time, Handle<JSReceiver> calendar) { TEMPORAL_ENTER_FUNC(); return CreateTemporalDateTime(isolate, CONSTRUCTOR(plain_date_time), @@ -799,7 +768,7 @@ MaybeHandle<JSTemporalPlainDateTime> CreateTemporalDateTimeDefaultTarget( namespace temporal { MaybeHandle<JSTemporalPlainDateTime> CreateTemporalDateTime( - Isolate* isolate, const DateTimeRecordCommon& date_time, + Isolate* isolate, const DateTimeRecord& date_time, Handle<JSReceiver> calendar) { return CreateTemporalDateTimeDefaultTarget(isolate, date_time, calendar); } @@ -810,7 +779,7 @@ namespace { // #sec-temporal-createtemporaltime MaybeHandle<JSTemporalPlainTime> CreateTemporalTime( Isolate* isolate, Handle<JSFunction> target, Handle<HeapObject> new_target, - const TimeRecordCommon& time) { + const TimeRecord& time) { TEMPORAL_ENTER_FUNC(); // 2. If ! IsValidTime(hour, minute, second, millisecond, microsecond, // nanosecond) is false, throw a RangeError exception. @@ -847,8 +816,8 @@ MaybeHandle<JSTemporalPlainTime> CreateTemporalTime( return object; } -MaybeHandle<JSTemporalPlainTime> CreateTemporalTime( - Isolate* isolate, const TimeRecordCommon& time) { +MaybeHandle<JSTemporalPlainTime> CreateTemporalTime(Isolate* isolate, + const TimeRecord& time) { TEMPORAL_ENTER_FUNC(); return CreateTemporalTime(isolate, CONSTRUCTOR(plain_time), CONSTRUCTOR(plain_time), time); @@ -1004,6 +973,9 @@ Maybe<DateDurationRecord> DateDurationRecord::Create( return Just(record); } +} // namespace + +namespace temporal { // #sec-temporal-createtimedurationrecord Maybe<TimeDurationRecord> TimeDurationRecord::Create( Isolate* isolate, double days, double hours, double minutes, double seconds, @@ -1050,7 +1022,9 @@ Maybe<DurationRecord> DurationRecord::Create( // ℝ(𝔽(nanoseconds)) }. 
return Just(record); } +} // namespace temporal +namespace { // #sec-temporal-createtemporalduration MaybeHandle<JSTemporalDuration> CreateTemporalDuration( Isolate* isolate, Handle<JSFunction> target, Handle<HeapObject> new_target, @@ -1272,14 +1246,14 @@ Handle<JSTemporalTimeZone> SystemTimeZone(Isolate* isolate) { .ToHandleChecked(); } -DateTimeRecordCommon GetISOPartsFromEpoch(Isolate* isolate, - Handle<BigInt> epoch_nanoseconds) { +DateTimeRecord GetISOPartsFromEpoch(Isolate* isolate, + Handle<BigInt> epoch_nanoseconds) { TEMPORAL_ENTER_FUNC(); - DateTimeRecordCommon result; + DateTimeRecord result; // 1. Assert: ! IsValidEpochNanoseconds(ℤ(epochNanoseconds)) is true. DCHECK(IsValidEpochNanoseconds(isolate, epoch_nanoseconds)); // 2. Let remainderNs be epochNanoseconds modulo 10^6. - Handle<BigInt> million = BigInt::FromInt64(isolate, 1000000); + Handle<BigInt> million = BigInt::FromUint64(isolate, 1000000); Handle<BigInt> remainder_ns = BigInt::Remainder(isolate, epoch_nanoseconds, million).ToHandleChecked(); // Need to do some remainder magic to negative remainder. @@ -1350,14 +1324,14 @@ DateTimeRecordCommon GetISOPartsFromEpoch(Isolate* isolate, } // #sec-temporal-balanceisodatetime -DateTimeRecordCommon BalanceISODateTime(Isolate* isolate, - const DateTimeRecordCommon& date_time) { +DateTimeRecord BalanceISODateTime(Isolate* isolate, + const DateTimeRecord& date_time) { TEMPORAL_ENTER_FUNC(); // 1. Assert: year, month, day, hour, minute, second, millisecond, // microsecond, and nanosecond are integers. // 2. Let balancedTime be ! BalanceTime(hour, minute, second, millisecond, // microsecond, nanosecond). - DateTimeRecordCommon balanced_time = + DateTimeRecord balanced_time = BalanceTime({static_cast<double>(date_time.time.hour), static_cast<double>(date_time.time.minute), static_cast<double>(date_time.time.second), @@ -1366,9 +1340,9 @@ DateTimeRecordCommon BalanceISODateTime(Isolate* isolate, static_cast<double>(date_time.time.nanosecond)}); // 3. Let balancedDate be ! BalanceISODate(year, month, day + // balancedTime.[[Days]]). - DateRecordCommon added_date = date_time.date; + DateRecord added_date = date_time.date; added_date.day += balanced_time.date.day; - DateRecordCommon balanced_date = BalanceISODate(isolate, added_date); + DateRecord balanced_date = BalanceISODate(isolate, added_date); // 4. Return the Record { [[Year]]: balancedDate.[[Year]], [[Month]]: // balancedDate.[[Month]], [[Day]]: balancedDate.[[Day]], [[Hour]]: // balancedTime.[[Hour]], [[Minute]]: balancedTime.[[Minute]], [[Second]]: @@ -1379,7 +1353,7 @@ DateTimeRecordCommon BalanceISODateTime(Isolate* isolate, } // #sec-temporal-roundtowardszero -int64_t RoundTowardsZero(double x) { +double RoundTowardsZero(double x) { // 1. Return the mathematical value that is the same sign as x and whose // magnitude is floor(abs(x)). if (x < 0) { @@ -1399,26 +1373,32 @@ Handle<String> TemporalDurationToString(Isolate* isolate, // seconds, milliseconds, microseconds, nanoseconds). DurationRecord dur = duration; int32_t sign = DurationSign(isolate, dur); - + // Note: for the operations below, to avoid losing precision in the + // microseconds..seconds values while the result may exceed the precision + // limit of a double, we use an extra double, xx_add, per unit to hold the + // carried value. // 2. Set microseconds to microseconds + RoundTowardsZero(nanoseconds / 1000). - dur.time_duration.microseconds += + double microseconds_add = RoundTowardsZero(dur.time_duration.nanoseconds / 1000); // 3. Set nanoseconds to remainder(nanoseconds, 1000).
dur.time_duration.nanoseconds = - static_cast<int64_t>(dur.time_duration.nanoseconds) % 1000; + std::fmod(dur.time_duration.nanoseconds, 1000); // 4. Set milliseconds to milliseconds + RoundTowardsZero(microseconds / // 1000). - dur.time_duration.milliseconds += - RoundTowardsZero(dur.time_duration.microseconds / 1000); + double milliseconds_add = RoundTowardsZero( + dur.time_duration.microseconds / 1000 + microseconds_add / 1000); // 5. Set microseconds to remainder(microseconds, 1000). dur.time_duration.microseconds = - static_cast<int64_t>(dur.time_duration.microseconds) % 1000; + std::fmod(std::fmod(dur.time_duration.microseconds, 1000) + + std::fmod(microseconds_add, 1000), + 1000); // 6. Set seconds to seconds + RoundTowardsZero(milliseconds / 1000). - dur.time_duration.seconds += - RoundTowardsZero(dur.time_duration.milliseconds / 1000); + double seconds_add = RoundTowardsZero(dur.time_duration.milliseconds / 1000 + + milliseconds_add / 1000); // 7. Set milliseconds to remainder(milliseconds, 1000). dur.time_duration.milliseconds = - static_cast<int64_t>(dur.time_duration.milliseconds) % 1000; + std::fmod(std::fmod(dur.time_duration.milliseconds, 1000) + + std::fmod(milliseconds_add, 1000), + 1000); // 8. Let datePart be "". IncrementalStringBuilder date_part(isolate); @@ -1485,7 +1465,8 @@ Handle<String> TemporalDurationToString(Isolate* isolate, // 16. If any of seconds, milliseconds, microseconds, and nanoseconds are not // 0; or years, months, weeks, days, hours, and minutes are all 0, or // precision is not "auto" then - if ((dur.time_duration.seconds != 0 || dur.time_duration.milliseconds != 0 || + if ((dur.time_duration.seconds != 0 || seconds_add != 0 || + dur.time_duration.milliseconds != 0 || dur.time_duration.microseconds != 0 || dur.time_duration.nanoseconds != 0) || (dur.years == 0 && dur.months == 0 && dur.weeks == 0 && @@ -1527,8 +1508,31 @@ Handle<String> TemporalDurationToString(Isolate* isolate, } } // f. Let secondsPart be abs(seconds) formatted as a decimal number. - SNPrintF(buf, "%.0f", std::abs(dur.time_duration.seconds)); - seconds_part.AppendCString(buf.data()); + if (std::abs(seconds_add + dur.time_duration.seconds) < kMaxSafeInteger) { + // Fast path: seconds_add + dur.time_duration.seconds is within the range + // in which a double keeps integer precision. + dur.time_duration.seconds += seconds_add; + SNPrintF(buf, "%.0f", std::abs(dur.time_duration.seconds)); + seconds_part.AppendCString(buf.data()); + } else { + // Slow path: seconds_add + dur.time_duration.seconds is outside the range + // in which a double keeps integer precision, so do the addition and the + // formatting via BigInt. + seconds_part.AppendString( + BigInt::ToString( + isolate, + BigInt::Add( + isolate, + BigInt::FromNumber(isolate, isolate->factory()->NewNumber( + std::abs(seconds_add))) + .ToHandleChecked(), + BigInt::FromNumber(isolate, + isolate->factory()->NewNumber( + std::abs(dur.time_duration.seconds))) + .ToHandleChecked()) + .ToHandleChecked()) + .ToHandleChecked()); + } // g. If decimalPart is not "", then if (decimal_part.Length() != 0) { @@ -1621,8 +1625,7 @@ void FormatSecondsStringPart(IncrementalStringBuilder* builder, int32_t second, } // #sec-temporal-temporaltimetostring -Handle<String> TemporalTimeToString(Isolate* isolate, - const TimeRecordCommon& time, +Handle<String> TemporalTimeToString(Isolate* isolate, const TimeRecord& time, Precision precision) { // 1. Assert: hour, minute, second, millisecond, microsecond and nanosecond // are integers.
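The fmod-based rewrite in the hunk above is easier to follow outside the diff. Below is a minimal standalone sketch of the same balancing idea, assuming nothing from V8: the names SubSeconds and Balance and the 9e15 demo value are illustrative only, and std::trunc stands in for the patch's sign-preserving RoundTowardsZero.

#include <cmath>
#include <cstdio>

// Illustrative stand-in for the duration's sub-second fields.
struct SubSeconds {
  double seconds;
  double milliseconds;
  double microseconds;
  double nanoseconds;
};

// Same sign as x, magnitude floor(abs(x)) -- i.e. truncation.
double RoundTowardsZero(double x) { return std::trunc(x); }

// Balance nanoseconds up into seconds with std::fmod, mirroring steps 2-7
// above: each unit's carry lives in its own double (the xx_add values), so
// no intermediate sum is forced through a static_cast<int64_t>, which is
// undefined beyond the int64_t range and silently lossy above 2^53.
SubSeconds Balance(SubSeconds t) {
  double microseconds_add = RoundTowardsZero(t.nanoseconds / 1000);
  t.nanoseconds = std::fmod(t.nanoseconds, 1000);
  double milliseconds_add =
      RoundTowardsZero(t.microseconds / 1000 + microseconds_add / 1000);
  t.microseconds = std::fmod(
      std::fmod(t.microseconds, 1000) + std::fmod(microseconds_add, 1000),
      1000);
  double seconds_add =
      RoundTowardsZero(t.milliseconds / 1000 + milliseconds_add / 1000);
  t.milliseconds = std::fmod(
      std::fmod(t.milliseconds, 1000) + std::fmod(milliseconds_add, 1000),
      1000);
  // The patched code switches to BigInt once |seconds| would pass 2^53.
  t.seconds += seconds_add;
  return t;
}

int main() {
  // 9e15 nanoseconds balances to 9,000,000 seconds with no sub-second rest.
  SubSeconds t = Balance({0, 0, 0, 9e15});
  std::printf("%.0f s %.0f ms %.0f us %.0f ns\n", t.seconds, t.milliseconds,
              t.microseconds, t.nanoseconds);
  return 0;
}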
@@ -1667,7 +1670,7 @@ MaybeHandle<JSTemporalPlainDateTime> BuiltinTimeZoneGetPlainDateTimeFor( GetOffsetNanosecondsFor(isolate, time_zone, instant, method_name), Handle<JSTemporalPlainDateTime>()); // 2. Let result be ! GetISOPartsFromEpoch(instant.[[Nanoseconds]]). - DateTimeRecordCommon result = + DateTimeRecord result = GetISOPartsFromEpoch(isolate, handle(instant->nanoseconds(), isolate)); // 3. Set result to ! BalanceISODateTime(result.[[Year]], result.[[Month]], @@ -1860,7 +1863,7 @@ MaybeHandle<JSTemporalInstant> DisambiguatePossibleInstants( // dateTime.[[ISOMicrosecond]], dateTime.[[ISONanosecond]], // dateTime.[[Calendar]], 0, 0, 0, 0, 0, 0, 0, 0, 0, −nanoseconds, // undefined). - DateTimeRecordCommon earlier; + DateTimeRecord earlier; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, earlier, AddDateTime( @@ -1911,7 +1914,7 @@ MaybeHandle<JSTemporalInstant> DisambiguatePossibleInstants( // dateTime.[[ISOSecond]], dateTime.[[ISOMillisecond]], // dateTime.[[ISOMicrosecond]], dateTime.[[ISONanosecond]], // dateTime.[[Calendar]], 0, 0, 0, 0, 0, 0, 0, 0, 0, nanoseconds, undefined). - DateTimeRecordCommon later; + DateTimeRecord later; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, later, AddDateTime(isolate, @@ -2371,21 +2374,18 @@ MaybeHandle<JSReceiver> ToTemporalCalendar( ASSIGN_RETURN_ON_EXCEPTION(isolate, identifier, Object::ToString(isolate, temporal_calendar_like), JSReceiver); - // 3. If ! IsBuiltinCalendar(identifier) is false, then + // 3. Let identifier be ? ParseTemporalCalendarString(identifier). + ASSIGN_RETURN_ON_EXCEPTION(isolate, identifier, + ParseTemporalCalendarString(isolate, identifier), + JSReceiver); + // 4. If IsBuiltinCalendar(identifier) is false, throw a RangeError + // exception. if (!IsBuiltinCalendar(isolate, identifier)) { - // a. Let identifier be ? ParseTemporalCalendarString(identifier). - ASSIGN_RETURN_ON_EXCEPTION(isolate, identifier, - ParseTemporalCalendarString(isolate, identifier), - JSReceiver); - // b. If IsBuiltinCalendar(identifier) is false, throw a RangeError - // exception. - if (!IsBuiltinCalendar(isolate, identifier)) { - THROW_NEW_ERROR( - isolate, NewRangeError(MessageTemplate::kInvalidCalendar, identifier), - JSReceiver); - } + THROW_NEW_ERROR( + isolate, NewRangeError(MessageTemplate::kInvalidCalendar, identifier), + JSReceiver); } - // 4. Return ? CreateTemporalCalendar(identifier). + // 5. Return ? CreateTemporalCalendar(identifier). return CreateTemporalCalendar(isolate, identifier); } @@ -2557,7 +2557,7 @@ MaybeHandle<JSTemporalPlainDate> ToTemporalDate(Isolate* isolate, Object::ToString(isolate, item_obj), JSTemporalPlainDate); // 6. Let result be ? ParseTemporalDateString(string). - DateRecord result; + DateRecordWithCalendar result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ParseTemporalDateString(isolate, string), Handle<JSTemporalPlainDate>()); @@ -2622,9 +2622,8 @@ Maybe<double> ToIntegerWithoutRounding(Isolate* isolate, namespace temporal { // #sec-temporal-regulatetime -Maybe<TimeRecordCommon> RegulateTime(Isolate* isolate, - const TimeRecordCommon& time, - ShowOverflow overflow) { +Maybe<TimeRecord> RegulateTime(Isolate* isolate, const TimeRecord& time, + ShowOverflow overflow) { TEMPORAL_ENTER_FUNC(); // 1. Assert: hour, minute, second, millisecond, microsecond and nanosecond @@ -2632,7 +2631,7 @@ Maybe<TimeRecordCommon> RegulateTime(Isolate* isolate, // 2. Assert: overflow is either "constrain" or "reject". 
switch (overflow) { case ShowOverflow::kConstrain: { - TimeRecordCommon result(time); + TimeRecord result(time); // 3. If overflow is "constrain", then // a. Return ! ConstrainTime(hour, minute, second, millisecond, // microsecond, nanosecond). @@ -2651,7 +2650,7 @@ Maybe<TimeRecordCommon> RegulateTime(Isolate* isolate, if (!IsValidTime(isolate, time)) { THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), - Nothing<TimeRecordCommon>()); + Nothing<TimeRecord>()); } // b. Return the new Record { [[Hour]]: hour, [[Minute]]: minute, // [[Second]]: second, [[Millisecond]]: millisecond, [[Microsecond]]: @@ -2665,7 +2664,7 @@ MaybeHandle<JSTemporalPlainTime> ToTemporalTime( Isolate* isolate, Handle<Object> item_obj, const char* method_name, ShowOverflow overflow = ShowOverflow::kConstrain) { Factory* factory = isolate->factory(); - TimeRecord result; + TimeRecordWithCalendar result; // 2. Assert: overflow is either "constrain" or "reject". // 3. If Type(item) is Object, then if (item_obj->IsJSReceiver()) { @@ -3406,88 +3405,170 @@ MaybeHandle<String> BuiltinTimeZoneGetOffsetStringFor( } // #sec-temporal-parseisodatetime -Maybe<DateTimeRecord> ParseISODateTime(Isolate* isolate, - Handle<String> iso_string, - const ParsedISO8601Result& parsed) { +Maybe<DateTimeRecordWithCalendar> ParseISODateTime( + Isolate* isolate, Handle<String> iso_string, + const ParsedISO8601Result& parsed); +// Note: We split ParseISODateTime into two functions because the spec text +// repeats some parsing unnecessarily. A function calling ParseISODateTime +// from an AO that has already called ParseText() for TemporalDateTimeString, +// TemporalInstantString, TemporalMonthDayString, TemporalTimeString, +// TemporalYearMonthString, or TemporalZonedDateTimeString uses the overload +// above. For the usage in ParseTemporalTimeZoneString, we use the following +// version. +Maybe<DateTimeRecordWithCalendar> ParseISODateTime(Isolate* isolate, + Handle<String> iso_string) { + // 2. For each nonterminal goal of « TemporalDateTimeString, + // TemporalInstantString, TemporalMonthDayString, TemporalTimeString, + // TemporalYearMonthString, TemporalZonedDateTimeString », do + + // a. If parseResult is not a Parse Node, set parseResult to + // ParseText(StringToCodePoints(isoString), goal). + base::Optional<ParsedISO8601Result> parsed; + if ((parsed = + TemporalParser::ParseTemporalDateTimeString(isolate, iso_string)) + .has_value() || + (parsed = TemporalParser::ParseTemporalInstantString(isolate, iso_string)) + .has_value() || + (parsed = + TemporalParser::ParseTemporalMonthDayString(isolate, iso_string)) + .has_value() || + (parsed = TemporalParser::ParseTemporalTimeString(isolate, iso_string)) + .has_value() || + (parsed = + TemporalParser::ParseTemporalYearMonthString(isolate, iso_string)) + .has_value() || + (parsed = TemporalParser::ParseTemporalZonedDateTimeString(isolate, + iso_string)) + .has_value()) { + return ParseISODateTime(isolate, iso_string, *parsed); + } + + // 3. If parseResult is not a Parse Node, throw a RangeError exception. + THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), + Nothing<DateTimeRecordWithCalendar>()); +} + +Maybe<DateTimeRecordWithCalendar> ParseISODateTime( + Isolate* isolate, Handle<String> iso_string, + const ParsedISO8601Result& parsed) { TEMPORAL_ENTER_FUNC(); - DateTimeRecord result; - // 5. Set year to ! ToIntegerOrInfinity(year). + DateTimeRecordWithCalendar result; + // 6. Set yearMV to ! ToIntegerOrInfinity(year). result.date.year = parsed.date_year; - // 6.
If month is undefined, then + // 7. If month is undefined, then if (parsed.date_month_is_undefined()) { - // a. Set month to 1. + // a. Set monthMV to 1. result.date.month = 1; - // 7. Else, + // 8. Else, } else { - // a. Set month to ! ToIntegerOrInfinity(month). + // a. Set monthMV to ! ToIntegerOrInfinity(month). result.date.month = parsed.date_month; } - // 8. If day is undefined, then + // 9. If day is undefined, then if (parsed.date_day_is_undefined()) { - // a. Set day to 1. + // a. Set dayMV to 1. result.date.day = 1; - // 9. Else, + // 10. Else, } else { - // a. Set day to ! ToIntegerOrInfinity(day). + // a. Set dayMV to ! ToIntegerOrInfinity(day). result.date.day = parsed.date_day; } - // 10. Set hour to ! ToIntegerOrInfinity(hour). + // 11. Set hourMV to ! ToIntegerOrInfinity(hour). result.time.hour = parsed.time_hour_is_undefined() ? 0 : parsed.time_hour; - // 11. Set minute to ! ToIntegerOrInfinity(minute). + // 12. Set minuteMV to ! ToIntegerOrInfinity(minute). result.time.minute = parsed.time_minute_is_undefined() ? 0 : parsed.time_minute; - // 12. Set second to ! ToIntegerOrInfinity(second). + // 13. Set secondMV to ! ToIntegerOrInfinity(second). result.time.second = parsed.time_second_is_undefined() ? 0 : parsed.time_second; - // 13. If second is 60, then + // 14. If secondMV is 60, then if (result.time.second == 60) { - // a. Set second to 59. + // a. Set secondMV to 59. result.time.second = 59; } - // 14. If fraction is not undefined, then + // 15. If fSeconds is not empty, then if (!parsed.time_nanosecond_is_undefined()) { - // a. Set fraction to the string-concatenation of the previous value of - // fraction and the string "000000000". - // b. Let millisecond be the String value equal to the substring of fraction - // from 0 to 3. c. Set millisecond to ! ToIntegerOrInfinity(millisecond). + // a. Let fSecondsDigits be the substring of CodePointsToString(fSeconds) + // from 1. + // + // b. Let fSecondsDigitsExtended be the string-concatenation of + // fSecondsDigits and "000000000". + // + // c. Let millisecond be the substring of fSecondsDigitsExtended from 0 to + // 3. + // + // d. Let microsecond be the substring of fSecondsDigitsExtended from 3 to + // 6. + // + // e. Let nanosecond be the substring of fSecondsDigitsExtended from 6 to 9. + // + // f. Let millisecondMV be ! ToIntegerOrInfinity(millisecond). result.time.millisecond = parsed.time_nanosecond / 1000000; - // d. Let microsecond be the String value equal to the substring of fraction - // from 3 to 6. e. Set microsecond to ! ToIntegerOrInfinity(microsecond). + // g. Let microsecondMV be ! ToIntegerOrInfinity(microsecond). result.time.microsecond = (parsed.time_nanosecond / 1000) % 1000; - // f. Let nanosecond be the String value equal to the substring of fraction - // from 6 to 9. g. Set nanosecond to ! ToIntegerOrInfinity(nanosecond). + // h. Let nanosecondMV be ! ToIntegerOrInfinity(nanosecond). result.time.nanosecond = (parsed.time_nanosecond % 1000); - // 15. Else, + // 16. Else, } else { - // a. Let millisecond be 0. + // a. Let millisecondMV be 0. result.time.millisecond = 0; - // b. Let microsecond be 0. + // b. Let microsecondMV be 0. result.time.microsecond = 0; - // c. Let nanosecond be 0. + // c. Let nanosecondMV be 0. result.time.nanosecond = 0; } - // 16. If ! IsValidISODate(year, month, day) is false, throw a RangeError - // exception. + // 17. If ! IsValidISODate(yearMV, monthMV, dayMV) is false, throw a + // RangeError exception. 
if (!IsValidISODate(isolate, result.date)) { THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), - Nothing<DateTimeRecord>()); + Nothing<DateTimeRecordWithCalendar>()); } - // 17. If ! IsValidTime(hour, minute, second, millisecond, microsecond, - // nanosecond) is false, throw a RangeError exception. + // 18. If ! IsValidTime(hourMV, minuteMV, secondMV, millisecondMV, + // microsecondMV, nanosecond) is false, throw a RangeError exception. if (!IsValidTime(isolate, result.time)) { THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), - Nothing<DateTimeRecord>()); + Nothing<DateTimeRecordWithCalendar>()); } - // 22. If calendar is empty, then + // 19. Let timeZoneResult be the Record { [[Z]]: false, [[OffsetString]]: + // undefined, [[Name]]: undefined }. + result.time_zone = {false, isolate->factory()->undefined_value(), + isolate->factory()->undefined_value()}; + // 20. If parseResult contains a TimeZoneIdentifier Parse Node, then + if (parsed.tzi_name_length != 0) { + // a. Let name be the source text matched by the TimeZoneIdentifier Parse + // Node contained within parseResult. + // + // b. Set timeZoneResult.[[Name]] to CodePointsToString(name). + result.time_zone.name = isolate->factory()->NewSubString( + iso_string, parsed.tzi_name_start, + parsed.tzi_name_start + parsed.tzi_name_length); + } + // 21. If parseResult contains a UTCDesignator Parse Node, then + if (parsed.utc_designator) { + // a. Set timeZoneResult.[[Z]] to true. + result.time_zone.z = true; + // 22. Else, + } else { + // a. If parseResult contains a TimeZoneNumericUTCOffset Parse Node, then + if (parsed.offset_string_length != 0) { + // i. Let offset be the source text matched by the + // TimeZoneNumericUTCOffset Parse Node contained within parseResult. + // ii. Set timeZoneResult.[[OffsetString]] to CodePointsToString(offset). + result.time_zone.offset_string = isolate->factory()->NewSubString( + iso_string, parsed.offset_string_start, + parsed.offset_string_start + parsed.offset_string_length); + } + } + + // 23. If calendar is empty, then if (parsed.calendar_name_length == 0) { // a. Let calendarVal be undefined. result.calendar = isolate->factory()->undefined_value(); - // 23. Else, + // 24. Else, } else { // a. Let calendarVal be CodePointsToString(calendar). result.calendar = isolate->factory()->NewSubString( @@ -3497,29 +3578,30 @@ Maybe<DateTimeRecord> ParseISODateTime(Isolate* isolate, // 24. Return the Record { [[Year]]: yearMV, [[Month]]: monthMV, [[Day]]: // dayMV, [[Hour]]: hourMV, [[Minute]]: minuteMV, [[Second]]: secondMV, // [[Millisecond]]: millisecondMV, [[Microsecond]]: microsecondMV, - // [[Nanosecond]]: nanosecondMV, [[Calendar]]: calendarVal, }. + // [[Nanosecond]]: nanosecondMV, [[TimeZone]]: timeZoneResult, + // [[Calendar]]: calendarVal, }. return Just(result); } // #sec-temporal-parsetemporaldatestring -Maybe<DateRecord> ParseTemporalDateString(Isolate* isolate, - Handle<String> iso_string) { +Maybe<DateRecordWithCalendar> ParseTemporalDateString( + Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); // 1. Let parts be ? ParseTemporalDateTimeString(isoString). // 2. Return the Record { [[Year]]: parts.[[Year]], [[Month]]: // parts.[[Month]], [[Day]]: parts.[[Day]], [[Calendar]]: parts.[[Calendar]] // }. 
- DateTimeRecord record; + DateTimeRecordWithCalendar record; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, record, ParseTemporalDateTimeString(isolate, iso_string), - Nothing<DateRecord>()); - DateRecord result = {record.date, record.calendar}; + Nothing<DateRecordWithCalendar>()); + DateRecordWithCalendar result = {record.date, record.calendar}; return Just(result); } // #sec-temporal-parsetemporaltimestring -Maybe<TimeRecord> ParseTemporalTimeString(Isolate* isolate, - Handle<String> iso_string) { +Maybe<TimeRecordWithCalendar> ParseTemporalTimeString( + Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); // 1. Assert: Type(isoString) is String. @@ -3529,27 +3611,29 @@ Maybe<TimeRecord> ParseTemporalTimeString(Isolate* isolate, TemporalParser::ParseTemporalTimeString(isolate, iso_string); if (!parsed.has_value()) { // a. Throw a *RangeError* exception. - THROW_NEW_ERROR_RETURN_VALUE( - isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), Nothing<TimeRecord>()); + THROW_NEW_ERROR_RETURN_VALUE(isolate, + NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), + Nothing<TimeRecordWithCalendar>()); } // 3. If _isoString_ contains a |UTCDesignator|, then if (parsed->utc_designator) { // a. Throw a *RangeError* exception. - THROW_NEW_ERROR_RETURN_VALUE( - isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), Nothing<TimeRecord>()); + THROW_NEW_ERROR_RETURN_VALUE(isolate, + NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), + Nothing<TimeRecordWithCalendar>()); } // 3. Let result be ? ParseISODateTime(isoString). - DateTimeRecord result; + DateTimeRecordWithCalendar result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ParseISODateTime(isolate, iso_string, *parsed), - Nothing<TimeRecord>()); + Nothing<TimeRecordWithCalendar>()); // 4. Return the Record { [[Hour]]: result.[[Hour]], [[Minute]]: // result.[[Minute]], [[Second]]: result.[[Second]], [[Millisecond]]: // result.[[Millisecond]], [[Microsecond]]: result.[[Microsecond]], // [[Nanosecond]]: result.[[Nanosecond]], [[Calendar]]: result.[[Calendar]] }. - TimeRecord ret = {result.time, result.calendar}; + TimeRecordWithCalendar ret = {result.time, result.calendar}; return Just(ret); } @@ -3558,9 +3642,8 @@ Maybe<InstantRecord> ParseTemporalInstantString(Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); - // 1. Assert: Type(isoString) is String. - // 2. If isoString does not satisfy the syntax of a TemporalInstantString - // (see 13.33), then + // 1. If ParseText(StringToCodePoints(isoString), TemporalInstantString) is a + // List of errors, throw a RangeError exception. base::Optional<ParsedISO8601Result> parsed = TemporalParser::ParseTemporalInstantString(isolate, iso_string); if (!parsed.has_value()) { @@ -3569,26 +3652,21 @@ Maybe<InstantRecord> ParseTemporalInstantString(Isolate* isolate, Nothing<InstantRecord>()); } - // 3. Let result be ! ParseISODateTime(isoString). - DateTimeRecord result; + // 2. Let result be ? ParseISODateTime(isoString). + DateTimeRecordWithCalendar result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ParseISODateTime(isolate, iso_string, *parsed), Nothing<InstantRecord>()); - // 4. Let timeZoneResult be ? ParseTemporalTimeZoneString(isoString). - TimeZoneRecord time_zone_result; - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, time_zone_result, - ParseTemporalTimeZoneString(isolate, iso_string), - Nothing<InstantRecord>()); - // 5. Let offsetString be timeZoneResult.[[OffsetString]]. - Handle<Object> offset_string = time_zone_result.offset_string; - // 6. 
If timeZoneResult.[[Z]] is true, then - if (time_zone_result.z) { + // 3. Let offsetString be result.[[TimeZone]].[[OffsetString]]. + Handle<Object> offset_string = result.time_zone.offset_string; + + // 4. If result.[[TimeZone]].[[Z]] is true, then + if (result.time_zone.z) { // a. Set offsetString to "+00:00". offset_string = isolate->factory()->NewStringFromStaticChars("+00:00"); } - // 7. Assert: offsetString is not undefined. + // 5. Assert: offsetString is not undefined. DCHECK(!offset_string->IsUndefined()); // 6. Return the new Record { [[Year]]: result.[[Year]], @@ -3604,7 +3682,7 @@ Maybe<InstantRecord> ParseTemporalInstantString(Isolate* isolate, } // #sec-temporal-parsetemporalrelativetostring -Maybe<ZonedDateTimeRecord> ParseTemporalRelativeToString( +Maybe<DateTimeRecordWithCalendar> ParseTemporalRelativeToString( Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); @@ -3616,41 +3694,10 @@ Maybe<ZonedDateTimeRecord> ParseTemporalRelativeToString( // a. Throw a *RangeError* exception. THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), - Nothing<ZonedDateTimeRecord>()); + Nothing<DateTimeRecordWithCalendar>()); } - // 2. Let result be ? ParseISODateTime(isoString). - ZonedDateTimeRecord result; - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, result.date_time, ParseISODateTime(isolate, iso_string, *parsed), - Nothing<ZonedDateTimeRecord>()); - - // 3. If ParseText(StringToCodePoints(isoString), TemporalZonedDateTimeString) - // is a Parse Node, then - base::Optional<ParsedISO8601Result> parsed2 = - TemporalParser::ParseTemporalZonedDateTimeString(isolate, iso_string); - if (parsed2.has_value()) { - // a. Let timeZoneResult be ! ParseTemporalTimeZoneString(isoString). - result.time_zone = - ParseTemporalTimeZoneString(isolate, iso_string).ToChecked(); - // b. Let z be timeZoneResult.[[Z]]. - // c. Let offsetString be timeZoneResult.[[OffsetString]]. - // d. Let timeZone be timeZoneResult.[[Name]]. - } else { - // a. Let z be false. - result.time_zone.z = false; - // b. Let offsetString be undefined. - result.time_zone.offset_string = isolate->factory()->undefined_value(); - // c. Let timeZone be undefined. - result.time_zone.name = isolate->factory()->undefined_value(); - } - // 5. Return the Record { [[Year]]: result.[[Year]], [[Month]]: - // result.[[Month]], [[Day]]: result.[[Day]], [[Hour]]: result.[[Hour]], - // [[Minute]]: result.[[Minute]], [[Second]]: result.[[Second]], - // [[Millisecond]]: result.[[Millisecond]], [[Microsecond]]: - // result.[[Microsecond]], [[Nanosecond]]: result.[[Nanosecond]], - // [[Calendar]]: result.[[Calendar]], [[TimeZoneZ]]: z, - // [[TimeZoneOffsetString]]: offsetString, [[TimeZoneIANAName]]: timeZone }. - return Just(result); + // 2. Return ? ParseISODateTime(isoString). + return ParseISODateTime(isolate, iso_string, *parsed); } // #sec-temporal-parsetemporalinstant @@ -3699,7 +3746,7 @@ MaybeHandle<BigInt> ParseTemporalInstant(Isolate* isolate, } // #sec-temporal-parsetemporalzoneddatetimestring -Maybe<ZonedDateTimeRecord> ParseTemporalZonedDateTimeString( +Maybe<DateTimeRecordWithCalendar> ParseTemporalZonedDateTimeString( Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); // 1.
If ParseText(StringToCodePoints(isoString), TemporalZonedDateTimeString) @@ -3709,29 +3756,11 @@ Maybe<ZonedDateTimeRecord> ParseTemporalZonedDateTimeString( if (!parsed.has_value()) { THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), - Nothing<ZonedDateTimeRecord>()); + Nothing<DateTimeRecordWithCalendar>()); } - // 2. Let result be ? ParseISODateTime(isoString). - ZonedDateTimeRecord result; - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, result.date_time, ParseISODateTime(isolate, iso_string, *parsed), - Nothing<ZonedDateTimeRecord>()); - - // 3. Let timeZoneResult be ? ParseTemporalTimeZoneString(isoString). - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, result.time_zone, - ParseTemporalTimeZoneString(isolate, iso_string), - Nothing<ZonedDateTimeRecord>()); - // 4. Return the Record { [[Year]]: result.[[Year]], [[Month]]: - // result.[[Month]], [[Day]]: result.[[Day]], [[Hour]]: result.[[Hour]], - // [[Minute]]: result.[[Minute]], [[Second]]: result.[[Second]], - // [[Millisecond]]: result.[[Millisecond]], [[Microsecond]]: - // result.[[Microsecond]], [[Nanosecond]]: result.[[Nanosecond]], - // [[Calendar]]: result.[[Calendar]], [[TimeZoneZ]]: timeZoneResult.[[Z]], - // [[TimeZoneOffsetString]]: timeZoneResult.[[OffsetString]], - // [[TimeZoneName]]: timeZoneResult.[[Name]] }. - return Just(result); + // 2. Return ? ParseISODateTime(isoString). + return ParseISODateTime(isolate, iso_string, *parsed); } // #sec-temporal-createdurationrecord @@ -3899,57 +3928,40 @@ Maybe<DurationRecord> ParseTemporalDurationString(Isolate* isolate, } // #sec-temporal-parsetemporaltimezonestring -Maybe<TimeZoneRecord> ParseTemporalTimeZoneString(Isolate* isolate, - Handle<String> iso_string) { +Maybe<TimeZoneRecord> ParseTemporalTimeZoneString( + Isolate* isolate, Handle<String> time_zone_string) { TEMPORAL_ENTER_FUNC(); - // 1. Assert: Type(isoString) is String. - // 2. If isoString does not satisfy the syntax of a TemporalTimeZoneString - // (see 13.33), then + // 1. Let parseResult be ParseText(StringToCodePoints(timeZoneString), + // TimeZoneIdentifier). base::Optional<ParsedISO8601Result> parsed = - TemporalParser::ParseTemporalTimeZoneString(isolate, iso_string); - if (!parsed.has_value()) { + TemporalParser::ParseTimeZoneIdentifier(isolate, time_zone_string); + // 2. If parseResult is a Parse Node, then + if (parsed.has_value()) { + // a. Return the Record { [[Z]]: false, [[OffsetString]]: undefined, + // [[Name]]: timeZoneString }. + return Just(TimeZoneRecord( + {false, isolate->factory()->undefined_value(), time_zone_string})); + } + + // 3. Let result be ? ParseISODateTime(timeZoneString). + DateTimeRecordWithCalendar result; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, result, ParseISODateTime(isolate, time_zone_string), + Nothing<TimeZoneRecord>()); + + // 4. Let timeZoneResult be result.[[TimeZone]]. + // 5. If timeZoneResult.[[Z]] is false, timeZoneResult.[[OffsetString]] is + // undefined, and timeZoneResult.[[Name]] is undefined, throw a RangeError + // exception. + if (!result.time_zone.z && result.time_zone.offset_string->IsUndefined() && + result.time_zone.name->IsUndefined()) { THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), Nothing<TimeZoneRecord>()); } - // 3. 
Let z, sign, hours, minutes, seconds, fraction and name be the parts of - // isoString produced respectively by the UTCDesignator, - // TimeZoneUTCOffsetSign, TimeZoneUTCOffsetHour, TimeZoneUTCOffsetMinute, - // TimeZoneUTCOffsetSecond, TimeZoneUTCOffsetFraction, and TimeZoneIANAName - // productions, or undefined if not present. - // 4. If name is empty, then - // a. Set name to undefined. - Handle<Object> name = isolate->factory()->undefined_value(); - // 5. Else, - // a. Set name to CodePointsToString(name). - if (parsed->tzi_name_length > 0) { - name = isolate->factory()->NewSubString( - iso_string, parsed->tzi_name_start, - parsed->tzi_name_start + parsed->tzi_name_length); - } - // 6. If z is not undefined, then - if (parsed->utc_designator) { - // a. Return the Record { [[Z]]: true, [[OffsetString]]: undefined, - // [[Name]]: name }. - return Just( - TimeZoneRecord({true, isolate->factory()->undefined_value(), name})); - } - Handle<Object> offset_string; - // 7. If offsetString is empty, then - if (parsed->offset_string_length == 0) { - // a. Set offsetString to undefined. - offset_string = isolate->factory()->undefined_value(); - // 8. Else, - } else { - // a. Set offsetString to CodePointsToString(offsetString). - offset_string = isolate->factory()->NewSubString( - iso_string, parsed->offset_string_start, - parsed->offset_string_start + parsed->offset_string_length); - } - // 9. Return the Record { [[Z]]: false, [[OffsetString]]: offsetString, - // [[Name]]: name }. - return Just(TimeZoneRecord({false, offset_string, name})); + // 6. Return timeZoneResult. + return Just(result.time_zone); } Maybe<int64_t> ParseTimeZoneOffsetString(Isolate* isolate, @@ -4023,25 +4035,38 @@ MaybeHandle<String> ParseTemporalCalendarString(Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); - // 1. Assert: Type(isoString) is String. - // 2. If isoString does not satisfy the syntax of a TemporalCalendarString - // (see 13.33), then a. Throw a RangeError exception. - base::Optional<ParsedISO8601Result> parsed = - TemporalParser::ParseTemporalCalendarString(isolate, iso_string); - if (!parsed.has_value()) { - THROW_NEW_ERROR(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), String); - } - // 3. Let id be the part of isoString produced by the CalendarName production, - // or undefined if not present. - // 4. If id is empty, then - if (parsed->calendar_name_length == 0) { - // a. Return "iso8601". - return isolate->factory()->iso8601_string(); + // 1. Let parseResult be Completion(ParseISODateTime(isoString)). + Maybe<DateTimeRecordWithCalendar> parse_result = + ParseISODateTime(isolate, iso_string); + // 2. If parseResult is a normal completion, then + if (parse_result.IsJust()) { + // a. Let calendar be parseResult.[[Value]].[[Calendar]]. + Handle<Object> calendar = parse_result.FromJust().calendar; + // b. If calendar is undefined, return "iso8601". + if (calendar->IsUndefined()) { + return isolate->factory()->iso8601_string(); + // c. Else, return calendar. + } else { + CHECK(calendar->IsString()); + return Handle<String>::cast(calendar); + } + // 3. Else, + } else { + DCHECK(isolate->has_pending_exception()); + isolate->clear_pending_exception(); + // a. Set parseResult to ParseText(StringToCodePoints(isoString), + // CalendarName). + base::Optional<ParsedISO8601Result> parsed = + TemporalParser::ParseCalendarName(isolate, iso_string); + // b. If parseResult is a List of errors, throw a RangeError exception. 
+ if (!parsed.has_value()) { + THROW_NEW_ERROR( + isolate, NewRangeError(MessageTemplate::kInvalidCalendar, iso_string), + String); + } + // c. Else, return isoString. + return iso_string; } - // 5. Return CodePointsToString(id). - return isolate->factory()->NewSubString( - iso_string, parsed->calendar_name_start, - parsed->calendar_name_start + parsed->calendar_name_length); } // #sec-temporal-calendarequals @@ -4346,8 +4371,8 @@ Maybe<int64_t> GetOffsetNanosecondsFor(Isolate* isolate, // 6. Set offsetNanoseconds to ℝ(offsetNanoseconds). int64_t offset_nanoseconds_int = static_cast<int64_t>(offset_nanoseconds); - // 7. If abs(offsetNanoseconds) > 86400 × 10^9, throw a RangeError exception. - if (std::abs(offset_nanoseconds_int) > 86400e9) { + // 7. If abs(offsetNanoseconds) >= 86400 × 10^9, throw a RangeError exception. + if (std::abs(offset_nanoseconds_int) >= 86400e9) { THROW_NEW_ERROR_RETURN_VALUE( isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), Nothing<int64_t>()); } @@ -4640,13 +4665,12 @@ Handle<String> CanonicalizeTimeZoneName(Isolate* isolate, // Common routine shared by ToTemporalTimeRecord and ToPartialTime // #sec-temporal-topartialtime // #sec-temporal-totemporaltimerecord -Maybe<TimeRecordCommon> ToTemporalTimeRecordOrPartialTime( +Maybe<TimeRecord> ToTemporalTimeRecordOrPartialTime( Isolate* isolate, Handle<JSReceiver> temporal_time_like, - const TimeRecordCommon& time, bool skip_undefined, - const char* method_name) { + const TimeRecord& time, bool skip_undefined, const char* method_name) { TEMPORAL_ENTER_FUNC(); - TimeRecordCommon result(time); + TimeRecord result(time); Factory* factory = isolate->factory(); // 1. Assert: Type(temporalTimeLike) is Object. // 2. Let result be the new Record { [[Hour]]: undefined, [[Minute]]: @@ -4670,7 +4694,7 @@ Maybe<TimeRecordCommon> ToTemporalTimeRecordOrPartialTime( ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, value, JSReceiver::GetProperty(isolate, temporal_time_like, row.first), - Nothing<TimeRecordCommon>()); + Nothing<TimeRecord>()); // c. If value is not undefined, then if (!value->IsUndefined()) { // i. Set _any_ to *true*. @@ -4682,7 +4706,7 @@ Maybe<TimeRecordCommon> ToTemporalTimeRecordOrPartialTime( // d. / ii. Set value to ? ToIntegerThrowOnOInfinity(value). ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, value, ToIntegerThrowOnInfinity(isolate, value), - Nothing<TimeRecordCommon>()); + Nothing<TimeRecord>()); // e. / iii. Set result's internal slot whose name is the Internal Slot // value of the current row to value. *(row.second) = value->Number(); @@ -4692,25 +4716,25 @@ Maybe<TimeRecordCommon> ToTemporalTimeRecordOrPartialTime( if (!any) { // a. Throw a *TypeError* exception. THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_TYPE_ERROR(), - Nothing<TimeRecordCommon>()); + Nothing<TimeRecord>()); } // 4. Return result. 
return Just(result); } // #sec-temporal-topartialtime -Maybe<TimeRecordCommon> ToPartialTime(Isolate* isolate, - Handle<JSReceiver> temporal_time_like, - const TimeRecordCommon& time, - const char* method_name) { +Maybe<TimeRecord> ToPartialTime(Isolate* isolate, + Handle<JSReceiver> temporal_time_like, + const TimeRecord& time, + const char* method_name) { return ToTemporalTimeRecordOrPartialTime(isolate, temporal_time_like, time, true, method_name); } // #sec-temporal-totemporaltimerecord -Maybe<TimeRecordCommon> ToTemporalTimeRecord( - Isolate* isolate, Handle<JSReceiver> temporal_time_like, - const char* method_name) { +Maybe<TimeRecord> ToTemporalTimeRecord(Isolate* isolate, + Handle<JSReceiver> temporal_time_like, + const char* method_name) { return ToTemporalTimeRecordOrPartialTime( isolate, temporal_time_like, {kMinInt31, kMinInt31, kMinInt31, kMinInt31, kMinInt31, kMinInt31}, false, @@ -4965,8 +4989,7 @@ Handle<String> UnitToString(Isolate* isolate, Unit unit) { } // #sec-temporal-create-iso-date-record -DateRecordCommon CreateISODateRecord(Isolate* isolate, - const DateRecordCommon& date) { +DateRecord CreateISODateRecord(Isolate* isolate, const DateRecord& date) { // 1. Assert: IsValidISODate(year, month, day) is true. DCHECK(IsValidISODate(isolate, date)); // 2. Return the Record { [[Year]]: year, [[Month]]: month, [[Day]]: day }. @@ -4974,8 +4997,7 @@ DateRecordCommon CreateISODateRecord(Isolate* isolate, } // #sec-temporal-balanceisodate -DateRecordCommon BalanceISODate(Isolate* isolate, - const DateRecordCommon& date) { +DateRecord BalanceISODate(Isolate* isolate, const DateRecord& date) { TEMPORAL_ENTER_FUNC(); // 1. Let epochDays be MakeDay(𝔽(year), 𝔽(month - 1), 𝔽(day)). double epoch_days = MakeDay(date.year, date.month - 1, date.day); @@ -4983,8 +5005,8 @@ DateRecordCommon BalanceISODate(Isolate* isolate, DCHECK(std::isfinite(epoch_days)); // 3. Let ms be MakeDate(epochDays, +0𝔽). double ms = MakeDate(epoch_days, 0); - // 4. Return CreateISODateRecord(ℝ(YearFromTime(ms)), ℝ(MonthFromTime(ms)) + - // 1, ℝ(DateFromTime(ms))). + // 4. Return CreateISODateRecordWithCalendar(ℝ(YearFromTime(ms)), + // ℝ(MonthFromTime(ms)) + 1, ℝ(DateFromTime(ms))). int year = 0; int month = 0; int day = 0; @@ -5004,11 +5026,11 @@ DateRecordCommon BalanceISODate(Isolate* isolate, } // #sec-temporal-adddatetime -Maybe<DateTimeRecordCommon> AddDateTime(Isolate* isolate, - const DateTimeRecordCommon& date_time, - Handle<JSReceiver> calendar, - const DurationRecord& dur, - Handle<Object> options) { +Maybe<DateTimeRecord> AddDateTime(Isolate* isolate, + const DateTimeRecord& date_time, + Handle<JSReceiver> calendar, + const DurationRecord& dur, + Handle<Object> options) { TEMPORAL_ENTER_FUNC(); // 1. Assert: ISODateTimeWithinLimits(year, month, day, hour, minute, second, @@ -5018,7 +5040,7 @@ Maybe<DateTimeRecordCommon> AddDateTime(Isolate* isolate, // microsecond, nanosecond, hours, minutes, seconds, milliseconds, // microseconds, nanoseconds). const TimeDurationRecord& time = dur.time_duration; - DateTimeRecordCommon time_result = + DateTimeRecord time_result = AddTime(isolate, date_time.time, {0, time.hours, time.minutes, time.seconds, time.milliseconds, time.microseconds, time.nanoseconds}); @@ -5027,7 +5049,7 @@ Maybe<DateTimeRecordCommon> AddDateTime(Isolate* isolate, Handle<JSTemporalPlainDate> date_part; ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, date_part, CreateTemporalDate(isolate, date_time.date, calendar), - Nothing<DateTimeRecordCommon>()); + Nothing<DateTimeRecord>()); // 4. 
Let dateDuration be ? CreateTemporalDuration(years, months, weeks, days // + timeResult.[[Days]], 0, 0, 0, 0, 0, 0). Handle<JSTemporalDuration> date_duration; @@ -5039,14 +5061,14 @@ Maybe<DateTimeRecordCommon> AddDateTime(Isolate* isolate, dur.months, dur.weeks, {dur.time_duration.days + time_result.date.day, 0, 0, 0, 0, 0, 0}}), - Nothing<DateTimeRecordCommon>()); + Nothing<DateTimeRecord>()); // 5. Let addedDate be ? CalendarDateAdd(calendar, datePart, dateDuration, // options). Handle<JSTemporalPlainDate> added_date; ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, added_date, CalendarDateAdd(isolate, calendar, date_part, date_duration, options), - Nothing<DateTimeRecordCommon>()); + Nothing<DateTimeRecord>()); // 6. Return the new Record { [[Year]]: addedDate.[[ISOYear]], [[Month]]: // addedDate.[[ISOMonth]], [[Day]]: addedDate.[[ISODay]], [[Hour]]: // timeResult.[[Hour]], [[Minute]]: timeResult.[[Minute]], [[Second]]: @@ -5708,8 +5730,8 @@ Maybe<NanosecondsToDaysResult> NanosecondsToDays(Isolate* isolate, // #sec-temporal-differenceisodatetime Maybe<DurationRecord> DifferenceISODateTime( - Isolate* isolate, const DateTimeRecordCommon& date_time1, - const DateTimeRecordCommon& date_time2, Handle<JSReceiver> calendar, + Isolate* isolate, const DateTimeRecord& date_time1, + const DateTimeRecord& date_time2, Handle<JSReceiver> calendar, Unit largest_unit, Handle<JSReceiver> options, const char* method_name) { TEMPORAL_ENTER_FUNC(); // 1. Assert: ISODateTimeWithinLimits(y1, mon1, d1, h1, min1, s1, ms1, mus1, @@ -5733,8 +5755,8 @@ Maybe<DurationRecord> DifferenceISODateTime( // 5. Let dateSign be ! CompareISODate(y2, mon2, d2, y1, mon1, d1). double date_sign = CompareISODate(date_time2.date, date_time1.date); - // 6. Let adjustedDate be CreateISODateRecord(y1, mon1, d1). - DateRecordCommon adjusted_date = date_time1.date; + // 6. Let adjustedDate be CreateISODateRecordWithCalendar(y1, mon1, d1). + DateRecord adjusted_date = date_time1.date; CHECK(IsValidISODate(isolate, adjusted_date)); // 7. If timeSign is -dateSign, then @@ -5904,7 +5926,7 @@ bool IsValidEpochNanoseconds(Isolate* isolate, } Handle<BigInt> GetEpochFromISOParts(Isolate* isolate, - const DateTimeRecordCommon& date_time) { + const DateTimeRecord& date_time) { TEMPORAL_ENTER_FUNC(); // 1. Assert: year, month, day, hour, minute, second, millisecond, // microsecond, and nanosecond are integers. @@ -5976,6 +5998,10 @@ int32_t DurationSign(Isolate* isolaet, const DurationRecord& dur) { return 0; } +} // namespace + +namespace temporal { + // #sec-temporal-isvalidduration bool IsValidDuration(Isolate* isolate, const DurationRecord& dur) { TEMPORAL_ENTER_FUNC(); @@ -6006,6 +6032,10 @@ bool IsValidDuration(Isolate* isolate, const DurationRecord& dur) { time.microseconds > 0 || time.nanoseconds > 0))); } +} // namespace temporal + +namespace { + // #sec-temporal-isisoleapyear bool IsISOLeapYear(Isolate* isolate, int32_t year) { TEMPORAL_ENTER_FUNC(); @@ -6047,7 +6077,7 @@ int32_t ISODaysInYear(Isolate* isolate, int32_t year) { return IsISOLeapYear(isolate, year) ? 366 : 365; } -bool IsValidTime(Isolate* isolate, const TimeRecordCommon& time) { +bool IsValidTime(Isolate* isolate, const TimeRecord& time) { TEMPORAL_ENTER_FUNC(); // 2. 
If hour < 0 or hour > 23, then @@ -6073,7 +6103,7 @@ bool IsValidTime(Isolate* isolate, const TimeRecordCommon& time) { } // #sec-temporal-isvalidisodate -bool IsValidISODate(Isolate* isolate, const DateRecordCommon& date) { +bool IsValidISODate(Isolate* isolate, const DateRecord& date) { TEMPORAL_ENTER_FUNC(); // 1. Assert: year, month, and day are integers. @@ -6092,8 +6122,7 @@ bool IsValidISODate(Isolate* isolate, const DateRecordCommon& date) { } // #sec-temporal-compareisodate -int32_t CompareISODate(const DateRecordCommon& one, - const DateRecordCommon& two) { +int32_t CompareISODate(const DateRecord& one, const DateRecord& two) { TEMPORAL_ENTER_FUNC(); // 1. Assert: y1, m1, d1, y2, m2, and d2 are integers. @@ -6113,12 +6142,11 @@ int32_t CompareISODate(const DateRecordCommon& one, return 0; } -int32_t CompareTemporalTime(const TimeRecordCommon& time1, - const TimeRecordCommon& time2); +int32_t CompareTemporalTime(const TimeRecord& time1, const TimeRecord& time2); // #sec-temporal-compareisodatetime -int32_t CompareISODateTime(const DateTimeRecordCommon& one, - const DateTimeRecordCommon& two) { +int32_t CompareISODateTime(const DateTimeRecord& one, + const DateTimeRecord& two) { // 2. Let dateResult be ! CompareISODate(y1, mon1, d1, y2, mon2, d2). int32_t date_result = CompareISODate(one.date, two.date); // 3. If dateResult is not 0, then @@ -6147,10 +6175,10 @@ void BalanceISOYearMonth(Isolate* isolate, int32_t* year, int32_t* month) { // 4. Return the new Record { [[Year]]: year, [[Month]]: month }. } // #sec-temporal-balancetime -DateTimeRecordCommon BalanceTime(const UnbalancedTimeRecordCommon& input) { +DateTimeRecord BalanceTime(const UnbalancedTimeRecord& input) { TEMPORAL_ENTER_FUNC(); - UnbalancedTimeRecordCommon time(input); - TimeRecordCommon result; + UnbalancedTimeRecord time(input); + TimeRecord result; // 1. Assert: hour, minute, second, millisecond, microsecond, and nanosecond // are integers. @@ -6186,8 +6214,8 @@ DateTimeRecordCommon BalanceTime(const UnbalancedTimeRecordCommon& input) { // #sec-temporal-differencetime Maybe<TimeDurationRecord> DifferenceTime(Isolate* isolate, - const TimeRecordCommon& time1, - const TimeRecordCommon& time2) { + const TimeRecord& time1, + const TimeRecord& time2) { TEMPORAL_ENTER_FUNC(); // 1. Assert: h1, min1, s1, ms1, mus1, ns1, h2, min2, s2, ms2, mus2, and ns2 @@ -6216,7 +6244,7 @@ Maybe<TimeDurationRecord> DifferenceTime(Isolate* isolate, // 9. Let bt be ! BalanceTime(hours × sign, minutes × sign, seconds × sign, // milliseconds × sign, microseconds × sign, nanoseconds × sign). 
- DateTimeRecordCommon bt = + DateTimeRecord bt = BalanceTime({dur.hours * sign, dur.minutes * sign, dur.seconds * sign, dur.milliseconds * sign, dur.microseconds * sign, dur.nanoseconds * sign}); @@ -6231,8 +6259,8 @@ Maybe<TimeDurationRecord> DifferenceTime(Isolate* isolate, } // #sec-temporal-addtime -DateTimeRecordCommon AddTime(Isolate* isolate, const TimeRecordCommon& time, - const TimeDurationRecord& addend) { +DateTimeRecord AddTime(Isolate* isolate, const TimeRecord& time, + const TimeDurationRecord& addend) { TEMPORAL_ENTER_FUNC(); DCHECK_EQ(addend.days, 0); @@ -6330,30 +6358,29 @@ Handle<BigInt> TotalDurationNanoseconds(Isolate* isolate, return x; } -Maybe<DateRecordCommon> RegulateISODate(Isolate* isolate, ShowOverflow overflow, - const DateRecordCommon& date); +Maybe<DateRecord> RegulateISODate(Isolate* isolate, ShowOverflow overflow, + const DateRecord& date); Maybe<int32_t> ResolveISOMonth(Isolate* isolate, Handle<JSReceiver> fields); // #sec-temporal-isomonthdayfromfields -Maybe<DateRecordCommon> ISOMonthDayFromFields(Isolate* isolate, - Handle<JSReceiver> fields, - Handle<JSReceiver> options, - const char* method_name) { +Maybe<DateRecord> ISOMonthDayFromFields(Isolate* isolate, + Handle<JSReceiver> fields, + Handle<JSReceiver> options, + const char* method_name) { Factory* factory = isolate->factory(); // 1. Assert: Type(fields) is Object. - // 2. Let overflow be ? ToTemporalOverflow(options). - ShowOverflow overflow; - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, overflow, ToTemporalOverflow(isolate, options, method_name), - Nothing<DateRecordCommon>()); - - // 3. Set fields to ? PrepareTemporalFields(fields, « "day", "month", + // 2. Set fields to ? PrepareTemporalFields(fields, « "day", "month", // "monthCode", "year" », «"day"»). Handle<FixedArray> field_names = DayMonthMonthCodeYearInFixedArray(isolate); ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, fields, PrepareTemporalFields(isolate, fields, field_names, RequiredFields::kDay), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); + // 3. Let overflow be ? ToTemporalOverflow(options). + ShowOverflow overflow; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, overflow, ToTemporalOverflow(isolate, options, method_name), + Nothing<DateRecord>()); // 4. Let month be ! Get(fields, "month"). Handle<Object> month_obj = JSReceiver::GetProperty(isolate, fields, factory->month_string()) @@ -6372,13 +6399,13 @@ Maybe<DateRecordCommon> ISOMonthDayFromFields(Isolate* isolate, month_code_obj->IsUndefined(isolate) && year_obj->IsUndefined(isolate)) { // a. Throw a TypeError exception. THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_TYPE_ERROR(), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); } // 8. Set month to ? ResolveISOMonth(fields). - DateRecordCommon result; + DateRecord result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, result.month, ResolveISOMonth(isolate, fields), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); // 9. Let day be ! Get(fields, "day"). Handle<Object> day_obj = @@ -6405,7 +6432,7 @@ Maybe<DateRecordCommon> ISOMonthDayFromFields(Isolate* isolate, } MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, RegulateISODate(isolate, overflow, result), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); // 14. Return the new Record { [[Month]]: result.[[Month]], [[Day]]: // result.[[Day]], [[ReferenceISOYear]]: referenceISOYear }. 
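[Note: illustrative sketch, not part of the patch. The spec's BalanceTime (#sec-temporal-balancetime), which DifferenceTime and AddTime above both call, normalizes each component with floor-division carries, and whole-day overflow is handed back in the day slot of the returned record; that is why these helpers return a DateTimeRecord rather than a bare TimeRecord. A minimal standalone version of the carry chain, with plain int64 fields standing in for V8's record types:]

#include <cstdint>

struct BalancedTime {
  int64_t days, hour, minute, second, millisecond, microsecond, nanosecond;
};

// floor(a / b) for b > 0; C++ '/' truncates toward zero, so negative inputs
// need a fix-up to balance correctly.
static int64_t FloorDiv(int64_t a, int64_t b) {
  return a >= 0 ? a / b : -((-a + b - 1) / b);
}
static int64_t FloorMod(int64_t a, int64_t b) { return a - FloorDiv(a, b) * b; }

BalancedTime BalanceTimeSketch(int64_t h, int64_t min, int64_t s, int64_t ms,
                               int64_t us, int64_t ns) {
  us += FloorDiv(ns, 1000);  ns = FloorMod(ns, 1000);
  ms += FloorDiv(us, 1000);  us = FloorMod(us, 1000);
  s += FloorDiv(ms, 1000);   ms = FloorMod(ms, 1000);
  min += FloorDiv(s, 60);    s = FloorMod(s, 60);
  h += FloorDiv(min, 60);    min = FloorMod(min, 60);
  return {FloorDiv(h, 24), FloorMod(h, 24), min, s, ms, us, ns};
}
// BalanceTimeSketch(0, 0, 0, 0, 0, -1) yields {days: -1, 23:59:59.999999999},
// matching the spec's floor/modulo steps.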
result.year = reference_iso_year; @@ -6513,10 +6540,12 @@ Handle<BigInt> RoundTemporalInstant(Isolate* isolate, Handle<BigInt> ns, RoundingMode rounding_mode); // #sec-temporal-differenceinstant -Handle<BigInt> DifferenceInstant(Isolate* isolate, Handle<BigInt> ns1, - Handle<BigInt> ns2, double rounding_increment, - Unit smallest_unit, - RoundingMode rounding_mode); +TimeDurationRecord DifferenceInstant(Isolate* isolate, Handle<BigInt> ns1, + Handle<BigInt> ns2, + double rounding_increment, + Unit smallest_unit, Unit largest_unit, + RoundingMode rounding_mode, + const char* method_name); // #sec-temporal-differencezoneddatetime Maybe<DurationRecord> DifferenceZonedDateTime( @@ -8003,14 +8032,14 @@ namespace { // #sec-temporal-interpretisodatetimeoffset MaybeHandle<BigInt> InterpretISODateTimeOffset( - Isolate* isolate, const DateTimeRecordCommon& data, + Isolate* isolate, const DateTimeRecord& data, OffsetBehaviour offset_behaviour, int64_t offset_nanoseconds, Handle<JSReceiver> time_zone, Disambiguation disambiguation, Offset offset_option, MatchBehaviour match_behaviour, const char* method_name); // #sec-temporal-interprettemporaldatetimefields -Maybe<DateTimeRecord> InterpretTemporalDateTimeFields( +Maybe<temporal::DateTimeRecord> InterpretTemporalDateTimeFields( Isolate* isolate, Handle<JSReceiver> calendar, Handle<JSReceiver> fields, Handle<Object> options, const char* method_name); @@ -8041,7 +8070,7 @@ MaybeHandle<Object> ToRelativeTemporalObject(Isolate* isolate, Handle<Object> time_zone_obj = factory->undefined_value(); Handle<Object> offset_string_obj; - ZonedDateTimeRecord result; + temporal::DateTimeRecord result; Handle<JSReceiver> calendar; // 6. If Type(value) is Object, then if (value_obj->IsJSReceiver()) { @@ -8094,7 +8123,7 @@ MaybeHandle<Object> ToRelativeTemporalObject(Isolate* isolate, // h. Let result be ? InterpretTemporalDateTimeFields(calendar, fields, // dateOptions). MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, result.date_time, + isolate, result, InterpretTemporalDateTimeFields(isolate, calendar, fields, date_options, method_name), Handle<Object>()); @@ -8130,26 +8159,32 @@ MaybeHandle<Object> ToRelativeTemporalObject(Isolate* isolate, Handle<String> string; ASSIGN_RETURN_ON_EXCEPTION(isolate, string, Object::ToString(isolate, value_obj), Object); + DateTimeRecordWithCalendar parsed_result; // b. Let result be ? ParseTemporalRelativeToString(string). MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, result, ParseTemporalRelativeToString(isolate, string), + isolate, parsed_result, ParseTemporalRelativeToString(isolate, string), Handle<Object>()); + result = {parsed_result.date, parsed_result.time}; // c. Let calendar be ? // ToTemporalCalendarWithISODefault(result.[[Calendar]]). ASSIGN_RETURN_ON_EXCEPTION( isolate, calendar, - ToTemporalCalendarWithISODefault(isolate, result.date_time.calendar, + ToTemporalCalendarWithISODefault(isolate, parsed_result.calendar, method_name), Object); - // d. Let offsetString be result.[[TimeZoneOffset]]. - offset_string_obj = result.time_zone.offset_string; + // d. Let offsetString be result.[[TimeZone]].[[OffsetString]]. + offset_string_obj = parsed_result.time_zone.offset_string; - // e. Let timeZoneName be result.[[TimeZoneIANAName]]. - Handle<Object> time_zone_name_obj = result.time_zone.name; + // e. Let timeZoneName be result.[[TimeZone]].[[Name]]. + Handle<Object> time_zone_name_obj = parsed_result.time_zone.name; - // f. If timeZoneName is not undefined, then - if (!time_zone_name_obj->IsUndefined()) { + // f. 
If timeZoneName is undefined, then + if (time_zone_name_obj->IsUndefined()) { + // i. Let timeZone be undefined. + time_zone_obj = factory->undefined_value(); + // g. Else, + } else { // i. If ParseText(StringToCodePoints(timeZoneName), // TimeZoneNumericUTCOffset) is a List of errors, then DCHECK(time_zone_name_obj->IsString()); @@ -8173,68 +8208,62 @@ MaybeHandle<Object> ToRelativeTemporalObject(Isolate* isolate, temporal::CreateTemporalTimeZone(isolate, time_zone_name) .ToHandleChecked(); time_zone_obj = time_zone; - // g. Else, - } else { - // i. Let timeZone be undefined. - time_zone_obj = factory->undefined_value(); - } - // h. If result.[[TimeZoneZ]] is true, then - if (result.time_zone.z) { - // i. Set offsetBehaviour to exact. - offset_behaviour = OffsetBehaviour::kExact; - // g. Else if offsetString is undefined, then - } else if (offset_string_obj->IsUndefined()) { - // i. Set offsetBehaviour to wall. - offset_behaviour = OffsetBehaviour::kWall; + // iii. If result.[[TimeZone]].[[Z]] is true, then + if (parsed_result.time_zone.z) { + // 1. Set offsetBehaviour to exact. + offset_behaviour = OffsetBehaviour::kExact; + // iv. Else if offsetString is undefined, then + } else if (offset_string_obj->IsUndefined()) { + // 1. Set offsetBehaviour to wall. + offset_behaviour = OffsetBehaviour::kWall; + } + // v. Set matchBehaviour to match minutes. + match_behaviour = MatchBehaviour::kMatchMinutes; } - // h. Set matchBehaviour to match minutes. - match_behaviour = MatchBehaviour::kMatchMinutes; } - // 7. If timeZone is not undefined, then - if (!time_zone_obj->IsUndefined()) { - DCHECK(time_zone_obj->IsJSReceiver()); - Handle<JSReceiver> time_zone = Handle<JSReceiver>::cast(time_zone_obj); - // a. If offsetBehaviour is option, then - int64_t offset_ns = 0; - if (offset_behaviour == OffsetBehaviour::kOption) { - // i. Set offsetString to ? ToString(offsetString). - Handle<String> offset_string; - ASSIGN_RETURN_ON_EXCEPTION(isolate, offset_string, - Object::ToString(isolate, offset_string_obj), - Object); - // ii. Let offsetNs be ? ParseTimeZoneOffsetString(offset_string). - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, offset_ns, ParseTimeZoneOffsetString(isolate, offset_string), - Handle<Object>()); - // b. Else, - } else { - // i. Let offsetNs be 0. - offset_ns = 0; - } - // Let epochNanoseconds be ? InterpretISODateTimeOffset(result.[[Year]], - // result.[[Month]], result.[[Day]], result.[[Hour]], result.[[Minute]], - // result.[[Second]], result.[[Millisecond]], result.[[Microsecond]], - // result.[[Nanosecond]], offsetBehaviour, offsetNs, timeZone, "compatible", - // "reject", matchBehaviour). - Handle<BigInt> epoch_nanoseconds; - ASSIGN_RETURN_ON_EXCEPTION( - isolate, epoch_nanoseconds, - InterpretISODateTimeOffset( - isolate, {result.date_time.date, result.date_time.time}, - offset_behaviour, offset_ns, time_zone, Disambiguation::kCompatible, - Offset::kReject, match_behaviour, method_name), - Object); - - // e. Return ? CreateTemporalZonedDateTime(epochNanoseconds, timeZone, - // calendar). - return CreateTemporalZonedDateTime(isolate, epoch_nanoseconds, time_zone, - calendar); + // 8. If timeZone is undefined, then + if (time_zone_obj->IsUndefined()) { + // a. Return ? CreateTemporalDate(result.[[Year]], result.[[Month]], + // result.[[Day]], calendar). + return CreateTemporalDate(isolate, result.date, calendar); + } + DCHECK(time_zone_obj->IsJSReceiver()); + Handle<JSReceiver> time_zone = Handle<JSReceiver>::cast(time_zone_obj); + // 9. 
If offsetBehaviour is option, then + int64_t offset_ns = 0; + if (offset_behaviour == OffsetBehaviour::kOption) { + // a. Set offsetString to ? ToString(offsetString). + Handle<String> offset_string; + ASSIGN_RETURN_ON_EXCEPTION(isolate, offset_string, + Object::ToString(isolate, offset_string_obj), + Object); + // b. Let offsetNs be ? ParseTimeZoneOffsetString(offset_string). + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, offset_ns, ParseTimeZoneOffsetString(isolate, offset_string), + Handle<Object>()); + // 10. Else, + } else { + // a. Let offsetNs be 0. + offset_ns = 0; } - // 7. Return ? CreateTemporalDate(result.[[Year]], result.[[Month]], - // result.[[Day]], + // 11. Let epochNanoseconds be ? InterpretISODateTimeOffset(result.[[Year]], + // result.[[Month]], result.[[Day]], result.[[Hour]], result.[[Minute]], + // result.[[Second]], result.[[Millisecond]], result.[[Microsecond]], + // result.[[Nanosecond]], offsetBehaviour, offsetNs, timeZone, "compatible", + // "reject", matchBehaviour). + Handle<BigInt> epoch_nanoseconds; + ASSIGN_RETURN_ON_EXCEPTION( + isolate, epoch_nanoseconds, + InterpretISODateTimeOffset(isolate, result, offset_behaviour, offset_ns, + time_zone, Disambiguation::kCompatible, + Offset::kReject, match_behaviour, method_name), + Object); + + // 12. Return ? CreateTemporalZonedDateTime(epochNanoseconds, timeZone, // calendar). - return CreateTemporalDate(isolate, result.date_time.date, calendar); + return CreateTemporalZonedDateTime(isolate, epoch_nanoseconds, time_zone, + calendar); } // #sec-temporal-defaulttemporallargestunit @@ -8376,12 +8405,6 @@ Maybe<DurationRecord> DifferenceZonedDateTime( .ToChecked()); } -// #sec-temporal-differenceinstant -Handle<BigInt> DifferenceInstant(Isolate* isolate, Handle<BigInt> ns1, - Handle<BigInt> ns2, double rounding_increment, - Unit smallest_unit, - RoundingMode rounding_mode); - Maybe<DurationRecord> AddDuration(Isolate* isolate, const DurationRecord& dur1, const DurationRecord& dur2, Handle<Object> relative_to_obj, @@ -8548,17 +8571,13 @@ Maybe<DurationRecord> AddDuration(Isolate* isolate, const DurationRecord& dur1, // 11. If largestUnit is not one of "year", "month", "week", or "day", then if (!(largest_unit == Unit::kYear || largest_unit == Unit::kMonth || largest_unit == Unit::kWeek || largest_unit == Unit::kDay)) { - // i. Let diffNs be ! DifferenceInstant(relativeTo.[[Nanoseconds]], endNs, - // 1, "nanosecond", "halfExpand"). - Handle<BigInt> diff_ns = DifferenceInstant( - isolate, handle(relative_to->nanoseconds(), isolate), end_ns, 1, - Unit::kNanosecond, RoundingMode::kHalfExpand); - // ii. Let result be ! BalanceDuration(0, 0, 0, 0, 0, 0, diffNs, - // largestUnit). + // a. Let result be ! DifferenceInstant(relativeTo.[[Nanoseconds]], endNs, + // 1, *"nanosecond"*, largestUnit, *"halfExpand"*). result.time_duration = - BalanceDuration(isolate, largest_unit, diff_ns, method_name) - .ToChecked(); - // d. Return ! CreateDurationRecord(0, 0, 0, 0, result.[[Hours]], + DifferenceInstant(isolate, handle(relative_to->nanoseconds(), isolate), + end_ns, 1, Unit::kNanosecond, largest_unit, + RoundingMode::kHalfExpand, method_name); + // b. Return ! CreateDurationRecord(0, 0, 0, 0, result.[[Hours]], // result.[[Minutes]], result.[[Seconds]], result.[[Milliseconds]], // result.[[Microseconds]], result.[[Nanoseconds]]). 
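[Note: illustrative sketch, not part of the patch. The restructured branch above decides how a relativeTo string's UTC offset is honored: a trailing Z designator pins the exact instant, a missing offset falls back to wall-clock interpretation, and an explicit numeric offset keeps the default "option" behaviour that is later validated against the time zone. Condensed into a standalone helper with assumed types:]

enum class OffsetBehaviourSketch { kOption, kExact, kWall };

struct ParsedTimeZoneSketch {
  bool z;                  // string ended in the UTC designator "Z"
  bool has_offset_string;  // string carried a numeric offset, e.g. "+05:30"
};

// Mirrors the branch order above: Z wins, then a missing offset means wall
// clock; anything else stays "option".
OffsetBehaviourSketch ClassifyOffset(const ParsedTimeZoneSketch& tz) {
  if (tz.z) return OffsetBehaviourSketch::kExact;
  if (!tz.has_offset_string) return OffsetBehaviourSketch::kWall;
  return OffsetBehaviourSketch::kOption;
}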
result.time_duration.days = 0; @@ -9478,7 +9497,7 @@ MaybeHandle<JSTemporalCalendar> JSTemporalCalendar::Constructor( namespace { // #sec-temporal-toisodayofyear -int32_t ToISODayOfYear(Isolate* isolate, const DateRecordCommon& date) { +int32_t ToISODayOfYear(Isolate* isolate, const DateRecord& date) { TEMPORAL_ENTER_FUNC(); // 1. Assert: IsValidISODate(year, month, day) is *true*. DCHECK(IsValidISODate(isolate, date)); @@ -9500,7 +9519,7 @@ bool IsPlainDatePlainDateTimeOrPlainYearMonth( } // #sec-temporal-toisodayofweek -int32_t ToISODayOfWeek(Isolate* isolate, const DateRecordCommon& date) { +int32_t ToISODayOfWeek(Isolate* isolate, const DateRecord& date) { TEMPORAL_ENTER_FUNC(); // 1. Assert: IsValidISODate(year, month, day) is *true*. @@ -9529,8 +9548,8 @@ int32_t ToISODayOfWeek(Isolate* isolate, const DateRecordCommon& date) { } // #sec-temporal-regulateisodate -Maybe<DateRecordCommon> RegulateISODate(Isolate* isolate, ShowOverflow overflow, - const DateRecordCommon& date) { +Maybe<DateRecord> RegulateISODate(Isolate* isolate, ShowOverflow overflow, + const DateRecord& date) { TEMPORAL_ENTER_FUNC(); // 1. Assert: year, month, and day are integers. @@ -9543,14 +9562,14 @@ Maybe<DateRecordCommon> RegulateISODate(Isolate* isolate, ShowOverflow overflow, if (!IsValidISODate(isolate, date)) { THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); } // b. Return the Record { [[Year]]: year, [[Month]]: month, [[Day]]: day // }. return Just(date); // 4. If overflow is "constrain", then case ShowOverflow::kConstrain: - DateRecordCommon result(date); + DateRecord result(date); // a. Set month to ! ConstrainToRange(month, 1, 12). result.month = std::max(std::min(result.month, 12), 1); // b. Set day to ! ConstrainToRange(day, 1, ! ISODaysInMonth(year, @@ -9667,26 +9686,25 @@ Maybe<int32_t> ResolveISOMonth(Isolate* isolate, Handle<JSReceiver> fields) { } // #sec-temporal-isodatefromfields -Maybe<DateRecordCommon> ISODateFromFields(Isolate* isolate, - Handle<JSReceiver> fields, - Handle<JSReceiver> options, - const char* method_name) { +Maybe<DateRecord> ISODateFromFields(Isolate* isolate, Handle<JSReceiver> fields, + Handle<JSReceiver> options, + const char* method_name) { Factory* factory = isolate->factory(); // 1. Assert: Type(fields) is Object. - // 2. Let overflow be ? ToTemporalOverflow(options). - ShowOverflow overflow; - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, overflow, ToTemporalOverflow(isolate, options, method_name), - Nothing<DateRecordCommon>()); - // 3. Set fields to ? PrepareTemporalFields(fields, « "day", "month", + // 2. Set fields to ? PrepareTemporalFields(fields, « "day", "month", // "monthCode", "year" », «"year", "day"»). Handle<FixedArray> field_names = DayMonthMonthCodeYearInFixedArray(isolate); ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, fields, PrepareTemporalFields(isolate, fields, field_names, RequiredFields::kYearAndDay), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); + // 3. Let overflow be ? ToTemporalOverflow(options). + ShowOverflow overflow; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, overflow, ToTemporalOverflow(isolate, options, method_name), + Nothing<DateRecord>()); // 4. Let year be ! Get(fields, "year"). Handle<Object> year_obj = @@ -9700,9 +9718,8 @@ Maybe<DateRecordCommon> ISODateFromFields(Isolate* isolate, // 6. Let month be ? ResolveISOMonth(fields). 
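[Note: illustrative sketch, not part of the patch. RegulateISODate's "constrain" branch above clamps the month into 1..12 first and only then clamps the day against that month's length; a standalone rendering, with ISODaysInMonthSketch as an assumed stand-in for V8's ISODaysInMonth:]

#include <algorithm>
#include <cstdint>

int32_t ISODaysInMonthSketch(int32_t year, int32_t month) {
  static const int32_t kDays[12] = {31, 28, 31, 30, 31, 30,
                                    31, 31, 30, 31, 30, 31};
  bool leap = (year % 4 == 0 && year % 100 != 0) || year % 400 == 0;
  return (month == 2 && leap) ? 29 : kDays[month - 1];
}

void ConstrainISODateSketch(int32_t year, int32_t* month, int32_t* day) {
  // a. Set month to ! ConstrainToRange(month, 1, 12).
  *month = std::max(std::min(*month, 12), 1);
  // b. Set day to ! ConstrainToRange(day, 1, ! ISODaysInMonth(year, month)).
  *day = std::max(std::min(*day, ISODaysInMonthSketch(year, *month)), 1);
}
// ConstrainISODateSketch(2021, month = 2, day = 31) constrains to Feb 28,
// where the "reject" branch would instead throw a RangeError.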
int32_t month; - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, month, - ResolveISOMonth(isolate, fields), - Nothing<DateRecordCommon>()); + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, month, ResolveISOMonth(isolate, fields), Nothing<DateRecord>()); // 7. Let day be ! Get(fields, "day"). Handle<Object> day_obj = @@ -9720,10 +9737,9 @@ Maybe<DateRecordCommon> ISODateFromFields(Isolate* isolate, } // #sec-temporal-addisodate -Maybe<DateRecordCommon> AddISODate(Isolate* isolate, - const DateRecordCommon& date, - const DateDurationRecord& duration, - ShowOverflow overflow) { +Maybe<DateRecord> AddISODate(Isolate* isolate, const DateRecord& date, + const DateDurationRecord& duration, + ShowOverflow overflow) { TEMPORAL_ENTER_FUNC(); // 1. Assert: year, month, day, years, months, weeks, and days are integers. @@ -9731,7 +9747,7 @@ Maybe<DateRecordCommon> AddISODate(Isolate* isolate, DCHECK(overflow == ShowOverflow::kConstrain || overflow == ShowOverflow::kReject); // 3. Let intermediate be ! BalanceISOYearMonth(year + years, month + months). - DateRecordCommon intermediate = date; + DateRecord intermediate = date; intermediate.year += static_cast<int32_t>(duration.years); intermediate.month += static_cast<int32_t>(duration.months); BalanceISOYearMonth(isolate, &intermediate.year, &intermediate.month); @@ -9739,7 +9755,7 @@ Maybe<DateRecordCommon> AddISODate(Isolate* isolate, // intermediate.[[Month]], day, overflow). MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, intermediate, RegulateISODate(isolate, overflow, intermediate), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); // 5. Set days to days + 7 × weeks. // 6. Let d be intermediate.[[Day]] + days. @@ -9750,8 +9766,8 @@ Maybe<DateRecordCommon> AddISODate(Isolate* isolate, // #sec-temporal-differenceisodate Maybe<DateDurationRecord> DifferenceISODate(Isolate* isolate, - const DateRecordCommon& date1, - const DateRecordCommon& date2, + const DateRecord& date1, + const DateRecord& date2, Unit largest_unit, const char* method_name) { TEMPORAL_ENTER_FUNC(); @@ -9773,14 +9789,14 @@ Maybe<DateDurationRecord> DifferenceISODate(Isolate* isolate, // c. Let start be the new Record { [[Year]]: y1, [[Month]]: m1, [[Day]]: // d1 // }. - DateRecordCommon start = date1; + DateRecord start = date1; // d. Let end be the new Record { [[Year]]: y2, [[Month]]: m2, [[Day]]: // d2 }. - DateRecordCommon end = date2; + DateRecord end = date2; // e. Let years be end.[[Year]] − start.[[Year]]. double years = end.year - start.year; // f. Let mid be ! AddISODate(y1, m1, d1, years, 0, 0, 0, "constrain"). - DateRecordCommon mid; + DateRecord mid; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, mid, AddISODate(isolate, date1, {years, 0, 0, 0}, @@ -9885,7 +9901,7 @@ Maybe<DateDurationRecord> DifferenceISODate(Isolate* isolate, // 3. If largestUnit is "day" or "week", then case Unit::kDay: case Unit::kWeek: { - DateRecordCommon smaller, greater; + DateRecord smaller, greater; // a. If ! CompareISODate(y1, m1, d1, y2, m2, d2) < 0, then int32_t sign; if (CompareISODate(date1, date2) < 0) { @@ -9941,18 +9957,13 @@ Maybe<DateDurationRecord> DifferenceISODate(Isolate* isolate, } // #sec-temporal-isoyearmonthfromfields -Maybe<DateRecordCommon> ISOYearMonthFromFields(Isolate* isolate, - Handle<JSReceiver> fields, - Handle<JSReceiver> options, - const char* method_name) { +Maybe<DateRecord> ISOYearMonthFromFields(Isolate* isolate, + Handle<JSReceiver> fields, + Handle<JSReceiver> options, + const char* method_name) { Factory* factory = isolate->factory(); // 1. 
Assert: Type(fields) is Object. - // 2. Let overflow be ? ToTemporalOverflow(options). - ShowOverflow overflow; - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, overflow, ToTemporalOverflow(isolate, options, method_name), - Nothing<DateRecordCommon>()); - // 3. Set fields to ? PrepareTemporalFields(fields, « "month", "monthCode", + // 2. Set fields to ? PrepareTemporalFields(fields, « "month", "monthCode", // "year" », «»). Handle<FixedArray> field_names = factory->NewFixedArray(3); field_names->set(0, ReadOnlyRoots(isolate).month_string()); @@ -9962,7 +9973,12 @@ Maybe<DateRecordCommon> ISOYearMonthFromFields(Isolate* isolate, isolate, fields, PrepareTemporalFields(isolate, fields, field_names, RequiredFields::kNone), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); + // 3. Let overflow be ? ToTemporalOverflow(options). + ShowOverflow overflow; + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, overflow, ToTemporalOverflow(isolate, options, method_name), + Nothing<DateRecord>()); // 4. Let year be ! Get(fields, "year"). Handle<Object> year_obj = @@ -9971,26 +9987,25 @@ Maybe<DateRecordCommon> ISOYearMonthFromFields(Isolate* isolate, // 5. If year is undefined, throw a TypeError exception. if (year_obj->IsUndefined(isolate)) { THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_TYPE_ERROR(), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); } - DateRecordCommon result; + DateRecord result; result.year = FastD2I(floor(year_obj->Number())); // 6. Let month be ? ResolveISOMonth(fields). int32_t month; - MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, month, - ResolveISOMonth(isolate, fields), - Nothing<DateRecordCommon>()); + MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, month, ResolveISOMonth(isolate, fields), Nothing<DateRecord>()); // 7. Let result be ? RegulateISOYearMonth(year, month, overflow). MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result.month, RegulateISOYearMonth(isolate, overflow, month), - Nothing<DateRecordCommon>()); + Nothing<DateRecord>()); // 8. Return the new Record { [[Year]]: result.[[Year]], [[Month]]: // result.[[Month]], [[ReferenceISODay]]: 1 }. result.day = 1; return Just(result); } // #sec-temporal-toisoweekofyear -int32_t ToISOWeekOfYear(Isolate* isolate, const DateRecordCommon& date) { +int32_t ToISOWeekOfYear(Isolate* isolate, const DateRecord& date) { TEMPORAL_ENTER_FUNC(); // 1. Assert: IsValidISODate(year, month, day) is *true*. DCHECK(IsValidISODate(isolate, date)); @@ -10105,7 +10120,7 @@ MaybeHandle<JSTemporalPlainDate> JSTemporalCalendar::DateAdd( method_name), Handle<JSTemporalPlainDate>()); - DateRecordCommon result; + DateRecord result; // If calendar.[[Identifier]] is "iso8601", then if (calendar->calendar_index() == 0) { // 9. Let result be ? AddISODate(date.[[ISOYear]], date.[[ISOMonth]], @@ -10401,7 +10416,7 @@ MaybeHandle<JSTemporalPlainDate> JSTemporalCalendar::DateFromFields( JSTemporalPlainDate); if (calendar->calendar_index() == 0) { // 6. Let result be ? ISODateFromFields(fields, options). - DateRecordCommon result; + DateRecord result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ISODateFromFields(isolate, fields, options, method_name), @@ -10650,7 +10665,7 @@ MaybeHandle<JSTemporalPlainMonthDay> JSTemporalCalendar::MonthDayFromFields( JSTemporalPlainMonthDay); // 6. Let result be ? ISOMonthDayFromFields(fields, options). 
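[Note: illustrative sketch, not part of the patch. AddISODate above adds duration.years and duration.months straight onto the record and then rebalances with BalanceISOYearMonth; the rebalance folds an out-of-range month into the year with floor semantics so month lands back in 1..12 (spec #sec-temporal-balanceisoyearmonth). A standalone version:]

#include <cstdint>

void BalanceISOYearMonthSketch(int32_t* year, int32_t* month) {
  int32_t m = *month - 1;                               // 0-based month
  int32_t carry = m >= 0 ? m / 12 : -((-m + 11) / 12);  // floor(m / 12)
  *year += carry;
  *month = m - carry * 12 + 1;                          // back into 1..12
}
// {2021, month 14} -> {2022, month 2}; {2021, month 0} -> {2020, month 12};
// AddISODate can therefore add months blindly and normalize afterwards,
// leaving only the day for RegulateISODate to handle.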
if (calendar->calendar_index() == 0) { - DateRecordCommon result; + DateRecord result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ISOMonthDayFromFields(isolate, fields, options, method_name), @@ -10689,7 +10704,7 @@ MaybeHandle<JSTemporalPlainYearMonth> JSTemporalCalendar::YearMonthFromFields( JSTemporalPlainYearMonth); // 6. Let result be ? ISOYearMonthFromFields(fields, options). if (calendar->calendar_index() == 0) { - DateRecordCommon result; + DateRecord result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ISOYearMonthFromFields(isolate, fields, options, method_name), @@ -10902,92 +10917,56 @@ MaybeHandle<JSTemporalInstant> JSTemporalTimeZone::GetInstantFor( namespace { #ifdef V8_INTL_SUPPORT -MaybeHandle<Object> GetIANATimeZoneTransition(Isolate* isolate, - Handle<BigInt> nanoseconds, - int32_t time_zone_index, - Intl::Transition transition) { +Handle<Object> GetIANATimeZoneTransition(Isolate* isolate, + Handle<BigInt> nanoseconds, + int32_t time_zone_index, + Intl::Transition transition) { if (time_zone_index == JSTemporalTimeZone::kUTCTimeZoneIndex) { return isolate->factory()->null_value(); } - - Handle<BigInt> one_million = BigInt::FromUint64(isolate, 1000000); - Maybe<int64_t> maybe_transition = - Intl::GetTimeZoneOffsetTransitionMilliseconds( - isolate, time_zone_index, - BigInt::Divide(isolate, nanoseconds, one_million) - .ToHandleChecked() - ->AsInt64(), - transition); - // If there are no transition in this timezone, return null. - if (maybe_transition.IsNothing()) { - return isolate->factory()->null_value(); - } - - // #sec-temporal-getianatimezonenexttransition and - // #sec-temporal-getianatimezoneprevioustransition states: - // "The operation returns null if no such transition exists for which t ≤ - // ℤ(nsMaxInstant)." and "The operation returns null if no such transition - // exists for which t ≥ ℤ(nsMinInstant)." - // - // nsMinInstant = -nsMaxInstant = -8.64 × 10^21 => msMinInstant = -8.64 x - // 10^15 - constexpr int64_t kMsMinInstant = -8.64e15; - // nsMaxInstant = 10^8 × nsPerDay = 8.64 × 10^21 => msMaxInstant = 8.64 x - // 10^15 - constexpr int64_t kMsMaxInstant = 8.64e15; - - int64_t ms = maybe_transition.FromJust(); - if (ms < kMsMinInstant || ms > kMsMaxInstant) { - return isolate->factory()->null_value(); - } - - // Convert the transition from milliseconds to nanoseconds. 
- return BigInt::Multiply(isolate, BigInt::FromInt64(isolate, ms), one_million); + return Intl::GetTimeZoneOffsetTransitionNanoseconds(isolate, time_zone_index, + nanoseconds, transition); } // #sec-temporal-getianatimezonenexttransition -MaybeHandle<Object> GetIANATimeZoneNextTransition(Isolate* isolate, - Handle<BigInt> nanoseconds, - int32_t time_zone_index) { +Handle<Object> GetIANATimeZoneNextTransition(Isolate* isolate, + Handle<BigInt> nanoseconds, + int32_t time_zone_index) { return GetIANATimeZoneTransition(isolate, nanoseconds, time_zone_index, Intl::Transition::kNext); } // #sec-temporal-getianatimezoneprevioustransition -MaybeHandle<Object> GetIANATimeZonePreviousTransition( - Isolate* isolate, Handle<BigInt> nanoseconds, int32_t time_zone_index) { +Handle<Object> GetIANATimeZonePreviousTransition(Isolate* isolate, + Handle<BigInt> nanoseconds, + int32_t time_zone_index) { return GetIANATimeZoneTransition(isolate, nanoseconds, time_zone_index, Intl::Transition::kPrevious); } -MaybeHandle<Object> GetIANATimeZoneOffsetNanoseconds(Isolate* isolate, - Handle<BigInt> nanoseconds, - int32_t time_zone_index) { +Handle<Object> GetIANATimeZoneOffsetNanoseconds(Isolate* isolate, + Handle<BigInt> nanoseconds, + int32_t time_zone_index) { if (time_zone_index == JSTemporalTimeZone::kUTCTimeZoneIndex) { return handle(Smi::zero(), isolate); } return isolate->factory()->NewNumberFromInt64( - 1000000 * Intl::GetTimeZoneOffsetMilliseconds( - isolate, time_zone_index, - BigInt::Divide(isolate, nanoseconds, - BigInt::FromUint64(isolate, 1000000)) - .ToHandleChecked() - ->AsInt64()) - .ToChecked()); + Intl::GetTimeZoneOffsetNanoseconds(isolate, time_zone_index, + nanoseconds)); } #else // V8_INTL_SUPPORT // #sec-temporal-getianatimezonenexttransition -MaybeHandle<Object> GetIANATimeZoneNextTransition(Isolate* isolate, - Handle<BigInt>, int32_t) { +Handle<Object> GetIANATimeZoneNextTransition(Isolate* isolate, Handle<BigInt>, + int32_t) { return isolate->factory()->null_value(); } // #sec-temporal-getianatimezoneprevioustransition -MaybeHandle<Object> GetIANATimeZonePreviousTransition(Isolate* isolate, - Handle<BigInt>, int32_t) { +Handle<Object> GetIANATimeZonePreviousTransition(Isolate* isolate, + Handle<BigInt>, int32_t) { return isolate->factory()->null_value(); } -MaybeHandle<Object> GetIANATimeZoneOffsetNanoseconds(Isolate* isolate, - Handle<BigInt>, - int32_t time_zone_index) { +Handle<Object> GetIANATimeZoneOffsetNanoseconds(Isolate* isolate, + Handle<BigInt>, + int32_t time_zone_index) { DCHECK_EQ(time_zone_index, JSTemporalTimeZone::kUTCTimeZoneIndex); return handle(Smi::zero(), isolate); } @@ -11024,7 +11003,7 @@ MaybeHandle<JSTemporalPlainDateTime> JSTemporalTimeZone::GetPlainDateTimeFor( // template for shared code of Temporal.TimeZone.prototype.getNextTransition and // Temporal.TimeZone.prototype.getPreviousTransition -template <MaybeHandle<Object> (*iana_func)(Isolate*, Handle<BigInt>, int32_t)> +template <Handle<Object> (*iana_func)(Isolate*, Handle<BigInt>, int32_t)> MaybeHandle<Object> GetTransition(Isolate* isolate, Handle<JSTemporalTimeZone> time_zone, Handle<Object> starting_point_obj, @@ -11045,12 +11024,9 @@ MaybeHandle<Object> GetTransition(Isolate* isolate, // 5. Let transition be ? // GetIANATimeZoneNextTransition(startingPoint.[[Nanoseconds]], // timeZone.[[Identifier]]). 
- Handle<Object> transition_obj; - ASSIGN_RETURN_ON_EXCEPTION( - isolate, transition_obj, + Handle<Object> transition_obj = iana_func(isolate, handle(starting_point->nanoseconds(), isolate), - time_zone->time_zone_index()), - Object); + time_zone->time_zone_index()); // 6. If transition is null, return null. if (transition_obj->IsNull()) { return isolate->factory()->null_value(); @@ -11081,7 +11057,7 @@ MaybeHandle<Object> JSTemporalTimeZone::GetPreviousTransition( // #sec-temporal.timezone.prototype.getpossibleinstantsfor // #sec-temporal-getianatimezoneepochvalue MaybeHandle<JSArray> GetIANATimeZoneEpochValueAsArrayOfInstantForUTC( - Isolate* isolate, const DateTimeRecordCommon& date_time) { + Isolate* isolate, const DateTimeRecord& date_time) { Factory* factory = isolate->factory(); // 6. Let possibleInstants be a new empty List. Handle<BigInt> epoch_nanoseconds = GetEpochFromISOParts(isolate, date_time); @@ -11105,7 +11081,7 @@ MaybeHandle<JSArray> GetIANATimeZoneEpochValueAsArrayOfInstantForUTC( #ifdef V8_INTL_SUPPORT MaybeHandle<JSArray> GetIANATimeZoneEpochValueAsArrayOfInstant( Isolate* isolate, int32_t time_zone_index, - const DateTimeRecordCommon& date_time) { + const DateTimeRecord& date_time) { Factory* factory = isolate->factory(); if (time_zone_index == JSTemporalTimeZone::kUTCTimeZoneIndex) { return GetIANATimeZoneEpochValueAsArrayOfInstantForUTC(isolate, date_time); @@ -11115,24 +11091,16 @@ MaybeHandle<JSArray> GetIANATimeZoneEpochValueAsArrayOfInstant( Handle<BigInt> nanoseconds_in_local_time = GetEpochFromISOParts(isolate, date_time); - std::vector<int64_t> possible_offset_in_milliseconds = - Intl::GetTimeZonePossibleOffsetMilliseconds( - isolate, time_zone_index, - BigInt::Divide(isolate, nanoseconds_in_local_time, - BigInt::FromUint64(isolate, 1000000)) - .ToHandleChecked() - ->AsInt64()); + std::vector<Handle<BigInt>> possible_offset = + Intl::GetTimeZonePossibleOffsetNanoseconds(isolate, time_zone_index, + nanoseconds_in_local_time); - int32_t array_length = - static_cast<int32_t>(possible_offset_in_milliseconds.size()); + int32_t array_length = static_cast<int32_t>(possible_offset.size()); Handle<FixedArray> fixed_array = factory->NewFixedArray(array_length); for (int32_t i = 0; i < array_length; i++) { - int64_t offset_in_nanoseconds = - possible_offset_in_milliseconds[i] * 1000000; Handle<BigInt> epoch_nanoseconds = - BigInt::Subtract(isolate, nanoseconds_in_local_time, - BigInt::FromInt64(isolate, offset_in_nanoseconds)) + BigInt::Subtract(isolate, nanoseconds_in_local_time, possible_offset[i]) .ToHandleChecked(); // a. If ! IsValidEpochNanoseconds(epochNanoseconds) is false, throw a // RangeError exception. 
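[Note: illustrative sketch, not part of the patch. The loop above turns a local wall-clock datetime into candidate instants by subtracting each plausible UTC offset, which Intl::GetTimeZonePossibleOffsetNanoseconds now reports in nanoseconds directly instead of the old milliseconds-times-one-million round trip. The same arithmetic in scalar form:]

#include <cstdint>
#include <vector>

// local_ns: the wall-clock fields converted to nanoseconds as if they were
// UTC (what GetEpochFromISOParts produces above).
std::vector<int64_t> PossibleInstantsSketch(
    int64_t local_ns, const std::vector<int64_t>& possible_offsets_ns) {
  std::vector<int64_t> instants;
  instants.reserve(possible_offsets_ns.size());
  for (int64_t offset_ns : possible_offsets_ns) {
    instants.push_back(local_ns - offset_ns);  // instant = local - offset
  }
  // Around a DST transition this holds 0 entries (a skipped wall-clock hour)
  // or 2 entries (a repeated one); otherwise exactly 1.
  return instants;
}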
@@ -11155,7 +11123,7 @@ MaybeHandle<JSArray> GetIANATimeZoneEpochValueAsArrayOfInstant( MaybeHandle<JSArray> GetIANATimeZoneEpochValueAsArrayOfInstant( Isolate* isolate, int32_t time_zone_index, - const DateTimeRecordCommon& date_time) { + const DateTimeRecord& date_time) { DCHECK_EQ(time_zone_index, JSTemporalTimeZone::kUTCTimeZoneIndex); return GetIANATimeZoneEpochValueAsArrayOfInstantForUTC(isolate, date_time); } @@ -11176,7 +11144,7 @@ MaybeHandle<JSArray> JSTemporalTimeZone::GetPossibleInstantsFor( ToTemporalDateTime(isolate, date_time_obj, "Temporal.TimeZone.prototype.getPossibleInstantsFor"), JSArray); - DateTimeRecordCommon date_time_record = { + DateTimeRecord date_time_record = { {date_time->iso_year(), date_time->iso_month(), date_time->iso_day()}, {date_time->iso_hour(), date_time->iso_minute(), date_time->iso_second(), date_time->iso_millisecond(), date_time->iso_microsecond(), @@ -12181,29 +12149,29 @@ MaybeHandle<JSTemporalPlainDateTime> JSTemporalPlainDateTime::Constructor( namespace { // #sec-temporal-interprettemporaldatetimefields -Maybe<DateTimeRecord> InterpretTemporalDateTimeFields( +Maybe<temporal::DateTimeRecord> InterpretTemporalDateTimeFields( Isolate* isolate, Handle<JSReceiver> calendar, Handle<JSReceiver> fields, Handle<Object> options, const char* method_name) { TEMPORAL_ENTER_FUNC(); // 1. Let timeResult be ? ToTemporalTimeRecord(fields). - TimeRecordCommon time_result; + TimeRecord time_result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, time_result, ToTemporalTimeRecord(isolate, fields, method_name), - Nothing<DateTimeRecord>()); + Nothing<temporal::DateTimeRecord>()); // 2. Let temporalDate be ? DateFromFields(calendar, fields, options). Handle<JSTemporalPlainDate> temporal_date; ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, temporal_date, DateFromFields(isolate, calendar, fields, options), - Nothing<DateTimeRecord>()); + Nothing<temporal::DateTimeRecord>()); // 3. Let overflow be ? ToTemporalOverflow(options). ShowOverflow overflow; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, overflow, ToTemporalOverflow(isolate, options, method_name), - Nothing<DateTimeRecord>()); + Nothing<temporal::DateTimeRecord>()); // 4. Let timeResult be ? RegulateTime(timeResult.[[Hour]], // timeResult.[[Minute]], timeResult.[[Second]], timeResult.[[Millisecond]], @@ -12211,7 +12179,7 @@ Maybe<DateTimeRecord> InterpretTemporalDateTimeFields( MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, time_result, temporal::RegulateTime(isolate, time_result, overflow), - Nothing<DateTimeRecord>()); + Nothing<temporal::DateTimeRecord>()); // 5. Return the new Record { [[Year]]: temporalDate.[[ISOYear]], [[Month]]: // temporalDate.[[ISOMonth]], [[Day]]: temporalDate.[[ISODay]], [[Hour]]: // timeResult.[[Hour]], [[Minute]]: timeResult.[[Minute]], [[Second]]: @@ -12219,17 +12187,16 @@ Maybe<DateTimeRecord> InterpretTemporalDateTimeFields( // [[Microsecond]]: timeResult.[[Microsecond]], [[Nanosecond]]: // timeResult.[[Nanosecond]] }. - DateTimeRecord result = { + temporal::DateTimeRecord result = { {temporal_date->iso_year(), temporal_date->iso_month(), temporal_date->iso_day()}, - time_result, - Handle<String>()}; + time_result}; return Just(result); } // #sec-temporal-parsetemporaldatetimestring -Maybe<DateTimeRecord> ParseTemporalDateTimeString(Isolate* isolate, - Handle<String> iso_string) { +Maybe<DateTimeRecordWithCalendar> ParseTemporalDateTimeString( + Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); // 1. Assert: Type(isoString) is String. // 2. 
If isoString does not satisfy the syntax of a TemporalDateTimeString @@ -12240,7 +12207,7 @@ Maybe<DateTimeRecord> ParseTemporalDateTimeString(Isolate* isolate, // a. Throw a *RangeError* exception. THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), - Nothing<DateTimeRecord>()); + Nothing<DateTimeRecordWithCalendar>()); } // 3. If _isoString_ contains a |UTCDesignator|, then @@ -12248,7 +12215,7 @@ Maybe<DateTimeRecord> ParseTemporalDateTimeString(Isolate* isolate, // a. Throw a *RangeError* exception. THROW_NEW_ERROR_RETURN_VALUE(isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), - Nothing<DateTimeRecord>()); + Nothing<DateTimeRecordWithCalendar>()); } // 3. Let result be ? ParseISODateTime(isoString). @@ -12265,7 +12232,7 @@ MaybeHandle<JSTemporalPlainDateTime> ToTemporalDateTime( DCHECK(options->IsJSReceiver() || options->IsUndefined()); Handle<JSReceiver> calendar; - DateTimeRecord result; + temporal::DateTimeRecord result; // 2. If Type(item) is Object, then if (item_obj->IsJSReceiver()) { Handle<JSReceiver> item = Handle<JSReceiver>::cast(item_obj); @@ -12351,9 +12318,11 @@ MaybeHandle<JSTemporalPlainDateTime> ToTemporalDateTime( Object::ToString(isolate, item_obj), JSTemporalPlainDateTime); // c. Let result be ? ParseTemporalDateTimeString(string). + DateTimeRecordWithCalendar parsed_result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, result, ParseTemporalDateTimeString(isolate, string), + isolate, parsed_result, ParseTemporalDateTimeString(isolate, string), Handle<JSTemporalPlainDateTime>()); + result = {parsed_result.date, parsed_result.time}; // d. Assert: ! IsValidISODate(result.[[Year]], result.[[Month]], // result.[[Day]]) is true. DCHECK(IsValidISODate(isolate, result.date)); @@ -12365,7 +12334,8 @@ MaybeHandle<JSTemporalPlainDateTime> ToTemporalDateTime( // be ? ToTemporalCalendarWithISODefault(result.[[Calendar]]). ASSIGN_RETURN_ON_EXCEPTION( isolate, calendar, - ToTemporalCalendarWithISODefault(isolate, result.calendar, method_name), + ToTemporalCalendarWithISODefault(isolate, parsed_result.calendar, + method_name), JSTemporalPlainDateTime); } // 4. Return ? CreateTemporalDateTime(result.[[Year]], result.[[Month]], @@ -12554,7 +12524,7 @@ MaybeHandle<JSTemporalPlainDateTime> JSTemporalPlainDateTime::With( JSTemporalPlainDateTime); // 12. Let result be ? InterpretTemporalDateTimeFields(calendar, fields, // options). - DateTimeRecord result; + temporal::DateTimeRecord result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, InterpretTemporalDateTimeFields(isolate, calendar, fields, options, @@ -12778,10 +12748,11 @@ MaybeHandle<JSTemporalPlainDateTime> JSTemporalPlainDateTime::WithPlainDate( } namespace { -MaybeHandle<String> TemporalDateTimeToString( - Isolate* isolate, const DateTimeRecordCommon& date_time, - Handle<JSReceiver> calendar, Precision precision, - ShowCalendar show_calendar) { +MaybeHandle<String> TemporalDateTimeToString(Isolate* isolate, + const DateTimeRecord& date_time, + Handle<JSReceiver> calendar, + Precision precision, + ShowCalendar show_calendar) { IncrementalStringBuilder builder(isolate); // 1. Assert: isoYear, isoMonth, isoDay, hour, minute, second, millisecond, // microsecond, and nanosecond are integers. 
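[Note: illustrative sketch, not part of the patch. The renames in this patch split the records into plain date/time aggregates and *WithCalendar parse results: ParseTemporalDateTimeString above now returns a DateTimeRecordWithCalendar, and callers that only need the fields project the calendar away. The field layout below is assumed from how the patch uses the records (result.date, result.time, parsed_result.calendar), not copied from the V8 headers:]

#include <cstdint>

struct DateRecordSketch { int32_t year, month, day; };
struct TimeRecordSketch {
  int32_t hour, minute, second, millisecond, microsecond, nanosecond;
};
struct DateTimeRecordSketch {
  DateRecordSketch date;
  TimeRecordSketch time;
};
// A *WithCalendar parse result additionally carries the calendar handle;
// dropping it is a plain projection, as in the patch:
//   result = {parsed_result.date, parsed_result.time};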
@@ -12860,15 +12831,15 @@ namespace { constexpr double kNsPerDay = 8.64e13; -DateTimeRecordCommon RoundTime( - Isolate* isolate, const TimeRecordCommon& time, double increment, Unit unit, +DateTimeRecord RoundTime( + Isolate* isolate, const TimeRecord& time, double increment, Unit unit, RoundingMode rounding_mode, // 3.a a. If dayLengthNs is not present, set dayLengthNs to nsPerDay. double day_length_ns = kNsPerDay); // #sec-temporal-roundisodatetime -DateTimeRecordCommon RoundISODateTime( - Isolate* isolate, const DateTimeRecordCommon& date_time, double increment, +DateTimeRecord RoundISODateTime( + Isolate* isolate, const DateTimeRecord& date_time, double increment, Unit unit, RoundingMode rounding_mode, // 3. If dayLength is not present, set dayLength to nsPerDay. double day_length_ns = kNsPerDay) { @@ -12881,14 +12852,14 @@ DateTimeRecordCommon RoundISODateTime( // 4. Let roundedTime be ! RoundTime(hour, minute, second, millisecond, // microsecond, nanosecond, increment, unit, roundingMode, dayLength). - DateTimeRecordCommon rounded_time = RoundTime( - isolate, date_time.time, increment, unit, rounding_mode, day_length_ns); + DateTimeRecord rounded_time = RoundTime(isolate, date_time.time, increment, + unit, rounding_mode, day_length_ns); // 5. Let balanceResult be ! BalanceISODate(year, month, day + // roundedTime.[[Days]]). rounded_time.date.year = date_time.date.year; rounded_time.date.month = date_time.date.month; rounded_time.date.day += date_time.date.day; - DateRecordCommon balance_result = BalanceISODate(isolate, rounded_time.date); + DateRecord balance_result = BalanceISODate(isolate, rounded_time.date); // 6. Return the Record { [[Year]]: balanceResult.[[Year]], [[Month]]: // balanceResult.[[Month]], [[Day]]: balanceResult.[[Day]], [[Hour]]: @@ -12942,7 +12913,7 @@ MaybeHandle<String> JSTemporalPlainDateTime::ToString( // dateTime.[[ISOMillisecond]], dateTime.[[ISOMicrosecond]], // dateTime.[[ISONanosecond]], precision.[[Increment]], precision.[[Unit]], // roundingMode). - DateTimeRecordCommon result = RoundISODateTime( + DateTimeRecord result = RoundISODateTime( isolate, {{date_time->iso_year(), date_time->iso_month(), date_time->iso_day()}, {date_time->iso_hour(), date_time->iso_minute(), date_time->iso_second(), @@ -13073,7 +13044,7 @@ MaybeHandle<JSTemporalPlainDateTime> JSTemporalPlainDateTime::Round( // dateTime.[[ISOMinute]], dateTime.[[ISOSecond]], // dateTime.[[ISOMillisecond]], dateTime.[[ISOMicrosecond]], // dateTime.[[ISONanosecond]], roundingIncrement, smallestUnit, roundingMode). - DateTimeRecordCommon result = RoundISODateTime( + DateTimeRecord result = RoundISODateTime( isolate, {{date_time->iso_year(), date_time->iso_month(), date_time->iso_day()}, {date_time->iso_hour(), date_time->iso_minute(), date_time->iso_second(), @@ -13122,7 +13093,7 @@ AddDurationToOrSubtractDurationFromPlainDateTime( // duration.[[Weeks]], duration.[[Days]], duration.[[Hours]], // duration.[[Minutes]], duration.[[Seconds]], duration.[[Milliseconds]], // duration.[[Microseconds]], duration.[[Nanoseconds]], options). 
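[Note: illustrative sketch, not part of the patch. RoundISODateTime above depends on RoundTime returning a full DateTimeRecord so that rounding past midnight carries into the date via BalanceISODate. A scalar sketch of the unit == "minute" case; std::round ties away from zero, which matches halfExpand for the non-negative quantities RoundTime sees:]

#include <cmath>
#include <cstdint>

struct RoundedMinuteSketch { int64_t days, hour, minute; };

RoundedMinuteSketch RoundToMinuteSketch(int hour, int minute,
                                        double fractional_seconds,
                                        double increment) {
  // Spec quantity for unit "minute": hour * 60 + minute + seconds / 60.
  double quantity = hour * 60.0 + minute + fractional_seconds / 60.0;
  int64_t total =
      static_cast<int64_t>(std::round(quantity / increment) * increment);
  // Whole-day overflow is reported back so the caller can BalanceISODate.
  return {total / 1440, (total / 60) % 24, total % 60};
}
// RoundToMinuteSketch(23, 59, 59.999999999, 1) == {days: 1, 00:00}; the
// caller above then balances 2021-12-31 with day 31 + 1 into 2022-01-01.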
- DateTimeRecordCommon result; + DateTimeRecord result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, AddDateTime(isolate, @@ -13435,8 +13406,8 @@ MaybeHandle<JSTemporalPlainMonthDay> JSTemporalPlainMonthDay::Constructor( namespace { // #sec-temporal-parsetemporalmonthdaystring -Maybe<DateRecord> ParseTemporalMonthDayString(Isolate* isolate, - Handle<String> iso_string) { +Maybe<DateRecordWithCalendar> ParseTemporalMonthDayString( + Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); // 1. Assert: Type(isoString) is String. @@ -13446,28 +13417,30 @@ Maybe<DateRecord> ParseTemporalMonthDayString(Isolate* isolate, TemporalParser::ParseTemporalMonthDayString(isolate, iso_string); if (!parsed.has_value()) { // a. Throw a *RangeError* exception. - THROW_NEW_ERROR_RETURN_VALUE( - isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), Nothing<DateRecord>()); + THROW_NEW_ERROR_RETURN_VALUE(isolate, + NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), + Nothing<DateRecordWithCalendar>()); } // 3. If isoString contains a UTCDesignator, then if (parsed->utc_designator) { // a. Throw a *RangeError* exception. - THROW_NEW_ERROR_RETURN_VALUE( - isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), Nothing<DateRecord>()); + THROW_NEW_ERROR_RETURN_VALUE(isolate, + NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), + Nothing<DateRecordWithCalendar>()); } // 3. Let result be ? ParseISODateTime(isoString). - DateTimeRecord result; + DateTimeRecordWithCalendar result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ParseISODateTime(isolate, iso_string, *parsed), - Nothing<DateRecord>()); + Nothing<DateRecordWithCalendar>()); // 5. Let year be result.[[Year]]. // 6. If no part of isoString is produced by the DateYear production, then // a. Set year to undefined. // 7. Return the Record { [[Year]]: year, [[Month]]: result.[[Month]], // [[Day]]: result.[[Day]], [[Calendar]]: result.[[Calendar]] }. - DateRecord ret({result.date, result.calendar}); + DateRecordWithCalendar ret({result.date, result.calendar}); return Just(ret); } @@ -13591,7 +13564,7 @@ MaybeHandle<JSTemporalPlainMonthDay> ToTemporalMonthDay( JSTemporalPlainMonthDay); // 7. Let result be ? ParseTemporalMonthDayString(string). - DateRecord result; + DateRecordWithCalendar result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ParseTemporalMonthDayString(isolate, string), Handle<JSTemporalPlainMonthDay>()); @@ -13926,8 +13899,8 @@ MaybeHandle<JSTemporalPlainYearMonth> JSTemporalPlainYearMonth::Constructor( namespace { // #sec-temporal-parsetemporalyearmonthstring -Maybe<DateRecord> ParseTemporalYearMonthString(Isolate* isolate, - Handle<String> iso_string) { +Maybe<DateRecordWithCalendar> ParseTemporalYearMonthString( + Isolate* isolate, Handle<String> iso_string) { TEMPORAL_ENTER_FUNC(); // 1. Assert: Type(isoString) is String. @@ -13936,28 +13909,30 @@ Maybe<DateRecord> ParseTemporalYearMonthString(Isolate* isolate, base::Optional<ParsedISO8601Result> parsed = TemporalParser::ParseTemporalYearMonthString(isolate, iso_string); if (!parsed.has_value()) { - THROW_NEW_ERROR_RETURN_VALUE( - isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), Nothing<DateRecord>()); + THROW_NEW_ERROR_RETURN_VALUE(isolate, + NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), + Nothing<DateRecordWithCalendar>()); } // 3. If _isoString_ contains a |UTCDesignator|, then if (parsed->utc_designator) { // a. Throw a *RangeError* exception. 
- THROW_NEW_ERROR_RETURN_VALUE( - isolate, NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), Nothing<DateRecord>()); + THROW_NEW_ERROR_RETURN_VALUE(isolate, + NEW_TEMPORAL_INVALID_ARG_RANGE_ERROR(), + Nothing<DateRecordWithCalendar>()); } // 3. Let result be ? ParseISODateTime(isoString). - DateTimeRecord result; + DateTimeRecordWithCalendar result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ParseISODateTime(isolate, iso_string, *parsed), - Nothing<DateRecord>()); + Nothing<DateRecordWithCalendar>()); // 4. Return the Record { [[Year]]: result.[[Year]], [[Month]]: // result.[[Month]], [[Day]]: result.[[Day]], [[Calendar]]: // result.[[Calendar]] }. - DateRecord ret = {{result.date.year, result.date.month, result.date.day}, - result.calendar}; + DateRecordWithCalendar ret = { + {result.date.year, result.date.month, result.date.day}, result.calendar}; return Just(ret); } @@ -14009,7 +13984,7 @@ MaybeHandle<JSTemporalPlainYearMonth> ToTemporalYearMonth( Object::ToString(isolate, item_obj), JSTemporalPlainYearMonth); // 6. Let result be ? ParseTemporalYearMonthString(string). - DateRecord result; + DateRecordWithCalendar result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ParseTemporalYearMonthString(isolate, string), Handle<JSTemporalPlainYearMonth>()); @@ -14644,8 +14619,7 @@ MaybeHandle<JSTemporalZonedDateTime> JSTemporalPlainTime::ToZonedDateTime( namespace { // #sec-temporal-comparetemporaltime -int32_t CompareTemporalTime(const TimeRecordCommon& time1, - const TimeRecordCommon& time2) { +int32_t CompareTemporalTime(const TimeRecord& time1, const TimeRecord& time2) { TEMPORAL_ENTER_FUNC(); // 1. Assert: h1, min1, s1, ms1, mus1, ns1, h2, min2, s2, ms2, mus2, and ns2 @@ -14852,7 +14826,7 @@ MaybeHandle<JSTemporalPlainTime> JSTemporalPlainTime::Round( // temporalTime.[[ISOMillisecond]], temporalTime.[[ISOMicrosecond]], // temporalTime.[[ISONanosecond]], roundingIncrement, smallestUnit, // roundingMode). - DateTimeRecordCommon result = RoundTime( + DateTimeRecord result = RoundTime( isolate, {temporal_time->iso_hour(), temporal_time->iso_minute(), temporal_time->iso_second(), temporal_time->iso_millisecond(), @@ -14884,7 +14858,7 @@ MaybeHandle<JSTemporalPlainTime> JSTemporalPlainTime::With( MAYBE_RETURN(RejectObjectWithCalendarOrTimeZone(isolate, temporal_time_like), Handle<JSTemporalPlainTime>()); // 5. Let partialTime be ? ToPartialTime(temporalTimeLike). - TimeRecordCommon result; + TimeRecord result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, result, ToPartialTime( @@ -15027,7 +15001,7 @@ MaybeHandle<JSTemporalPlainTime> AddDurationToOrSubtractDurationFromPlainTime( // duration.[[Minutes]], sign x duration.[[Seconds]], sign x // duration.[[Milliseconds]], sign x duration.[[Microseconds]], sign x // duration.[[Nanoseconds]]). - DateTimeRecordCommon result = AddTime( + DateTimeRecord result = AddTime( isolate, {temporal_time->iso_hour(), temporal_time->iso_minute(), temporal_time->iso_second(), temporal_time->iso_millisecond(), @@ -15338,11 +15312,13 @@ Handle<BigInt> ApplyUnsignedRoundingMode( // 7. Let d2 be r2 – x. Handle<BigInt> dd2 = BigInt::Subtract(isolate, rr2, num).ToHandleChecked(); // 8. If d1 < d2, return r1. - if (BigInt::CompareToBigInt(dd1, dd2) == ComparisonResult::kLessThan) + if (BigInt::CompareToBigInt(dd1, dd2) == ComparisonResult::kLessThan) { return r1; + } // 9. If d2 < d1, return r2. 
- if (BigInt::CompareToBigInt(dd2, dd1) == ComparisonResult::kLessThan) + if (BigInt::CompareToBigInt(dd2, dd1) == ComparisonResult::kLessThan) { return r2; + } // 10. Assert: d1 is equal to d2. DCHECK_EQ(BigInt::CompareToBigInt(dd1, dd2), ComparisonResult::kEqual); // 11. If unsignedRoundingMode is half-zero, return r1. @@ -15402,54 +15378,48 @@ double RoundNumberToIncrement(Isolate* isolate, double x, double increment, return rounded * increment; } -// For the case that x and return are BigInt. -Handle<BigInt> RoundNumberToIncrement(Isolate* isolate, Handle<BigInt> x, - double increment, - RoundingMode rounding_mode) { +// #sec-temporal-roundnumbertoincrementasifpositive +Handle<BigInt> RoundNumberToIncrementAsIfPositive(Isolate* isolate, + Handle<BigInt> x, + double increment, + RoundingMode rounding_mode) { TEMPORAL_ENTER_FUNC(); // 1. Let quotient be x / increment. - bool is_negative; - // 2. If quotient < 0, then - if (x->IsNegative() != (increment < 0)) { - // a. Let isNegative be true. - is_negative = true; - // b. Set quotient to -quotient. - x = BigInt::UnaryMinus(isolate, x); - // 3. Else, - } else { - // a. Let isNegative be false. - is_negative = false; - } - // 4. Let unsignedRoundingMode be GetUnsignedRoundingMode(roundingMode, - // isNegative). + // 2. Let unsignedRoundingMode be GetUnsignedRoundingMode(roundingMode, + // false). UnsignedRoundingMode unsigned_rounding_mode = - GetUnsignedRoundingMode(rounding_mode, is_negative); + GetUnsignedRoundingMode(rounding_mode, false); - // 5. Let r1 be the largest integer such that r1 ≤ quotient. Handle<BigInt> increment_bigint = BigInt::FromNumber(isolate, isolate->factory()->NewNumber(increment)) .ToHandleChecked(); + // 3. Let r1 be the largest integer such that r1 ≤ quotient. Handle<BigInt> r1 = BigInt::Divide(isolate, x, increment_bigint).ToHandleChecked(); - // 6. Let r2 be the smallest integer such that r2 > quotient. + + // Adjust for negative quotient. + if (r1->IsNegative() && BigInt::Remainder(isolate, x, increment_bigint) + .ToHandleChecked() + ->ToBoolean()) { + r1 = BigInt::Decrement(isolate, r1).ToHandleChecked(); + } + + // 4. Let r2 be the smallest integer such that r2 > quotient. Handle<BigInt> r2 = BigInt::Increment(isolate, r1).ToHandleChecked(); - // 7. Let rounded be ApplyUnsignedRoundingMode(quotient, r1, r2, + // 5. Let rounded be ApplyUnsignedRoundingMode(quotient, r1, r2, // unsignedRoundingMode). Handle<BigInt> rounded = ApplyUnsignedRoundingMode( isolate, x, increment_bigint, r1, r2, unsigned_rounding_mode); - // 8. If isNegative is true, set rounded to -rounded. - if (is_negative) { - rounded = BigInt::UnaryMinus(isolate, rounded); - } - // 9. Return rounded × increment. - return BigInt::Multiply(isolate, rounded, increment_bigint).ToHandleChecked(); + // 6. Return rounded × increment. + Handle<BigInt> result = + BigInt::Multiply(isolate, rounded, increment_bigint).ToHandleChecked(); + return result; } -DateTimeRecordCommon RoundTime(Isolate* isolate, const TimeRecordCommon& time, - double increment, Unit unit, - RoundingMode rounding_mode, - double day_length_ns) { +DateTimeRecord RoundTime(Isolate* isolate, const TimeRecord& time, + double increment, Unit unit, + RoundingMode rounding_mode, double day_length_ns) { TEMPORAL_ENTER_FUNC(); // 1. Assert: hour, minute, second, millisecond, microsecond, nanosecond, and @@ -15760,7 +15730,7 @@ MaybeHandle<String> JSTemporalPlainTime::ToString( // temporalTime.[[ISONanosecond]], precision.[[Increment]], // precision.[[Unit]], roundingMode). 
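[Note: illustrative sketch, not part of the patch. Two details above are easy to miss: BigInt::Divide truncates toward zero, so r1 gets decremented for negative, non-exact quotients to restore "the largest integer such that r1 <= quotient", and the unsigned rounding mode is derived with isNegative = false, so directed modes and ties act toward +infinity on the number line rather than away from zero. An int64 rendering of the halfExpand case, assuming increment > 0:]

#include <cstdint>

int64_t RoundToIncrementAsIfPositiveSketch(int64_t x, int64_t increment) {
  int64_t r1 = x / increment;             // truncates toward zero, like BigInt
  if (x % increment != 0 && x < 0) --r1;  // fix-up: r1 = floor(x / increment)
  int64_t remainder = x - r1 * increment; // now in [0, increment)
  // halfExpand with isNegative == false is "round half toward +infinity".
  int64_t rounded = (2 * remainder >= increment) ? r1 + 1 : r1;
  return rounded * increment;
}
// RoundToIncrementAsIfPositiveSketch(-15, 10) == -10, where the old signed
// helper produced -20 (ties away from zero); RoundTemporalInstant relies on
// this behaviour for epoch nanoseconds before 1970.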
- DateTimeRecordCommon round_result = RoundTime( + DateTimeRecord round_result = RoundTime( isolate, {temporal_time->iso_hour(), temporal_time->iso_minute(), temporal_time->iso_second(), temporal_time->iso_millisecond(), @@ -15821,7 +15791,7 @@ MaybeHandle<JSTemporalZonedDateTime> JSTemporalZonedDateTime::Constructor( } // #sec-get-temporal.zoneddatetime.prototype.hoursinday -MaybeHandle<Smi> JSTemporalZonedDateTime::HoursInDay( +MaybeHandle<Object> JSTemporalZonedDateTime::HoursInDay( Isolate* isolate, Handle<JSTemporalZonedDateTime> zoned_date_time) { TEMPORAL_ENTER_FUNC(); const char* method_name = "Temporal.ZonedDateTime.prototype.hoursInDay"; @@ -15864,7 +15834,7 @@ MaybeHandle<Smi> JSTemporalZonedDateTime::HoursInDay( iso_calendar), Smi); // 11. Let tomorrowFields be BalanceISODate(year, month, day + 1). - DateRecordCommon tomorrow_fields = BalanceISODate( + DateRecord tomorrow_fields = BalanceISODate( isolate, {temporal_date_time->iso_year(), temporal_date_time->iso_month(), temporal_date_time->iso_day() + 1}); @@ -15895,15 +15865,23 @@ MaybeHandle<Smi> JSTemporalZonedDateTime::HoursInDay( Smi); // 15. Let diffNs be tomorrowInstant.[[Nanoseconds]] − // todayInstant.[[Nanoseconds]]. - // 16. Return 𝔽(diffNs / (3.6 × 10^12)). - int64_t diff_ns = + Handle<BigInt> diff_ns = BigInt::Subtract(isolate, handle(tomorrow_instant->nanoseconds(), isolate), handle(today_instant->nanoseconds(), isolate)) + .ToHandleChecked(); + // 16. Return 𝔽(diffNs / (3.6 × 10^12)). + // + // Note: The result of the division may be non integer for TimeZone which + // change fractional hours. Perform this division in two steps: + // First convert it to seconds in BigInt, then perform floating point + // division (seconds / 3600) to convert to hours. + int64_t diff_seconds = + BigInt::Divide(isolate, diff_ns, BigInt::FromUint64(isolate, 1000000000)) .ToHandleChecked() ->AsInt64(); - return handle(Smi::FromInt(static_cast<int32_t>(diff_ns / 3600000000000LL)), - isolate); + double hours_in_that_day = static_cast<double>(diff_seconds) / 3600.0; + return isolate->factory()->NewNumber(hours_in_that_day); } namespace { @@ -15926,7 +15904,7 @@ MaybeHandle<JSTemporalZonedDateTime> ToTemporalZonedDateTime( Handle<JSReceiver> time_zone; Handle<JSReceiver> calendar; - ZonedDateTimeRecord result; + temporal::DateTimeRecord result; // 5. If Type(item) is Object, then if (item_obj->IsJSReceiver()) { @@ -16001,7 +15979,7 @@ MaybeHandle<JSTemporalZonedDateTime> ToTemporalZonedDateTime( // l. Let result be ? InterpretTemporalDateTimeFields(calendar, fields, // options). MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, result.date_time, + isolate, result, InterpretTemporalDateTimeFields(isolate, calendar, fields, options, method_name), Handle<JSTemporalZonedDateTime>()); @@ -16017,13 +15995,18 @@ MaybeHandle<JSTemporalZonedDateTime> ToTemporalZonedDateTime( Object::ToString(isolate, item_obj), JSTemporalZonedDateTime); // c. Let result be ? ParseTemporalZonedDateTimeString(string). + DateTimeRecordWithCalendar parsed_result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, result, ParseTemporalZonedDateTimeString(isolate, string), + isolate, parsed_result, + ParseTemporalZonedDateTimeString(isolate, string), Handle<JSTemporalZonedDateTime>()); + result = {parsed_result.date, parsed_result.time}; + // d. Let timeZoneName be result.[[TimeZone]].[[Name]]. // e. Assert: timeZoneName is not undefined. 
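[Note: illustrative sketch, not part of the patch. The two-step division above exists because hoursInDay can be fractional: a zone whose DST shift is 30 minutes (Australia/Lord_Howe is the usual example, assumed here) has 23.5- and 24.5-hour days. Dividing the BigInt down to whole seconds first stays exact, since offset transitions are whole seconds in practice, and only the final seconds-to-hours step uses floating point:]

#include <cstdint>

// diff_ns is tomorrowInstant - todayInstant in nanoseconds.
double HoursInDaySketch(int64_t diff_ns) {
  int64_t diff_seconds = diff_ns / 1'000'000'000;  // exact (BigInt in V8)
  return static_cast<double>(diff_seconds) / 3600.0;
}
// HoursInDaySketch(86'400'000'000'000) == 24.0   (ordinary day)
// HoursInDaySketch(84'600'000'000'000) == 23.5   (30-minute spring forward)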
- DCHECK(!result.time_zone.name->IsUndefined()); - Handle<String> time_zone_name = Handle<String>::cast(result.time_zone.name); + DCHECK(!parsed_result.time_zone.name->IsUndefined()); + Handle<String> time_zone_name = + Handle<String>::cast(parsed_result.time_zone.name); // f. If ParseText(StringToCodePoints(timeZoneName), // TimeZoneNumericUTCOffset) is a List of errors, then @@ -16040,11 +16023,11 @@ MaybeHandle<JSTemporalZonedDateTime> ToTemporalZonedDateTime( // ii. Set timeZoneName to ! CanonicalizeTimeZoneName(timeZoneName). time_zone_name = CanonicalizeTimeZoneName(isolate, time_zone_name); } - // g. Let offsetString be result.[[TimeZoneOffsetString]]. - offset_string = result.time_zone.offset_string; + // g. Let offsetString be result.[[TimeZone]].[[OffsetString]]. + offset_string = parsed_result.time_zone.offset_string; - // h. If result.[[TimeZoneZ]] is true, then - if (result.time_zone.z) { + // h. If result.[[TimeZone]].[[Z]] is true, then + if (parsed_result.time_zone.z) { // i. Set offsetBehaviour to exact. offset_behaviour = OffsetBehaviour::kExact; // i. Else if offsetString is undefined, then @@ -16059,7 +16042,7 @@ MaybeHandle<JSTemporalZonedDateTime> ToTemporalZonedDateTime( // ToTemporalCalendarWithISODefault(result.[[Calendar]]). ASSIGN_RETURN_ON_EXCEPTION( isolate, calendar, - ToTemporalCalendarWithISODefault(isolate, result.date_time.calendar, + ToTemporalCalendarWithISODefault(isolate, parsed_result.calendar, method_name), JSTemporalZonedDateTime); // j. Set matchBehaviour to match minutes. @@ -16101,10 +16084,9 @@ MaybeHandle<JSTemporalZonedDateTime> ToTemporalZonedDateTime( Handle<BigInt> epoch_nanoseconds; ASSIGN_RETURN_ON_EXCEPTION( isolate, epoch_nanoseconds, - InterpretISODateTimeOffset( - isolate, {result.date_time.date, result.date_time.time}, - offset_behaviour, offset_nanoseconds, time_zone, disambiguation, - offset, match_behaviour, method_name), + InterpretISODateTimeOffset(isolate, result, offset_behaviour, + offset_nanoseconds, time_zone, disambiguation, + offset, match_behaviour, method_name), JSTemporalZonedDateTime); // 8. Return ? CreateTemporalZonedDateTime(epochNanoseconds, timeZone, @@ -16263,7 +16245,7 @@ namespace { // #sec-temporal-interpretisodatetimeoffset MaybeHandle<BigInt> InterpretISODateTimeOffset( - Isolate* isolate, const DateTimeRecordCommon& data, + Isolate* isolate, const DateTimeRecord& data, OffsetBehaviour offset_behaviour, int64_t offset_nanoseconds, Handle<JSReceiver> time_zone, Disambiguation disambiguation, Offset offset_option, MatchBehaviour match_behaviour, @@ -16489,7 +16471,7 @@ MaybeHandle<JSTemporalZonedDateTime> JSTemporalZonedDateTime::With( // 19. Let dateTimeResult be ? InterpretTemporalDateTimeFields(calendar, // fields, options). - DateTimeRecord date_time_result; + temporal::DateTimeRecord date_time_result; MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, date_time_result, InterpretTemporalDateTimeFields(isolate, calendar, fields, options, @@ -17147,7 +17129,7 @@ MaybeHandle<JSTemporalZonedDateTime> JSTemporalZonedDateTime::Round( // temporalDateTime.[[ISOSecond]], temporalDateTime.[[ISOMillisecond]], // temporalDateTime.[[ISOMicrosecond]], temporalDateTime.[[ISONanosecond]], // roundingIncrement, smallestUnit, roundingMode, dayLengthNs). 
- DateTimeRecordCommon round_result = RoundISODateTime( + DateTimeRecord round_result = RoundISODateTime( isolate, {{temporal_date_time->iso_year(), temporal_date_time->iso_month(), temporal_date_time->iso_day()}, @@ -17271,11 +17253,6 @@ MaybeHandle<JSTemporalZonedDateTime> JSTemporalZonedDateTime::Subtract( namespace { -Handle<BigInt> DifferenceInstant(Isolate* isolate, Handle<BigInt> ns1, - Handle<BigInt> ns2, double rounding_increment, - Unit smallest_unit, - RoundingMode rounding_mode); - // #sec-temporal-differencetemporalzoneddatetime MaybeHandle<JSTemporalDuration> DifferenceTemporalZonedDateTime( Isolate* isolate, TimePreposition operation, @@ -17318,21 +17295,15 @@ MaybeHandle<JSTemporalDuration> DifferenceTemporalZonedDateTime( settings.largest_unit != Unit::kMonth && settings.largest_unit != Unit::kWeek && settings.largest_unit != Unit::kDay) { - // a. Let differenceNs be ! DifferenceInstant(zonedDateTime.[[Nanoseconds]], + // 1. Let result be ! DifferenceInstant(zonedDateTime.[[Nanoseconds]], // other.[[Nanoseconds]], settings.[[RoundingIncrement]], - // settings.[[SmallestUnit]], settings.[[RoundingMode]]). - Handle<BigInt> difference_ns = DifferenceInstant( + // settings.[[SmallestUnit]], settings.[[LargestUnit]], + // settings.[[RoundingMode]]). + TimeDurationRecord balance_result = DifferenceInstant( isolate, handle(zoned_date_time->nanoseconds(), isolate), handle(other->nanoseconds(), isolate), settings.rounding_increment, - settings.smallest_unit, settings.rounding_mode); - // b. Assert: The following steps cannot fail due to overflow in the Number - // domain because abs(differenceNs) ≤ 2 × nsMaxInstant. c. Let balanceResult - // be ! BalanceDuration(0, 0, 0, 0, 0, 0, differenceNs, - // settings.[[LargestUnit]]). - TimeDurationRecord balance_result = - BalanceDuration(isolate, settings.largest_unit, difference_ns, - method_name) - .ToChecked(); + settings.smallest_unit, settings.largest_unit, settings.rounding_mode, + method_name); // d. Return ! CreateTemporalDuration(0, 0, 0, 0, sign × // balanceResult.[[Hours]], sign × balanceResult.[[Minutes]], sign × // balanceResult.[[Seconds]], sign × balanceResult.[[Milliseconds]], sign × @@ -17985,8 +17956,10 @@ Handle<BigInt> RoundTemporalInstant(Isolate* isolate, Handle<BigInt> ns, default: UNREACHABLE(); } - // 8. Return ! RoundNumberToIncrement(ℝ(ns), incrementNs, roundingMode). - return RoundNumberToIncrement(isolate, ns, increment_ns, rounding_mode); + // 8. Return ! RoundNumberToIncrementAsIfPositive(ℝ(ns), incrementNs, + // roundingMode). + return RoundNumberToIncrementAsIfPositive(isolate, ns, increment_ns, + rounding_mode); } } // namespace @@ -18429,7 +18402,13 @@ RoundingMode NegateTemporalRoundingMode(RoundingMode rounding_mode) { // 2. If roundingMode is "floor", return "ceil". case RoundingMode::kFloor: return RoundingMode::kCeil; - // 3. Return roundingMode. + // 3. If roundingMode is "halfCeil", return "halfFloor". + case RoundingMode::kHalfCeil: + return RoundingMode::kHalfFloor; + // 4. If roundingMode is "halfFloor", return "halfCeil". + case RoundingMode::kHalfFloor: + return RoundingMode::kHalfCeil; + // 5. Return roundingMode. 
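Restated as a self-contained sketch, the negation table this switch now implements looks as follows (the enum is trimmed to a few representative modes; the names mirror the patch, but the harness is illustrative, not V8's API):

#include <cassert>

enum class RoundingMode { kCeil, kFloor, kHalfCeil, kHalfFloor, kTrunc, kHalfExpand };

// Only the direction-sensitive modes swap; symmetric modes such as trunc
// and halfExpand are their own negation.
RoundingMode Negate(RoundingMode mode) {
  switch (mode) {
    case RoundingMode::kCeil:      return RoundingMode::kFloor;
    case RoundingMode::kFloor:     return RoundingMode::kCeil;
    case RoundingMode::kHalfCeil:  return RoundingMode::kHalfFloor;
    case RoundingMode::kHalfFloor: return RoundingMode::kHalfCeil;
    default:                       return mode;
  }
}

int main() {
  // Negation is an involution: applying it twice round-trips every mode.
  assert(Negate(Negate(RoundingMode::kHalfCeil)) == RoundingMode::kHalfCeil);
  assert(Negate(RoundingMode::kTrunc) == RoundingMode::kTrunc);
  return 0;
}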
     default:
       return rounding_mode;
   }
 }
@@ -18546,17 +18525,50 @@ Maybe<DifferenceSettings> GetDifferenceSettings(
 }
 
 // #sec-temporal-differenceinstant
-Handle<BigInt> DifferenceInstant(Isolate* isolate, Handle<BigInt> ns1,
-                                 Handle<BigInt> ns2, double rounding_increment,
-                                 Unit smallest_unit,
-                                 RoundingMode rounding_mode) {
+TimeDurationRecord DifferenceInstant(Isolate* isolate, Handle<BigInt> ns1,
+                                     Handle<BigInt> ns2,
+                                     double rounding_increment,
+                                     Unit smallest_unit, Unit largest_unit,
+                                     RoundingMode rounding_mode,
+                                     const char* method_name) {
   // 1. Assert: Type(ns1) is BigInt.
   // 2. Assert: Type(ns2) is BigInt.
-  // 3. Return ! RoundTemporalInstant(ns2 - ns1, roundingIncrement,
-  // smallestUnit, roundingMode).
-  return RoundTemporalInstant(
-      isolate, BigInt::Subtract(isolate, ns2, ns1).ToHandleChecked(),
-      rounding_increment, smallest_unit, rounding_mode);
+  // 3. Assert: The following step cannot fail due to overflow in the Number
+  // domain because abs(ns2 - ns1) <= 2 x nsMaxInstant.
+
+  // 4. Let roundResult be ! RoundDuration(0, 0, 0, 0, 0, 0, 0, 0, 0, ns2 - ns1,
+  // roundingIncrement, smallestUnit, roundingMode).[[DurationRecord]].
+  Handle<BigInt> diff = BigInt::Subtract(isolate, ns2, ns1).ToHandleChecked();
+  // Note: Since diff could be too large for a double to represent precisely,
+  // break diff into diff_hours and diff_nanoseconds before passing them into
+  // RoundDuration.
+  Handle<BigInt> nanoseconds_in_a_hour =
+      BigInt::FromUint64(isolate, 3600000000000);
+  double diff_hours =
+      BigInt::ToNumber(isolate,
+                       BigInt::Divide(isolate, diff, nanoseconds_in_a_hour)
+                           .ToHandleChecked())
+          ->Number();
+  double diff_nanoseconds =
+      BigInt::ToNumber(isolate,
+                       BigInt::Remainder(isolate, diff, nanoseconds_in_a_hour)
+                           .ToHandleChecked())
+          ->Number();
+  DurationRecordWithRemainder round_record =
+      RoundDuration(
+          isolate, {0, 0, 0, {0, diff_hours, 0, 0, 0, 0, diff_nanoseconds}},
+          rounding_increment, smallest_unit, rounding_mode, method_name)
+          .ToChecked();
+  // 5. Assert: roundResult.[[Days]] is 0.
+  DCHECK_EQ(0, round_record.record.time_duration.days);
+  // 6. Return ! BalanceDuration(0, roundResult.[[Hours]],
+  // roundResult.[[Minutes]], roundResult.[[Seconds]],
+  // roundResult.[[Milliseconds]], roundResult.[[Microseconds]],
+  // roundResult.[[Nanoseconds]], largestUnit).
+  return BalanceDuration(isolate, largest_unit,
+                         isolate->factory()->undefined_value(),
+                         round_record.record.time_duration, method_name)
+      .ToChecked();
 }
 
 // #sec-temporal-differencetemporalinstant
@@ -18581,22 +18593,16 @@ MaybeHandle<JSTemporalDuration> DifferenceTemporalInstant(
           DisallowedUnitsInDifferenceSettings::kNone, Unit::kNanosecond,
           Unit::kSecond, method_name),
       Handle<JSTemporalDuration>());
-  // 4. Let roundedNs be ! DifferenceInstant(instant.[[Nanoseconds]],
+  // 4. Let result be ! DifferenceInstant(instant.[[Nanoseconds]],
   // other.[[Nanoseconds]], settings.[[RoundingIncrement]],
-  // settings.[[SmallestUnit]], settings.[[RoundingMode]]).
-  Handle<BigInt> rounded_ns = DifferenceInstant(
+  // settings.[[SmallestUnit]], settings.[[LargestUnit]],
+  // settings.[[RoundingMode]]).
+  TimeDurationRecord result = DifferenceInstant(
       isolate, handle(instant->nanoseconds(), isolate),
      handle(other->nanoseconds(), isolate), settings.rounding_increment,
-      settings.smallest_unit, settings.rounding_mode);
-  // 5. Assert: The following steps cannot fail due to overflow in the Number
-  // domain because abs(roundedNs) ≤ 2 × nsMaxInstant.
-  // 6. Let result be ! 
BalanceDuration(0, 0, 0, 0, 0, 0, roundedNs, - // settings.[[LargestUnit]]). - TimeDurationRecord result = - BalanceDuration(isolate, settings.largest_unit, rounded_ns, method_name) - .ToChecked(); - - // 7. Return ! CreateTemporalDuration(0, 0, 0, 0, sign × result.[[Hours]], + settings.smallest_unit, settings.largest_unit, settings.rounding_mode, + method_name); + // 5. Return ! CreateTemporalDuration(0, 0, 0, 0, sign × result.[[Hours]], // sign × result.[[Minutes]], sign × result.[[Seconds]], sign × // result.[[Milliseconds]], sign × result.[[Microseconds]], sign × // result.[[Nanoseconds]]). diff --git a/deps/v8/src/objects/js-temporal-objects.h b/deps/v8/src/objects/js-temporal-objects.h index cebe2b2ffa1ac3..c387433e43c6d4 100644 --- a/deps/v8/src/objects/js-temporal-objects.h +++ b/deps/v8/src/objects/js-temporal-objects.h @@ -950,7 +950,7 @@ class JSTemporalZonedDateTime Handle<Object> time_zone_like); // #sec-get-temporal.zoneddatetime.prototype.hoursinday - V8_WARN_UNUSED_RESULT static MaybeHandle<Smi> HoursInDay( + V8_WARN_UNUSED_RESULT static MaybeHandle<Object> HoursInDay( Isolate* isolate, Handle<JSTemporalZonedDateTime> zoned_date_time); // #sec-temporal.zoneddatetime.prototype.round @@ -1051,13 +1051,13 @@ class JSTemporalZonedDateTime namespace temporal { -struct DateRecordCommon { +struct DateRecord { int32_t year; int32_t month; int32_t day; }; -struct TimeRecordCommon { +struct TimeRecord { int32_t hour; int32_t minute; int32_t second; @@ -1066,14 +1066,14 @@ struct TimeRecordCommon { int32_t nanosecond; }; -struct DateTimeRecordCommon { - DateRecordCommon date; - TimeRecordCommon time; +struct DateTimeRecord { + DateRecord date; + TimeRecord time; }; // #sec-temporal-createtemporaldatetime V8_WARN_UNUSED_RESULT MaybeHandle<JSTemporalPlainDateTime> -CreateTemporalDateTime(Isolate* isolate, const DateTimeRecordCommon& date_time, +CreateTemporalDateTime(Isolate* isolate, const DateTimeRecord& date_time, Handle<JSReceiver> calendar); // #sec-temporal-createtemporaltimezone @@ -1146,6 +1146,49 @@ MaybeHandle<JSTemporalInstant> BuiltinTimeZoneGetInstantForCompatible( Isolate* isolate, Handle<JSReceiver> time_zone, Handle<JSTemporalPlainDateTime> date_time, const char* method_name); +// For Intl.DurationFormat + +// #sec-temporal-time-duration-records +struct TimeDurationRecord { + double days; + double hours; + double minutes; + double seconds; + double milliseconds; + double microseconds; + double nanoseconds; + + // #sec-temporal-createtimedurationrecord + static Maybe<TimeDurationRecord> Create(Isolate* isolate, double days, + double hours, double minutes, + double seconds, double milliseconds, + double microseconds, + double nanoseconds); +}; + +// #sec-temporal-duration-records +// Cannot reuse DateDurationRecord here due to duplicate days. 
+struct DurationRecord { + double years; + double months; + double weeks; + TimeDurationRecord time_duration; + // #sec-temporal-createdurationrecord + static Maybe<DurationRecord> Create(Isolate* isolate, double years, + double months, double weeks, double days, + double hours, double minutes, + double seconds, double milliseconds, + double microseconds, double nanoseconds); +}; + +// #sec-temporal-topartialduration +Maybe<DurationRecord> ToPartialDuration( + Isolate* isolate, Handle<Object> temporal_duration_like_obj, + const DurationRecord& input); + +// #sec-temporal-isvalidduration +bool IsValidDuration(Isolate* isolate, const DurationRecord& dur); + } // namespace temporal } // namespace internal } // namespace v8 diff --git a/deps/v8/src/objects/js-weak-refs-inl.h b/deps/v8/src/objects/js-weak-refs-inl.h index 76e6e075e5ded6..0385d59efb7e56 100644 --- a/deps/v8/src/objects/js-weak-refs-inl.h +++ b/deps/v8/src/objects/js-weak-refs-inl.h @@ -5,10 +5,9 @@ #ifndef V8_OBJECTS_JS_WEAK_REFS_INL_H_ #define V8_OBJECTS_JS_WEAK_REFS_INL_H_ -#include "src/objects/js-weak-refs.h" - #include "src/api/api-inl.h" #include "src/heap/heap-write-barrier-inl.h" +#include "src/objects/js-weak-refs.h" #include "src/objects/smi-inl.h" // Has to be the last include (doesn't have include guards): @@ -55,7 +54,7 @@ void JSFinalizationRegistry::RegisterWeakCellWithUnregisterToken( bool JSFinalizationRegistry::Unregister( Handle<JSFinalizationRegistry> finalization_registry, - Handle<JSReceiver> unregister_token, Isolate* isolate) { + Handle<HeapObject> unregister_token, Isolate* isolate) { // Iterate through the doubly linked list of WeakCells associated with the // key. Each WeakCell will be in the "active_cells" or "cleared_cells" list of // its FinalizationRegistry; remove it from there. @@ -66,7 +65,7 @@ bool JSFinalizationRegistry::Unregister( template <typename GCNotifyUpdatedSlotCallback> bool JSFinalizationRegistry::RemoveUnregisterToken( - JSReceiver unregister_token, Isolate* isolate, + HeapObject unregister_token, Isolate* isolate, RemoveUnregisterTokenMode removal_mode, GCNotifyUpdatedSlotCallback gc_notify_updated_slot) { // This method is called from both FinalizationRegistry#unregister and for @@ -171,7 +170,7 @@ void WeakCell::Nullify(Isolate* isolate, // only called for WeakCells which haven't been unregistered yet, so they will // be in the active_cells list. (The caller must guard against calling this // for unregistered WeakCells by checking that the target is not undefined.) - DCHECK(target().IsJSReceiver()); + DCHECK(target().CanBeHeldWeakly()); set_target(ReadOnlyRoots(isolate).undefined_value()); JSFinalizationRegistry fr = @@ -217,7 +216,7 @@ void WeakCell::RemoveFromFinalizationRegistryCells(Isolate* isolate) { // It's important to set_target to undefined here. This guards that we won't // call Nullify (which assumes that the WeakCell is in active_cells). 
- DCHECK(target().IsUndefined() || target().IsJSReceiver()); + DCHECK(target().IsUndefined() || target().CanBeHeldWeakly()); set_target(ReadOnlyRoots(isolate).undefined_value()); JSFinalizationRegistry fr = diff --git a/deps/v8/src/objects/js-weak-refs.h b/deps/v8/src/objects/js-weak-refs.h index f678234ff81afc..64ff9573f6b2df 100644 --- a/deps/v8/src/objects/js-weak-refs.h +++ b/deps/v8/src/objects/js-weak-refs.h @@ -37,7 +37,7 @@ class JSFinalizationRegistry Handle<WeakCell> weak_cell, Isolate* isolate); inline static bool Unregister( Handle<JSFinalizationRegistry> finalization_registry, - Handle<JSReceiver> unregister_token, Isolate* isolate); + Handle<HeapObject> unregister_token, Isolate* isolate); // RemoveUnregisterToken is called from both Unregister and during GC. Since // it modifies slots in key_map and WeakCells and the normal write barrier is @@ -49,7 +49,7 @@ class JSFinalizationRegistry }; template <typename GCNotifyUpdatedSlotCallback> inline bool RemoveUnregisterToken( - JSReceiver unregister_token, Isolate* isolate, + HeapObject unregister_token, Isolate* isolate, RemoveUnregisterTokenMode removal_mode, GCNotifyUpdatedSlotCallback gc_notify_updated_slot); diff --git a/deps/v8/src/objects/js-weak-refs.tq b/deps/v8/src/objects/js-weak-refs.tq index 760cacf45cbe51..a2fb7193d02bdd 100644 --- a/deps/v8/src/objects/js-weak-refs.tq +++ b/deps/v8/src/objects/js-weak-refs.tq @@ -20,8 +20,8 @@ extern class JSFinalizationRegistry extends JSObject { extern class WeakCell extends HeapObject { finalization_registry: Undefined|JSFinalizationRegistry; - target: Undefined|JSReceiver; - unregister_token: Undefined|JSReceiver; + target: Undefined|JSReceiver|Symbol; + unregister_token: Undefined|JSReceiver|Symbol; holdings: JSAny; // For storing doubly linked lists of WeakCells in JSFinalizationRegistry's @@ -40,5 +40,5 @@ extern class WeakCell extends HeapObject { } extern class JSWeakRef extends JSObject { - target: Undefined|JSReceiver; + target: Undefined|JSReceiver|Symbol; } diff --git a/deps/v8/src/objects/keys.cc b/deps/v8/src/objects/keys.cc index a7d7f27ed92ea9..ac21fbf9c3d2d2 100644 --- a/deps/v8/src/objects/keys.cc +++ b/deps/v8/src/objects/keys.cc @@ -253,6 +253,14 @@ Maybe<bool> KeyAccumulator::CollectKeys(Handle<JSReceiver> receiver, Maybe<bool> result = Just(false); // Dummy initialization. if (current->IsJSProxy()) { result = CollectOwnJSProxyKeys(receiver, Handle<JSProxy>::cast(current)); + } else if (current->IsWasmObject()) { + if (mode_ == KeyCollectionMode::kIncludePrototypes) { + RETURN_FAILURE(isolate_, kThrowOnError, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); + } else { + DCHECK_EQ(KeyCollectionMode::kOwnOnly, mode_); + DCHECK_EQ(result, Just(false)); // Stop iterating. 
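The CanBeHeldWeakly checks that replace the IsJSReceiver assertions above, together with the widened Undefined|JSReceiver|Symbol target type, reduce to one small predicate. A sketch under an invented value model — the struct fields and flag names below are illustrative stand-ins, not V8's types:

#include <cassert>

// Assumed, simplified value model for illustration only.
struct Value {
  bool is_js_receiver = false;
  bool is_shared_struct_or_array = false;
  bool is_symbol = false;
  bool is_registered_symbol = false;  // created via Symbol.for()
};

bool symbols_as_weakmap_keys_enabled = true;
bool harmony_struct_enabled = false;

// Mirrors the CanBeHeldWeakly abstract operation this patch adds: objects
// may be weak targets; symbols may be too, unless they live in the global
// (public) symbol registry.
bool CanBeHeldWeakly(const Value& v) {
  if (v.is_js_receiver) {
    if (harmony_struct_enabled) return !v.is_shared_struct_or_array;
    return true;
  }
  if (symbols_as_weakmap_keys_enabled) {
    return v.is_symbol && !v.is_registered_symbol;
  }
  return false;
}

int main() {
  Value obj; obj.is_js_receiver = true;
  Value sym; sym.is_symbol = true;
  Value reg; reg.is_symbol = true; reg.is_registered_symbol = true;
  assert(CanBeHeldWeakly(obj));
  assert(CanBeHeldWeakly(sym));
  assert(!CanBeHeldWeakly(reg));
  return 0;
}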
+ } } else { DCHECK(current->IsJSObject()); result = CollectOwnKeys(receiver, Handle<JSObject>::cast(current)); diff --git a/deps/v8/src/objects/lookup.cc b/deps/v8/src/objects/lookup.cc index 3fa41c0f57c6e3..e08ebaff089f8a 100644 --- a/deps/v8/src/objects/lookup.cc +++ b/deps/v8/src/objects/lookup.cc @@ -22,10 +22,6 @@ #include "src/objects/property-details.h" #include "src/objects/struct-inl.h" -#if V8_ENABLE_WEBASSEMBLY -#include "src/wasm/wasm-objects-inl.h" -#endif // V8_ENABLE_WEBASSEMBLY - namespace v8 { namespace internal { @@ -334,9 +330,7 @@ void LookupIterator::InternalUpdateProtector(Isolate* isolate, void LookupIterator::PrepareForDataProperty(Handle<Object> value) { DCHECK(state_ == DATA || state_ == ACCESSOR); DCHECK(HolderIsReceiverOrHiddenPrototype()); -#if V8_ENABLE_WEBASSEMBLY DCHECK(!receiver_->IsWasmObject(isolate_)); -#endif // V8_ENABLE_WEBASSEMBLY Handle<JSReceiver> holder = GetHolder<JSReceiver>(); // We are not interested in tracking constness of a JSProxy's direct @@ -464,9 +458,7 @@ void LookupIterator::ReconfigureDataProperty(Handle<Object> value, DCHECK(HolderIsReceiverOrHiddenPrototype()); Handle<JSReceiver> holder = GetHolder<JSReceiver>(); -#if V8_ENABLE_WEBASSEMBLY if (V8_UNLIKELY(holder->IsWasmObject())) UNREACHABLE(); -#endif // V8_ENABLE_WEBASSEMBLY // Property details can never change for private properties. if (holder->IsJSProxy(isolate_)) { @@ -874,18 +866,8 @@ bool LookupIterator::HolderIsReceiverOrHiddenPrototype() const { Handle<Object> LookupIterator::FetchValue( AllocationPolicy allocation_policy) const { Object result; + DCHECK(!holder_->IsWasmObject()); if (IsElement(*holder_)) { -#if V8_ENABLE_WEBASSEMBLY - if (V8_UNLIKELY(holder_->IsWasmObject(isolate_))) { - if (holder_->IsWasmStruct()) { - // WasmStructs don't have elements. - return isolate_->factory()->undefined_value(); - } - Handle<WasmArray> holder = GetHolder<WasmArray>(); - return WasmArray::GetElement(isolate_, holder, number_.as_uint32()); - } -#endif // V8_ENABLE_WEBASSEMBLY - DCHECK(holder_->IsJSObject(isolate_)); Handle<JSObject> holder = GetHolder<JSObject>(); ElementsAccessor* accessor = holder->GetElementsAccessor(isolate_); return accessor->Get(isolate_, holder, number_); @@ -903,27 +885,6 @@ Handle<Object> LookupIterator::FetchValue( } } else if (property_details_.location() == PropertyLocation::kField) { DCHECK_EQ(PropertyKind::kData, property_details_.kind()); -#if V8_ENABLE_WEBASSEMBLY - if (V8_UNLIKELY(holder_->IsWasmObject(isolate_))) { - if (allocation_policy == AllocationPolicy::kAllocationDisallowed) { - // TODO(ishell): consider taking field type into account and relaxing - // this a bit. - return isolate_->factory()->undefined_value(); - } - if (holder_->IsWasmArray(isolate_)) { - // WasmArrays don't have other named properties besides "length". 
- DCHECK_EQ(*name_, ReadOnlyRoots(isolate_).length_string()); - Handle<WasmArray> holder = GetHolder<WasmArray>(); - uint32_t length = holder->length(); - return isolate_->factory()->NewNumberFromUint(length); - } - Handle<WasmStruct> holder = GetHolder<WasmStruct>(); - return WasmStruct::GetField(isolate_, holder, - property_details_.field_index()); - } -#endif // V8_ENABLE_WEBASSEMBLY - - DCHECK(holder_->IsJSObject(isolate_)); Handle<JSObject> holder = GetHolder<JSObject>(); FieldIndex field_index = FieldIndex::ForDescriptor(holder->map(isolate_), descriptor_number()); @@ -1081,11 +1042,9 @@ Handle<Object> LookupIterator::GetDataValue(SeqCstAccessTag tag) const { void LookupIterator::WriteDataValue(Handle<Object> value, bool initializing_store) { DCHECK_EQ(DATA, state_); -#if V8_ENABLE_WEBASSEMBLY // WriteDataValueToWasmObject() must be used instead for writing to // WasmObjects. DCHECK(!holder_->IsWasmObject(isolate_)); -#endif // V8_ENABLE_WEBASSEMBLY DCHECK_IMPLIES(holder_->IsJSSharedStruct(), value->IsShared()); Handle<JSReceiver> holder = GetHolder<JSReceiver>(); @@ -1179,42 +1138,6 @@ Handle<Object> LookupIterator::SwapDataValue(Handle<Object> value, return accessor->SwapAtomic(isolate_, holder, number_, *value, kSeqCstAccess); } -#if V8_ENABLE_WEBASSEMBLY - -wasm::ValueType LookupIterator::wasm_value_type() const { - DCHECK(has_property_); - DCHECK(holder_->IsWasmObject(isolate_)); - if (holder_->IsWasmStruct(isolate_)) { - wasm::StructType* wasm_struct_type = WasmStruct::cast(*holder_).type(); - return wasm_struct_type->field(property_details_.field_index()); - - } else { - DCHECK(holder_->IsWasmArray(isolate_)); - wasm::ArrayType* wasm_array_type = WasmArray::cast(*holder_).type(); - return wasm_array_type->element_type(); - } -} - -void LookupIterator::WriteDataValueToWasmObject(Handle<Object> value) { - DCHECK_EQ(DATA, state_); - DCHECK(holder_->IsWasmObject(isolate_)); - Handle<JSReceiver> holder = GetHolder<JSReceiver>(); - - if (IsElement(*holder)) { - // TODO(ishell): consider supporting indexed access to WasmStruct fields. - // TODO(v8:11804): implement stores to WasmArrays. - UNIMPLEMENTED(); - } else { - // WasmArrays don't have writable properties. 
- DCHECK(holder->IsWasmStruct()); - Handle<WasmStruct> wasm_holder = GetHolder<WasmStruct>(); - WasmStruct::SetField(isolate_, wasm_holder, property_details_.field_index(), - value); - } -} - -#endif // V8_ENABLE_WEBASSEMBLY - template <bool is_element> bool LookupIterator::SkipInterceptor(JSObject holder) { InterceptorInfo info = GetInterceptor<is_element>(holder); @@ -1284,9 +1207,7 @@ LookupIterator::State LookupIterator::LookupInSpecialHolder( if (is_element || !name_->IsPrivate(isolate_)) return JSPROXY; } #if V8_ENABLE_WEBASSEMBLY - if (map.IsWasmObjectMap()) { - return LookupInRegularHolder<is_element>(map, holder); - } + if (map.IsWasmObjectMap()) return WASM_OBJECT; #endif // V8_ENABLE_WEBASSEMBLY if (map.is_access_check_needed()) { if (is_element || !name_->IsPrivate(isolate_) || @@ -1325,6 +1246,7 @@ LookupIterator::State LookupIterator::LookupInSpecialHolder( return NOT_FOUND; case INTEGER_INDEXED_EXOTIC: case JSPROXY: + case WASM_OBJECT: case TRANSITION: UNREACHABLE(); } @@ -1338,43 +1260,22 @@ LookupIterator::State LookupIterator::LookupInRegularHolder( if (interceptor_state_ == InterceptorState::kProcessNonMasking) { return NOT_FOUND; } - + DCHECK(!holder.IsWasmObject(isolate_)); if (is_element && IsElement(holder)) { -#if V8_ENABLE_WEBASSEMBLY - if (V8_UNLIKELY(holder.IsWasmObject(isolate_))) { - // TODO(ishell): consider supporting indexed access to WasmStruct fields. - if (holder.IsWasmArray(isolate_)) { - WasmArray wasm_array = WasmArray::cast(holder); - number_ = index_ < wasm_array.length() ? InternalIndex(index_) - : InternalIndex::NotFound(); - wasm::ArrayType* wasm_array_type = wasm_array.type(); - property_details_ = - PropertyDetails(PropertyKind::kData, - wasm_array_type->mutability() ? SEALED : FROZEN, - PropertyCellType::kNoCell); - - } else { - DCHECK(holder.IsWasmStruct(isolate_)); - DCHECK(number_.is_not_found()); - } - } else // NOLINT(readability/braces) -#endif // V8_ENABLE_WEBASSEMBLY - { - JSObject js_object = JSObject::cast(holder); - ElementsAccessor* accessor = js_object.GetElementsAccessor(isolate_); - FixedArrayBase backing_store = js_object.elements(isolate_); - number_ = accessor->GetEntryForIndex(isolate_, js_object, backing_store, - index_); - if (number_.is_not_found()) { - return holder.IsJSTypedArray(isolate_) ? INTEGER_INDEXED_EXOTIC - : NOT_FOUND; - } - property_details_ = accessor->GetDetails(js_object, number_); - if (map.has_frozen_elements()) { - property_details_ = property_details_.CopyAddAttributes(FROZEN); - } else if (map.has_sealed_elements()) { - property_details_ = property_details_.CopyAddAttributes(SEALED); - } + JSObject js_object = JSObject::cast(holder); + ElementsAccessor* accessor = js_object.GetElementsAccessor(isolate_); + FixedArrayBase backing_store = js_object.elements(isolate_); + number_ = + accessor->GetEntryForIndex(isolate_, js_object, backing_store, index_); + if (number_.is_not_found()) { + return holder.IsJSTypedArray(isolate_) ? 
INTEGER_INDEXED_EXOTIC + : NOT_FOUND; + } + property_details_ = accessor->GetDetails(js_object, number_); + if (map.has_frozen_elements()) { + property_details_ = property_details_.CopyAddAttributes(FROZEN); + } else if (map.has_sealed_elements()) { + property_details_ = property_details_.CopyAddAttributes(SEALED); } } else if (!map.is_dictionary_map()) { DescriptorArray descriptors = map.instance_descriptors(isolate_); diff --git a/deps/v8/src/objects/lookup.h b/deps/v8/src/objects/lookup.h index 8526639ea407d0..782a09225c17a9 100644 --- a/deps/v8/src/objects/lookup.h +++ b/deps/v8/src/objects/lookup.h @@ -60,6 +60,7 @@ class V8_EXPORT_PRIVATE LookupIterator final { INTEGER_INDEXED_EXOTIC, INTERCEPTOR, JSPROXY, + WASM_OBJECT, NOT_FOUND, ACCESSOR, DATA, @@ -197,13 +198,6 @@ class V8_EXPORT_PRIVATE LookupIterator final { static inline void UpdateProtector(Isolate* isolate, Handle<Object> receiver, Handle<Name> name); -#if V8_ENABLE_WEBASSEMBLY - // Fetches type of WasmStruct's field or WasmArray's elements, it - // is used for preparing the value for storing into WasmObjects. - wasm::ValueType wasm_value_type() const; - void WriteDataValueToWasmObject(Handle<Object> value); -#endif // V8_ENABLE_WEBASSEMBLY - // Lookup a 'cached' private property for an accessor. // If not found returns false and leaves the LookupIterator unmodified. bool TryLookupCachedProperty(Handle<AccessorPair> accessor); diff --git a/deps/v8/src/objects/map-inl.h b/deps/v8/src/objects/map-inl.h index 6421ef1449f533..04cdb99e103c5a 100644 --- a/deps/v8/src/objects/map-inl.h +++ b/deps/v8/src/objects/map-inl.h @@ -53,6 +53,7 @@ RELEASE_ACQUIRE_WEAK_ACCESSORS(Map, raw_transitions, ACCESSORS_CHECKED2(Map, prototype, HeapObject, kPrototypeOffset, true, value.IsNull() || value.IsJSProxy() || + value.IsWasmObject() || (value.IsJSObject() && value.map().is_prototype_map())) DEF_GETTER(Map, prototype_info, Object) { diff --git a/deps/v8/src/objects/map.cc b/deps/v8/src/objects/map.cc index ca86b4b5fed681..c065533248a7c6 100644 --- a/deps/v8/src/objects/map.cc +++ b/deps/v8/src/objects/map.cc @@ -299,11 +299,13 @@ VisitorId Map::GetVisitorId(Map map) { case JS_TEMPORAL_TIME_ZONE_TYPE: case JS_TEMPORAL_ZONED_DATE_TIME_TYPE: case JS_TYPED_ARRAY_PROTOTYPE_TYPE: + case JS_RAW_JSON_TYPE: #ifdef V8_INTL_SUPPORT case JS_V8_BREAK_ITERATOR_TYPE: case JS_COLLATOR_TYPE: case JS_DATE_TIME_FORMAT_TYPE: case JS_DISPLAY_NAMES_TYPE: + case JS_DURATION_FORMAT_TYPE: case JS_LIST_FORMAT_TYPE: case JS_LOCALE_TYPE: case JS_NUMBER_FORMAT_TYPE: @@ -2296,7 +2298,8 @@ void Map::SetPrototype(Isolate* isolate, Handle<Map> map, Handle<JSObject> prototype_jsobj = Handle<JSObject>::cast(prototype); JSObject::OptimizeAsPrototype(prototype_jsobj, enable_prototype_setup_mode); } else { - DCHECK(prototype->IsNull(isolate) || prototype->IsJSProxy()); + DCHECK(prototype->IsNull(isolate) || prototype->IsJSProxy() || + prototype->IsWasmObject()); } WriteBarrierMode wb_mode = diff --git a/deps/v8/src/objects/maybe-object-inl.h b/deps/v8/src/objects/maybe-object-inl.h index 4b06fec5cb7d31..f5dd961487015f 100644 --- a/deps/v8/src/objects/maybe-object-inl.h +++ b/deps/v8/src/objects/maybe-object-inl.h @@ -84,8 +84,8 @@ HeapObjectReference HeapObjectReference::ClearedValue( #ifdef V8_COMPRESS_POINTERS // This is necessary to make pointer decompression computation also // suitable for cleared weak references. 
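For intuition, the ClearedValue change in this hunk — routing the cleared-weak-reference sentinel through the named compression scheme — can be modeled as a round-trip through a toy scheme. The masking below is a simplification; V8HeapCompressionScheme's real arithmetic differs in detail:

#include <cassert>
#include <cstdint>

using Address = uint64_t;
using Tagged_t = uint32_t;

// Toy scheme: compression keeps the low 32 bits; decompression puts the
// upper 32 bits of the cage base back, roughly as a pointer-compression
// cage does.
Tagged_t Compress(Address full) { return static_cast<Tagged_t>(full); }

Address Decompress(Address cage_base, Tagged_t compressed) {
  return (cage_base & 0xFFFFFFFF00000000ULL) | compressed;
}

int main() {
  const Address cage_base = 0x0000123400000000ULL;
  const Address object = cage_base + 0x42A0;
  assert(Decompress(cage_base, Compress(object)) == object);

  // A cleared weak reference stores a well-known low-32 sentinel; running it
  // through the same decompression keeps the computation branch-free.
  const Tagged_t kClearedWeakLower32 = 3;  // illustrative constant
  const Address cleared = Decompress(cage_base, kClearedWeakLower32);
  assert((cleared & 0xFFFFFFFFULL) == kClearedWeakLower32);
  return 0;
}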
- Address raw_value = - DecompressTaggedPointer(cage_base, kClearedWeakHeapObjectLower32); + Address raw_value = V8HeapCompressionScheme::DecompressTaggedPointer( + cage_base, kClearedWeakHeapObjectLower32); #else Address raw_value = kClearedWeakHeapObjectLower32; #endif diff --git a/deps/v8/src/objects/maybe-object.h b/deps/v8/src/objects/maybe-object.h index 0393ef6497b393..1005693ae1d8bc 100644 --- a/deps/v8/src/objects/maybe-object.h +++ b/deps/v8/src/objects/maybe-object.h @@ -36,7 +36,7 @@ class MaybeObject : public TaggedImpl<HeapObjectReferenceType::WEAK, Address> { #endif private: - template <typename TFieldType, int kFieldOffset> + template <typename TFieldType, int kFieldOffset, typename CompressionScheme> friend class TaggedField; }; diff --git a/deps/v8/src/objects/name-inl.h b/deps/v8/src/objects/name-inl.h index 570ac0a5082021..ee3585eb6a031d 100644 --- a/deps/v8/src/objects/name-inl.h +++ b/deps/v8/src/objects/name-inl.h @@ -190,6 +190,14 @@ uint32_t Name::EnsureRawHash( return String::cast(*this).ComputeAndSetRawHash(access_guard); } +uint32_t Name::RawHash() { + uint32_t field = raw_hash_field(kAcquireLoad); + if (V8_UNLIKELY(IsForwardingIndex(field))) { + return GetRawHashFromForwardingTable(field); + } + return field; +} + uint32_t Name::EnsureHash() { return HashBits::decode(EnsureRawHash()); } uint32_t Name::EnsureHash(const SharedStringAccessGuardIfNeeded& access_guard) { diff --git a/deps/v8/src/objects/name.h b/deps/v8/src/objects/name.h index dcd1b9652d869b..c2816d04c20f85 100644 --- a/deps/v8/src/objects/name.h +++ b/deps/v8/src/objects/name.h @@ -190,6 +190,7 @@ class Name : public TorqueGeneratedName<Name, PrimitiveHeapObject> { // a forwarding index. inline uint32_t EnsureRawHash(); inline uint32_t EnsureRawHash(const SharedStringAccessGuardIfNeeded&); + inline uint32_t RawHash(); static inline bool IsHashFieldComputed(uint32_t raw_hash_field); static inline bool IsHash(uint32_t raw_hash_field); diff --git a/deps/v8/src/objects/object-list-macros.h b/deps/v8/src/objects/object-list-macros.h index 183c414f2e1e42..368dd5400921a5 100644 --- a/deps/v8/src/objects/object-list-macros.h +++ b/deps/v8/src/objects/object-list-macros.h @@ -158,6 +158,7 @@ class ZoneForwardList; V(JSPrimitiveWrapper) \ V(JSPromise) \ V(JSProxy) \ + V(JSRawJson) \ V(JSReceiver) \ V(JSRegExp) \ V(JSRegExpStringIterator) \ @@ -279,6 +280,7 @@ class ZoneForwardList; V(JSCollator) \ V(JSDateTimeFormat) \ V(JSDisplayNames) \ + V(JSDurationFormat) \ V(JSListFormat) \ V(JSLocale) \ V(JSNumberFormat) \ diff --git a/deps/v8/src/objects/object-macros.h b/deps/v8/src/objects/object-macros.h index 7b9cf484061794..f5de0bb96072b4 100644 --- a/deps/v8/src/objects/object-macros.h +++ b/deps/v8/src/objects/object-macros.h @@ -18,14 +18,14 @@ // Since this changes visibility, it should always be last in a class // definition. -#define OBJECT_CONSTRUCTORS(Type, ...) \ - public: \ - constexpr Type() : __VA_ARGS__() {} \ - \ - protected: \ - template <typename TFieldType, int kFieldOffset> \ - friend class TaggedField; \ - \ +#define OBJECT_CONSTRUCTORS(Type, ...) 
\ + public: \ + constexpr Type() : __VA_ARGS__() {} \ + \ + protected: \ + template <typename TFieldType, int kFieldOffset, typename CompressionScheme> \ + friend class TaggedField; \ + \ explicit inline Type(Address ptr) #define OBJECT_CONSTRUCTORS_IMPL(Type, Super) \ @@ -698,15 +698,15 @@ static_assert(sizeof(unsigned) == sizeof(uint32_t), set(IndexForEntry(i) + k##name##Offset, value); \ } -#define TQ_OBJECT_CONSTRUCTORS(Type) \ - public: \ - constexpr Type() = default; \ - \ - protected: \ - template <typename TFieldType, int kFieldOffset> \ - friend class TaggedField; \ - \ - inline explicit Type(Address ptr); \ +#define TQ_OBJECT_CONSTRUCTORS(Type) \ + public: \ + constexpr Type() = default; \ + \ + protected: \ + template <typename TFieldType, int kFieldOffset, typename CompressionScheme> \ + friend class TaggedField; \ + \ + inline explicit Type(Address ptr); \ friend class TorqueGenerated##Type<Type, Super>; #define TQ_OBJECT_CONSTRUCTORS_IMPL(Type) \ diff --git a/deps/v8/src/objects/objects-body-descriptors-inl.h b/deps/v8/src/objects/objects-body-descriptors-inl.h index e6994bb87ca15b..4496fdedcf20c7 100644 --- a/deps/v8/src/objects/objects-body-descriptors-inl.h +++ b/deps/v8/src/objects/objects-body-descriptors-inl.h @@ -953,12 +953,10 @@ class Code::BodyDescriptor final : public BodyDescriptorBase { RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET) | RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) | RelocInfo::ModeMask(RelocInfo::COMPRESSED_EMBEDDED_OBJECT) | - RelocInfo::ModeMask(RelocInfo::DATA_EMBEDDED_OBJECT) | RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) | - RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) | - RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); + RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET); template <typename ObjectVisitor> static inline void IterateBody(Map map, HeapObject obj, ObjectVisitor* v) { @@ -1282,11 +1280,13 @@ auto BodyDescriptorApply(InstanceType type, Args&&... 
args) { case TYPE##_TYPED_ARRAY_CONSTRUCTOR_TYPE: TYPED_ARRAYS(TYPED_ARRAY_CONSTRUCTORS_SWITCH) #undef TYPED_ARRAY_CONSTRUCTORS_SWITCH + case JS_RAW_JSON_TYPE: #ifdef V8_INTL_SUPPORT case JS_V8_BREAK_ITERATOR_TYPE: case JS_COLLATOR_TYPE: case JS_DATE_TIME_FORMAT_TYPE: case JS_DISPLAY_NAMES_TYPE: + case JS_DURATION_FORMAT_TYPE: case JS_LIST_FORMAT_TYPE: case JS_LOCALE_TYPE: case JS_NUMBER_FORMAT_TYPE: diff --git a/deps/v8/src/objects/objects-inl.h b/deps/v8/src/objects/objects-inl.h index 4ba77c3009a757..025c87dbf313cc 100644 --- a/deps/v8/src/objects/objects-inl.h +++ b/deps/v8/src/objects/objects-inl.h @@ -43,6 +43,7 @@ #include "src/objects/tagged-impl-inl.h" #include "src/objects/tagged-index.h" #include "src/objects/templates.h" +#include "src/sandbox/bounded-size-inl.h" #include "src/sandbox/external-pointer-inl.h" #include "src/sandbox/sandboxed-pointer-inl.h" @@ -663,6 +664,14 @@ void Object::WriteSandboxedPointerField(size_t offset, Isolate* isolate, PtrComprCageBase(isolate), value); } +size_t Object::ReadBoundedSizeField(size_t offset) const { + return i::ReadBoundedSizeField(field_address(offset)); +} + +void Object::WriteBoundedSizeField(size_t offset, size_t value) { + i::WriteBoundedSizeField(field_address(offset), value); +} + template <ExternalPointerTag tag> void Object::InitExternalPointerField(size_t offset, Isolate* isolate, Address value) { @@ -733,14 +742,19 @@ HeapObject MapWord::ToForwardingAddress() { HeapObject MapWord::ToForwardingAddress(PtrComprCageBase host_cage_base) { DCHECK(IsForwardingAddress()); - if (V8_EXTERNAL_CODE_SPACE_BOOL) { - // Recompress value_ using proper host_cage_base since the map word - // has the upper 32 bits that correspond to the main cage base value. - Address value = - DecompressTaggedPointer(host_cage_base, CompressTagged(value_)); - return HeapObject::FromAddress(value); - } +#ifdef V8_EXTERNAL_CODE_SPACE + // Recompress value_ using proper host_cage_base and compression scheme + // since the map word is decompressed using the default compression scheme + // in an assumption it'll contain Map pointer. + // TODO(v8:11880): this code must be updated once a different scheme is used + // for external code fields. + Tagged_t compressed = V8HeapCompressionScheme::CompressTagged(value_); + Address value = V8HeapCompressionScheme::DecompressTaggedPointer( + host_cage_base, compressed); + return HeapObject::FromAddress(value); +#else return HeapObject::FromAddress(value_); +#endif // V8_EXTERNAL_CODE_SPACE } #ifdef VERIFY_HEAP @@ -1135,6 +1149,9 @@ Object Object::GetSimpleHash(Object object) { } else if (InstanceTypeChecker::IsSharedFunctionInfo(instance_type)) { uint32_t hash = SharedFunctionInfo::cast(object).Hash(); return Smi::FromInt(hash & Smi::kMaxValue); + } else if (InstanceTypeChecker::IsScopeInfo(instance_type)) { + uint32_t hash = ScopeInfo::cast(object).Hash(); + return Smi::FromInt(hash & Smi::kMaxValue); } DCHECK(object.IsJSReceiver()); return object; @@ -1198,6 +1215,23 @@ MaybeHandle<Object> Object::Share(Isolate* isolate, Handle<Object> value, throw_if_cannot_be_shared); } +// https://tc39.es/proposal-symbols-as-weakmap-keys/#sec-canbeheldweakly-abstract-operation +bool Object::CanBeHeldWeakly() const { + if (IsJSReceiver()) { + // TODO(v8:12547) Shared structs and arrays should only be able to point + // to shared values in weak collections. For now, disallow them as weak + // collection keys. 
+ if (v8_flags.harmony_struct) { + return !IsJSSharedStruct() && !IsJSSharedArray(); + } + return true; + } + if (v8_flags.harmony_symbol_as_weakmap_key) { + return IsSymbol() && !Symbol::cast(*this).is_in_public_symbol_table(); + } + return false; +} + Handle<Object> ObjectHashTableShape::AsHandle(Handle<Object> key) { return key; } diff --git a/deps/v8/src/objects/objects.cc b/deps/v8/src/objects/objects.cc index 75da04a3329735..c15ac8656adeaf 100644 --- a/deps/v8/src/objects/objects.cc +++ b/deps/v8/src/objects/objects.cc @@ -1167,6 +1167,10 @@ MaybeHandle<Object> Object::GetProperty(LookupIterator* it, if (!was_found && !is_global_reference) it->NotFound(); return result; } + case LookupIterator::WASM_OBJECT: + THROW_NEW_ERROR(it->isolate(), + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque), + Object); case LookupIterator::INTERCEPTOR: { bool done; Handle<Object> result; @@ -2505,6 +2509,10 @@ Maybe<bool> Object::SetPropertyInternal(LookupIterator* it, value, receiver, should_throw); } + case LookupIterator::WASM_OBJECT: + RETURN_FAILURE(it->isolate(), kThrowOnError, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque)); + case LookupIterator::INTERCEPTOR: { if (it->HolderIsReceiverOrHiddenPrototype()) { Maybe<bool> result = @@ -2722,6 +2730,7 @@ Maybe<bool> Object::SetSuperProperty(LookupIterator* it, Handle<Object> value, case LookupIterator::NOT_FOUND: case LookupIterator::TRANSITION: + case LookupIterator::WASM_OBJECT: UNREACHABLE(); } } @@ -2811,38 +2820,22 @@ Maybe<bool> Object::SetDataProperty(LookupIterator* it, Handle<Object> value) { } } -#if V8_ENABLE_WEBASSEMBLY - if (receiver->IsWasmObject(isolate)) { - // Prepares given value for being stored into a field of given Wasm type - // or throw if the value can't be stored into the field. - ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, to_assign, - WasmObject::ToWasmValue(isolate, it->wasm_value_type(), to_assign), - Nothing<bool>()); - - // Store prepared value. - it->WriteDataValueToWasmObject(to_assign); - - } else // NOLINT(readability/braces) -#endif // V8_ENABLE_WEBASSEMBLY - // clang-format off + DCHECK(!receiver->IsWasmObject(isolate)); if (V8_UNLIKELY(receiver->IsJSSharedStruct(isolate) || receiver->IsJSSharedArray(isolate))) { - // clang-format on - - // Shared structs can only point to primitives or shared values. - ASSIGN_RETURN_ON_EXCEPTION_VALUE( - isolate, to_assign, Object::Share(isolate, to_assign, kThrowOnError), - Nothing<bool>()); - it->WriteDataValue(to_assign, false); - } else { - // Possibly migrate to the most up-to-date map that will be able to store - // |value| under it->name(). - it->PrepareForDataProperty(to_assign); + // Shared structs can only point to primitives or shared values. + ASSIGN_RETURN_ON_EXCEPTION_VALUE( + isolate, to_assign, Object::Share(isolate, to_assign, kThrowOnError), + Nothing<bool>()); + it->WriteDataValue(to_assign, false); + } else { + // Possibly migrate to the most up-to-date map that will be able to store + // |value| under it->name(). + it->PrepareForDataProperty(to_assign); - // Write the property value. - it->WriteDataValue(to_assign, false); - } + // Write the property value. + it->WriteDataValue(to_assign, false); + } #if VERIFY_HEAP if (v8_flags.verify_heap) { diff --git a/deps/v8/src/objects/objects.h b/deps/v8/src/objects/objects.h index f85f1a907999f7..e1df081a5c72c5 100644 --- a/deps/v8/src/objects/objects.h +++ b/deps/v8/src/objects/objects.h @@ -86,6 +86,7 @@ // - JSCollator // If V8_INTL_SUPPORT enabled. 
// - JSDateTimeFormat // If V8_INTL_SUPPORT enabled. // - JSDisplayNames // If V8_INTL_SUPPORT enabled. +// - JSDurationFormat // If V8_INTL_SUPPORT enabled. // - JSListFormat // If V8_INTL_SUPPORT enabled. // - JSLocale // If V8_INTL_SUPPORT enabled. // - JSNumberFormat // If V8_INTL_SUPPORT enabled. @@ -335,6 +336,11 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> { V8_INLINE bool IsPrivateSymbol() const; V8_INLINE bool IsPublicSymbol() const; +#if !V8_ENABLE_WEBASSEMBLY + // Dummy implementation on builds without WebAssembly. + bool IsWasmObject(Isolate* = nullptr) const { return false; } +#endif + enum class Conversion { kToNumber, kToNumeric }; #define DECL_STRUCT_PREDICATE(NAME, Name, name) \ @@ -737,6 +743,12 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> { inline void WriteSandboxedPointerField(size_t offset, Isolate* isolate, Address value); + // + // BoundedSize field accessors. + // + inline size_t ReadBoundedSizeField(size_t offset) const; + inline void WriteBoundedSizeField(size_t offset, size_t value); + // // ExternalPointer_t field accessors. // @@ -779,6 +791,11 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> { Handle<HeapObject> value, ShouldThrow throw_if_cannot_be_shared); + // Whether this Object can be held weakly, i.e. whether it can be used as a + // key in WeakMap, as a key in WeakSet, as the target of a WeakRef, or as a + // target or unregister token of a FinalizationRegistry. + inline bool CanBeHeldWeakly() const; + protected: inline Address field_address(size_t offset) const { return ptr() + offset - kHeapObjectTag; @@ -904,7 +921,7 @@ class MapWord { private: // HeapObject calls the private constructor and directly reads the value. friend class HeapObject; - template <typename TFieldType, int kFieldOffset> + template <typename TFieldType, int kFieldOffset, typename CompressionScheme> friend class TaggedField; explicit MapWord(Address value) : value_(value) {} diff --git a/deps/v8/src/objects/option-utils.h b/deps/v8/src/objects/option-utils.h index 3075dbf1f5d7ed..1b7d81704f61bb 100644 --- a/deps/v8/src/objects/option-utils.h +++ b/deps/v8/src/objects/option-utils.h @@ -76,8 +76,8 @@ V8_WARN_UNUSED_RESULT static Maybe<T> GetStringOrBooleanOption( const std::vector<T>& enum_values, T true_value, T false_value, T fallback_value) { DCHECK_EQ(str_values.size(), enum_values.size()); - Handle<String> property_str = - isolate->factory()->NewStringFromAsciiChecked(property); + Factory* factory = isolate->factory(); + Handle<String> property_str = factory->NewStringFromAsciiChecked(property); // 1. Let value be ? Get(options, property). Handle<Object> value; @@ -104,8 +104,14 @@ V8_WARN_UNUSED_RESULT static Maybe<T> GetStringOrBooleanOption( // 6. Let value be ? ToString(value). ASSIGN_RETURN_ON_EXCEPTION_VALUE( isolate, value_str, Object::ToString(isolate, value), Nothing<T>()); - // If values does not contain an element equal to value, return fallback. - // 8. Return value. + // 7. If value is *"true"* or *"false"*, return _fallback_. + if (String::Equals(isolate, value_str, factory->true_string()) || + String::Equals(isolate, value_str, factory->false_string())) { + return Just(fallback_value); + } + // 8. If values does not contain an element equal to _value_, throw a + // *RangeError* exception. + // 9. Return value. 
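Taken together, the renumbered steps in GetStringOrBooleanOption amount to the following decision ladder: the literal strings "true" and "false" now return the fallback, and an unrecognized string throws a RangeError instead of silently falling back. A condensed standalone sketch, with std::string standing in for handles and a made-up enum:

#include <cstdio>
#include <stdexcept>
#include <string>
#include <vector>

enum class HourCycle { kH11, kH12, kH23, kH24, kFallback };

HourCycle GetStringOption(const std::string& value) {
  static const std::vector<std::string> str_values = {"h11", "h12", "h23",
                                                      "h24"};
  static const std::vector<HourCycle> enum_values = {
      HourCycle::kH11, HourCycle::kH12, HourCycle::kH23, HourCycle::kH24};
  // Step 7: the strings "true" and "false" select the fallback.
  if (value == "true" || value == "false") return HourCycle::kFallback;
  // Step 9: return the matching enum value.
  for (size_t i = 0; i < str_values.size(); ++i) {
    if (str_values[i] == value) return enum_values[i];
  }
  // Step 8: anything else is now a RangeError.
  throw std::range_error("value out of range");
}

int main() {
  std::printf("%d\n", static_cast<int>(GetStringOption("h23")));   // matches
  std::printf("%d\n", static_cast<int>(GetStringOption("true")));  // fallback
  try {
    GetStringOption("h99");
  } catch (const std::range_error&) {
    std::puts("range error, as the updated spec step requires");
  }
  return 0;
}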
value_str = String::Flatten(isolate, value_str); { DisallowGarbageCollection no_gc; @@ -127,7 +133,11 @@ V8_WARN_UNUSED_RESULT static Maybe<T> GetStringOrBooleanOption( } } } // end of no_gc - return Just(fallback_value); + THROW_NEW_ERROR_RETURN_VALUE( + isolate, + NewRangeError(MessageTemplate::kValueOutOfRange, value, + factory->NewStringFromAsciiChecked(method), property_str), + Nothing<T>()); } // ECMA402 9.2.10. GetOption( options, property, type, values, fallback) diff --git a/deps/v8/src/objects/scope-info.cc b/deps/v8/src/objects/scope-info.cc index 94fc55c583f582..045f6c1cecea38 100644 --- a/deps/v8/src/objects/scope-info.cc +++ b/deps/v8/src/objects/scope-info.cc @@ -22,9 +22,14 @@ namespace v8 { namespace internal { #ifdef DEBUG -bool ScopeInfo::Equals(ScopeInfo other) const { +bool ScopeInfo::Equals(ScopeInfo other, + bool ignore_position_and_module_info) const { if (length() != other.length()) return false; for (int index = 0; index < length(); ++index) { + if (ignore_position_and_module_info && HasPositionInfo() && + index >= PositionInfoIndex() && index <= PositionInfoIndex() + 1) { + continue; + } Object entry = get(index); Object other_entry = other.get(index); if (entry.IsSmi()) { @@ -39,14 +44,20 @@ bool ScopeInfo::Equals(ScopeInfo other) const { return false; } } else if (entry.IsScopeInfo()) { - if (!ScopeInfo::cast(entry).Equals(ScopeInfo::cast(other_entry))) { + if (!ScopeInfo::cast(entry).Equals(ScopeInfo::cast(other_entry), + ignore_position_and_module_info)) { return false; } } else if (entry.IsSourceTextModuleInfo()) { - if (!SourceTextModuleInfo::cast(entry).Equals( + if (!ignore_position_and_module_info && + !SourceTextModuleInfo::cast(entry).Equals( SourceTextModuleInfo::cast(other_entry))) { return false; } + } else if (entry.IsOddball()) { + if (Oddball::cast(entry).kind() != Oddball::cast(other_entry).kind()) { + return false; + } } else { UNREACHABLE(); } @@ -1100,6 +1111,19 @@ void ScopeInfo::ModuleVariable(int i, String* name, int* index, } } +uint32_t ScopeInfo::Hash() { + // Hash ScopeInfo based on its start and end position. + // Note: Ideally we'd also have the script ID. But since we only use the + // hash in a debug-evaluate cache, we don't worry too much about collisions. + if (HasPositionInfo()) { + return static_cast<uint32_t>( + base::hash_combine(flags(), StartPosition(), EndPosition())); + } + + return static_cast<uint32_t>( + base::hash_combine(flags(), context_local_count())); +} + std::ostream& operator<<(std::ostream& os, VariableAllocationInfo var_info) { switch (var_info) { case VariableAllocationInfo::NONE: diff --git a/deps/v8/src/objects/scope-info.h b/deps/v8/src/objects/scope-info.h index 3d266606f62440..89001da93b5655 100644 --- a/deps/v8/src/objects/scope-info.h +++ b/deps/v8/src/objects/scope-info.h @@ -253,7 +253,8 @@ class ScopeInfo : public TorqueGeneratedScopeInfo<ScopeInfo, HeapObject> { bool IsReplModeScope() const; #ifdef DEBUG - bool Equals(ScopeInfo other) const; + bool Equals(ScopeInfo other, + bool ignore_position_and_module_info = false) const; #endif template <typename IsolateT> @@ -304,6 +305,9 @@ class ScopeInfo : public TorqueGeneratedScopeInfo<ScopeInfo, HeapObject> { // Gives access to raw memory which stores the ScopeInfo's data. inline ObjectSlot data_start(); + // Hash based on position info and flags. Falls back to flags + local count. 
+ V8_EXPORT_PRIVATE uint32_t Hash(); + private: friend class WebSnapshotDeserializer; diff --git a/deps/v8/src/objects/shared-function-info.cc b/deps/v8/src/objects/shared-function-info.cc index c5dabb0d33ca1d..9e442c5c5ccff1 100644 --- a/deps/v8/src/objects/shared-function-info.cc +++ b/deps/v8/src/objects/shared-function-info.cc @@ -716,24 +716,35 @@ int SharedFunctionInfo::EndPosition() const { return kNoSourcePosition; } -void SharedFunctionInfo::SetPosition(int start_position, int end_position) { +void SharedFunctionInfo::UpdateFromFunctionLiteralForLiveEdit( + FunctionLiteral* lit) { Object maybe_scope_info = name_or_scope_info(kAcquireLoad); if (maybe_scope_info.IsScopeInfo()) { - ScopeInfo info = ScopeInfo::cast(maybe_scope_info); - if (info.HasPositionInfo()) { - info.SetPositionInfo(start_position, end_position); - } - } else if (HasUncompiledData()) { + // Updating the ScopeInfo is safe since they are identical modulo + // source positions. + ScopeInfo new_scope_info = *lit->scope()->scope_info(); + DCHECK(new_scope_info.Equals(ScopeInfo::cast(maybe_scope_info), true)); + SetScopeInfo(new_scope_info); + } else if (!is_compiled()) { + CHECK(HasUncompiledData()); if (HasUncompiledDataWithPreparseData()) { - // Clear out preparsed scope data, since the position setter invalidates - // any scope data. ClearPreparseData(); } - uncompiled_data().set_start_position(start_position); - uncompiled_data().set_end_position(end_position); - } else { - UNREACHABLE(); + uncompiled_data().set_start_position(lit->start_position()); + uncompiled_data().set_end_position(lit->end_position()); + + if (!is_toplevel()) { + Scope* outer_scope = lit->scope()->GetOuterScopeWithContext(); + if (outer_scope) { + // Use the raw accessor since we have to replace the existing outer + // scope. + set_raw_outer_scope_info_or_feedback_metadata( + *outer_scope->scope_info()); + } + } } + SetFunctionTokenPosition(lit->function_token_position(), + lit->start_position()); } // static diff --git a/deps/v8/src/objects/shared-function-info.h b/deps/v8/src/objects/shared-function-info.h index 15038faa5461f9..b0a73066559abd 100644 --- a/deps/v8/src/objects/shared-function-info.h +++ b/deps/v8/src/objects/shared-function-info.h @@ -253,9 +253,8 @@ class SharedFunctionInfo // Start position of this function in the script source. V8_EXPORT_PRIVATE int StartPosition() const; - // Set the start and end position of this function in the script source. - // Updates the scope info if available. - V8_EXPORT_PRIVATE void SetPosition(int start_position, int end_position); + V8_EXPORT_PRIVATE void UpdateFromFunctionLiteralForLiveEdit( + FunctionLiteral* lit); // [outer scope info | feedback metadata] Shared storage for outer scope info // (on uncompiled functions) and feedback metadata (on compiled functions). diff --git a/deps/v8/src/objects/simd.cc b/deps/v8/src/objects/simd.cc index 54dd3a0809bbf9..4fa55e4cc76597 100644 --- a/deps/v8/src/objects/simd.cc +++ b/deps/v8/src/objects/simd.cc @@ -94,7 +94,7 @@ inline uintptr_t slow_search(T* array, uintptr_t array_len, uintptr_t index, // is uint64_t[2], and not uint32_t[4]. // C++ standard dictates that a union can only be initialized through its first // member, which forces us to have uint64_t[2] for definition. 
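The comment above is the crux of the simd.cc guard change: aggregate initialization of a union goes through its first member, which forces the uint64_t[2] spelling under MSVC — and clang-cl, which defines _MSC_VER but parses initializers like clang, presumably needs the non-MSVC path, hence the added !defined(__clang__). A standalone illustration of the constraint (assumes a little-endian target; reading the inactive union member is the usual, implementation-sanctioned type pun):

#include <cstdint>
#include <cstdio>

// The union must be initialized through its *first* member, so four 32-bit
// lanes have to be packed into two 64-bit values up front.
union Lanes {
  uint64_t u64[2];
  uint32_t u32[4];
};

constexpr uint64_t Pack(uint32_t lo, uint32_t hi) {
  return static_cast<uint64_t>(lo) + (static_cast<uint64_t>(hi) << 32);
}

int main() {
  // Equivalent of PACK32x4(1, 2, 3, 4) on a little-endian target.
  Lanes lanes = {{Pack(1, 2), Pack(3, 4)}};
  std::printf("%u %u %u %u\n", lanes.u32[0], lanes.u32[1], lanes.u32[2],
              lanes.u32[3]);  // prints "1 2 3 4"
  return 0;
}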
-#if defined(_MSC_VER) +#if defined(_MSC_VER) && !defined(__clang__) #define PACK32x4(w, x, y, z) \ { ((w) + (uint64_t(x) << 32)), ((y) + (uint64_t(z) << 32)) } #else diff --git a/deps/v8/src/objects/slots-inl.h b/deps/v8/src/objects/slots-inl.h index 989a553f81e03f..9f97aba2b34e41 100644 --- a/deps/v8/src/objects/slots-inl.h +++ b/deps/v8/src/objects/slots-inl.h @@ -272,7 +272,7 @@ inline void CopyTagged(Address dst, const Address src, size_t num_tagged) { // Sets |counter| number of kTaggedSize-sized values starting at |start| slot. inline void MemsetTagged(Tagged_t* start, Object value, size_t counter) { #ifdef V8_COMPRESS_POINTERS - Tagged_t raw_value = CompressTagged(value.ptr()); + Tagged_t raw_value = V8HeapCompressionScheme::CompressTagged(value.ptr()); MemsetUint32(start, raw_value, counter); #else Address raw_value = value.ptr(); diff --git a/deps/v8/src/objects/string-forwarding-table.cc b/deps/v8/src/objects/string-forwarding-table.cc index 0a6462b6135b8d..53366ed2bbc3b3 100644 --- a/deps/v8/src/objects/string-forwarding-table.cc +++ b/deps/v8/src/objects/string-forwarding-table.cc @@ -261,6 +261,11 @@ uint32_t StringForwardingTable::GetRawHash(PtrComprCageBase cage_base, return block->record(index_in_block)->raw_hash(cage_base); } +// static +uint32_t StringForwardingTable::GetRawHashStatic(Isolate* isolate, int index) { + return isolate->string_forwarding_table()->GetRawHash(isolate, index); +} + v8::String::ExternalStringResourceBase* StringForwardingTable::GetExternalResource(int index, bool* is_one_byte) const { CHECK_LT(index, size()); diff --git a/deps/v8/src/objects/string-forwarding-table.h b/deps/v8/src/objects/string-forwarding-table.h index 72e4d73c0ba536..3cf7d3280b258f 100644 --- a/deps/v8/src/objects/string-forwarding-table.h +++ b/deps/v8/src/objects/string-forwarding-table.h @@ -56,6 +56,7 @@ class StringForwardingTable { static Address GetForwardStringAddress(Isolate* isolate, int index); V8_EXPORT_PRIVATE uint32_t GetRawHash(PtrComprCageBase cage_base, int index) const; + static uint32_t GetRawHashStatic(Isolate* isolate, int index); v8::String::ExternalStringResourceBase* GetExternalResource( int index, bool* is_one_byte) const; diff --git a/deps/v8/src/objects/string.cc b/deps/v8/src/objects/string.cc index 7060047d8eb55c..cf7b0afd927432 100644 --- a/deps/v8/src/objects/string.cc +++ b/deps/v8/src/objects/string.cc @@ -198,7 +198,9 @@ void InitExternalPointerFieldsDuringExternalization(String string, Map new_map, } // namespace template <typename IsolateT> -void String::MakeThin(IsolateT* isolate, String internalized) { +void String::MakeThin( + IsolateT* isolate, String internalized, + UpdateInvalidatedObjectSize update_invalidated_object_size) { DisallowGarbageCollection no_gc; DCHECK_NE(*this, internalized); DCHECK(internalized.IsInternalizedString()); @@ -218,7 +220,7 @@ void String::MakeThin(IsolateT* isolate, String internalized) { } #endif - bool has_pointers = initial_shape.IsIndirect(); + bool may_contain_recorded_slots = initial_shape.IsIndirect(); int old_size = SizeFromMap(initial_map); Map target_map = ComputeThinStringMap(isolate, initial_shape, internalized.IsOneByteRepresentation()); @@ -232,6 +234,12 @@ void String::MakeThin(IsolateT* isolate, String internalized) { isolate->AsIsolate()->heap()->NotifyObjectLayoutChange( *this, no_gc, InvalidateRecordedSlots::kYes, ThinString::kSize); MigrateExternalString(isolate->AsIsolate(), *this, internalized); + + // Conservatively assume ExternalStrings may have recorded slots, because + // they 
could have been transitioned from ConsStrings without having had the + // recorded slots cleared. + // TODO(v8:13374): Fix this more uniformly. + may_contain_recorded_slots = true; } // Update actual first and then do release store on the map word. This ensures @@ -249,21 +257,26 @@ void String::MakeThin(IsolateT* isolate, String internalized) { int size_delta = old_size - ThinString::kSize; if (size_delta != 0) { if (!Heap::IsLargeObject(thin)) { - isolate->heap()->NotifyObjectSizeChange( - thin, old_size, ThinString::kSize, - has_pointers ? ClearRecordedSlots::kYes : ClearRecordedSlots::kNo); + isolate->heap()->NotifyObjectSizeChange(thin, old_size, ThinString::kSize, + may_contain_recorded_slots + ? ClearRecordedSlots::kYes + : ClearRecordedSlots::kNo, + update_invalidated_object_size); } else { // We don't need special handling for the combination IsLargeObject && - // has_pointers, because indirect strings never get that large. - DCHECK(!has_pointers); + // may_contain_recorded_slots, because indirect strings never get that + // large. + DCHECK(!may_contain_recorded_slots); } } } template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) void String::MakeThin( - Isolate* isolate, String internalized); + Isolate* isolate, String internalized, + UpdateInvalidatedObjectSize update_invalidated_object_size); template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) void String::MakeThin( - LocalIsolate* isolate, String internalized); + LocalIsolate* isolate, String internalized, + UpdateInvalidatedObjectSize update_invalidated_object_size); template <typename T> bool String::MarkForExternalizationDuringGC(Isolate* isolate, T* resource) { @@ -1687,7 +1700,7 @@ uint32_t String::ComputeAndSetRawHash( string = ThinString::cast(string).actual(cage_base); shape = StringShape(string, cage_base); if (length() == string.length()) { - uint32_t raw_hash = string.raw_hash_field(); + uint32_t raw_hash = string.RawHash(); DCHECK(IsHashFieldComputed(raw_hash)); set_raw_hash_field(raw_hash); return raw_hash; diff --git a/deps/v8/src/objects/string.h b/deps/v8/src/objects/string.h index 7e86f0929c912e..ac56001bd14256 100644 --- a/deps/v8/src/objects/string.h +++ b/deps/v8/src/objects/string.h @@ -11,6 +11,7 @@ #include "src/base/export-template.h" #include "src/base/strings.h" #include "src/common/globals.h" +#include "src/heap/heap.h" #include "src/objects/instance-type.h" #include "src/objects/map.h" #include "src/objects/name.h" @@ -194,7 +195,9 @@ class String : public TorqueGeneratedString<String, Name> { template <typename IsolateT> EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) - void MakeThin(IsolateT* isolate, String canonical); + void MakeThin(IsolateT* isolate, String canonical, + UpdateInvalidatedObjectSize update_invalidated_object_size = + UpdateInvalidatedObjectSize::kYes); template <typename Char> V8_INLINE base::Vector<const Char> GetCharVector( diff --git a/deps/v8/src/objects/tagged-field-inl.h b/deps/v8/src/objects/tagged-field-inl.h index 3ed08a95c95144..1c8e9a8f0c6f6b 100644 --- a/deps/v8/src/objects/tagged-field-inl.h +++ b/deps/v8/src/objects/tagged-field-inl.h @@ -13,29 +13,32 @@ namespace v8 { namespace internal { // static -template <typename T, int kFieldOffset> -Address TaggedField<T, kFieldOffset>::address(HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +Address TaggedField<T, kFieldOffset, CompressionScheme>::address( + HeapObject host, int offset) { return host.address() + kFieldOffset + offset; } // static -template <typename T, int 
kFieldOffset> -Tagged_t* TaggedField<T, kFieldOffset>::location(HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +Tagged_t* TaggedField<T, kFieldOffset, CompressionScheme>::location( + HeapObject host, int offset) { return reinterpret_cast<Tagged_t*>(address(host, offset)); } // static -template <typename T, int kFieldOffset> +template <typename T, int kFieldOffset, typename CompressionScheme> template <typename TOnHeapAddress> -Address TaggedField<T, kFieldOffset>::tagged_to_full( +Address TaggedField<T, kFieldOffset, CompressionScheme>::tagged_to_full( TOnHeapAddress on_heap_addr, Tagged_t tagged_value) { #ifdef V8_COMPRESS_POINTERS if (kIsSmi) { - return DecompressTaggedSigned(tagged_value); + return CompressionScheme::DecompressTaggedSigned(tagged_value); } else if (kIsHeapObject) { - return DecompressTaggedPointer(on_heap_addr, tagged_value); + return CompressionScheme::DecompressTaggedPointer(on_heap_addr, + tagged_value); } else { - return DecompressTaggedAny(on_heap_addr, tagged_value); + return CompressionScheme::DecompressTaggedAny(on_heap_addr, tagged_value); } #else return tagged_value; @@ -43,35 +46,38 @@ Address TaggedField<T, kFieldOffset>::tagged_to_full( } // static -template <typename T, int kFieldOffset> -Tagged_t TaggedField<T, kFieldOffset>::full_to_tagged(Address value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +Tagged_t TaggedField<T, kFieldOffset, CompressionScheme>::full_to_tagged( + Address value) { #ifdef V8_COMPRESS_POINTERS - return CompressTagged(value); + return CompressionScheme::CompressTagged(value); #else return value; #endif } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::load(HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::load(HeapObject host, + int offset) { Tagged_t value = *location(host, offset); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); return T(tagged_to_full(host.ptr(), value)); } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::load(PtrComprCageBase cage_base, - HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::load( + PtrComprCageBase cage_base, HeapObject host, int offset) { Tagged_t value = *location(host, offset); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); return T(tagged_to_full(cage_base, value)); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::store(HeapObject host, T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::store(HeapObject host, + T value) { #ifdef V8_ATOMIC_OBJECT_FIELD_WRITES Relaxed_Store(host, value); #else @@ -82,8 +88,10 @@ void TaggedField<T, kFieldOffset>::store(HeapObject host, T value) { } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::store(HeapObject host, int offset, T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::store(HeapObject host, + int offset, + T value) { #ifdef V8_ATOMIC_OBJECT_FIELD_WRITES Relaxed_Store(host, offset, value); #else @@ -94,107 +102,113 @@ void TaggedField<T, kFieldOffset>::store(HeapObject host, int offset, T value) { } // static -template <typename T, int 
kFieldOffset> -T TaggedField<T, kFieldOffset>::Relaxed_Load(HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::Relaxed_Load(HeapObject host, + int offset) { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location(host, offset)); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); return T(tagged_to_full(host.ptr(), value)); } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::Relaxed_Load(PtrComprCageBase cage_base, - HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::Relaxed_Load( + PtrComprCageBase cage_base, HeapObject host, int offset) { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location(host, offset)); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); return T(tagged_to_full(cage_base, value)); } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::Relaxed_Load_Map_Word( +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::Relaxed_Load_Map_Word( PtrComprCageBase cage_base, HeapObject host) { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location(host, 0)); return T(tagged_to_full(cage_base, value)); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::Relaxed_Store_Map_Word(HeapObject host, - T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::Relaxed_Store_Map_Word( + HeapObject host, T value) { AsAtomicTagged::Relaxed_Store(location(host), full_to_tagged(value.ptr())); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::Relaxed_Store(HeapObject host, T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::Relaxed_Store( + HeapObject host, T value) { Address ptr = value.ptr(); DCHECK_NE(kFieldOffset, HeapObject::kMapOffset); AsAtomicTagged::Relaxed_Store(location(host), full_to_tagged(ptr)); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::Relaxed_Store(HeapObject host, int offset, - T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::Relaxed_Store( + HeapObject host, int offset, T value) { Address ptr = value.ptr(); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); AsAtomicTagged::Relaxed_Store(location(host, offset), full_to_tagged(ptr)); } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::Acquire_Load(HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::Acquire_Load(HeapObject host, + int offset) { AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location(host, offset)); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); return T(tagged_to_full(host.ptr(), value)); } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::Acquire_Load_No_Unpack( +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::Acquire_Load_No_Unpack( PtrComprCageBase cage_base, HeapObject host, int offset) { AtomicTagged_t value = 
AsAtomicTagged::Acquire_Load(location(host, offset)); return T(tagged_to_full(cage_base, value)); } -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::Acquire_Load(PtrComprCageBase cage_base, - HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::Acquire_Load( + PtrComprCageBase cage_base, HeapObject host, int offset) { AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location(host, offset)); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); return T(tagged_to_full(cage_base, value)); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::Release_Store(HeapObject host, T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::Release_Store( + HeapObject host, T value) { Address ptr = value.ptr(); DCHECK_NE(kFieldOffset, HeapObject::kMapOffset); AsAtomicTagged::Release_Store(location(host), full_to_tagged(ptr)); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::Release_Store_Map_Word(HeapObject host, - T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::Release_Store_Map_Word( + HeapObject host, T value) { Address ptr = value.ptr(); AsAtomicTagged::Release_Store(location(host), full_to_tagged(ptr)); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::Release_Store(HeapObject host, int offset, - T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::Release_Store( + HeapObject host, int offset, T value) { Address ptr = value.ptr(); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); AsAtomicTagged::Release_Store(location(host, offset), full_to_tagged(ptr)); } // static -template <typename T, int kFieldOffset> -Tagged_t TaggedField<T, kFieldOffset>::Release_CompareAndSwap(HeapObject host, - T old, T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +Tagged_t TaggedField<T, kFieldOffset, + CompressionScheme>::Release_CompareAndSwap(HeapObject host, + T old, + T value) { Tagged_t old_value = full_to_tagged(old.ptr()); Tagged_t new_value = full_to_tagged(value.ptr()); Tagged_t result = AsAtomicTagged::Release_CompareAndSwap( @@ -203,43 +217,46 @@ Tagged_t TaggedField<T, kFieldOffset>::Release_CompareAndSwap(HeapObject host, } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::SeqCst_Load(HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::SeqCst_Load(HeapObject host, + int offset) { AtomicTagged_t value = AsAtomicTagged::SeqCst_Load(location(host, offset)); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); return T(tagged_to_full(host.ptr(), value)); } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::SeqCst_Load(PtrComprCageBase cage_base, - HeapObject host, int offset) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::SeqCst_Load( + PtrComprCageBase cage_base, HeapObject host, int offset) { AtomicTagged_t value = AsAtomicTagged::SeqCst_Load(location(host, offset)); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); return 
T(tagged_to_full(cage_base, value)); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::SeqCst_Store(HeapObject host, T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::SeqCst_Store( + HeapObject host, T value) { Address ptr = value.ptr(); DCHECK_NE(kFieldOffset, HeapObject::kMapOffset); AsAtomicTagged::SeqCst_Store(location(host), full_to_tagged(ptr)); } // static -template <typename T, int kFieldOffset> -void TaggedField<T, kFieldOffset>::SeqCst_Store(HeapObject host, int offset, - T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +void TaggedField<T, kFieldOffset, CompressionScheme>::SeqCst_Store( + HeapObject host, int offset, T value) { Address ptr = value.ptr(); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); AsAtomicTagged::SeqCst_Store(location(host, offset), full_to_tagged(ptr)); } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::SeqCst_Swap(HeapObject host, int offset, - T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::SeqCst_Swap(HeapObject host, + int offset, + T value) { Address ptr = value.ptr(); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); AtomicTagged_t old_value = @@ -248,10 +265,9 @@ T TaggedField<T, kFieldOffset>::SeqCst_Swap(HeapObject host, int offset, } // static -template <typename T, int kFieldOffset> -T TaggedField<T, kFieldOffset>::SeqCst_Swap(PtrComprCageBase cage_base, - HeapObject host, int offset, - T value) { +template <typename T, int kFieldOffset, typename CompressionScheme> +T TaggedField<T, kFieldOffset, CompressionScheme>::SeqCst_Swap( + PtrComprCageBase cage_base, HeapObject host, int offset, T value) { Address ptr = value.ptr(); DCHECK_NE(kFieldOffset + offset, HeapObject::kMapOffset); AtomicTagged_t old_value = diff --git a/deps/v8/src/objects/tagged-field.h b/deps/v8/src/objects/tagged-field.h index 9410878c5ee652..a585a66a7a86d4 100644 --- a/deps/v8/src/objects/tagged-field.h +++ b/deps/v8/src/objects/tagged-field.h @@ -6,19 +6,19 @@ #define V8_OBJECTS_TAGGED_FIELD_H_ #include "src/common/globals.h" - +#include "src/common/ptr-compr.h" #include "src/objects/objects.h" #include "src/objects/tagged-value.h" -namespace v8 { -namespace internal { +namespace v8::internal { // This helper static class represents a tagged field of type T at offset // kFieldOffset inside some host HeapObject. // For full-pointer mode this type adds no overhead but when pointer // compression is enabled such class allows us to use proper decompression // function depending on the field type. 
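
// [Editor's sketch] The CompressionScheme template parameter threaded
// through tagged-field{-inl}.h above is a stateless policy type. From the
// calls the patch makes (CompressTagged, DecompressTaggedSigned,
// DecompressTaggedPointer, DecompressTaggedAny), a conforming scheme is
// assumed to look roughly like this; the real V8HeapCompressionScheme is
// declared in src/common/ptr-compr.h and may differ in detail:
struct SketchCompressionScheme {
  // Compress a full tagged pointer into its 32-bit on-heap representation.
  static Tagged_t CompressTagged(Address value);
  // Decompress a value statically known to be a Smi.
  static Address DecompressTaggedSigned(Tagged_t value);
  // Decompress a HeapObject pointer; an on-heap address (or cage base)
  // supplies the upper pointer bits.
  template <typename TOnHeapAddress>
  static Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr,
                                         Tagged_t value);
  // Decompress a value that may be either a Smi or a HeapObject pointer.
  template <typename TOnHeapAddress>
  static Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
                                     Tagged_t value);
};
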
-template <typename T, int kFieldOffset = 0> +template <typename T, int kFieldOffset = 0, + typename CompressionScheme = V8HeapCompressionScheme> class TaggedField : public AllStatic { public: static_assert(std::is_base_of<Object, T>::value || @@ -91,7 +91,6 @@ class TaggedField : public AllStatic { static inline Tagged_t full_to_tagged(Address value); }; -} // namespace internal -} // namespace v8 +} // namespace v8::internal #endif // V8_OBJECTS_TAGGED_FIELD_H_ diff --git a/deps/v8/src/objects/tagged-impl-inl.h b/deps/v8/src/objects/tagged-impl-inl.h index 909f65a959ef4b..4ce915730d0752 100644 --- a/deps/v8/src/objects/tagged-impl-inl.h +++ b/deps/v8/src/objects/tagged-impl-inl.h @@ -34,7 +34,8 @@ Smi TaggedImpl<kRefType, StorageType>::ToSmi() const { return Smi(ptr_); } // Implementation for compressed pointers. - return Smi(DecompressTaggedSigned(static_cast<Tagged_t>(ptr_))); + return Smi( + CompressionScheme::DecompressTaggedSigned(static_cast<Tagged_t>(ptr_))); } // @@ -111,8 +112,9 @@ bool TaggedImpl<kRefType, StorageType>::GetHeapObjectIfStrong( if (kIsFull) return GetHeapObjectIfStrong(result); // Implementation for compressed pointers. if (IsStrong()) { - *result = HeapObject::cast( - Object(DecompressTaggedPointer(isolate, static_cast<Tagged_t>(ptr_)))); + *result = + HeapObject::cast(Object(CompressionScheme::DecompressTaggedPointer( + isolate, static_cast<Tagged_t>(ptr_)))); return true; } return false; @@ -136,8 +138,8 @@ HeapObject TaggedImpl<kRefType, StorageType>::GetHeapObjectAssumeStrong( if (kIsFull) return GetHeapObjectAssumeStrong(); // Implementation for compressed pointers. DCHECK(IsStrong()); - return HeapObject::cast( - Object(DecompressTaggedPointer(isolate, static_cast<Tagged_t>(ptr_)))); + return HeapObject::cast(Object(CompressionScheme::DecompressTaggedPointer( + isolate, static_cast<Tagged_t>(ptr_)))); } // @@ -222,12 +224,12 @@ HeapObject TaggedImpl<kRefType, StorageType>::GetHeapObject( DCHECK(!IsSmi()); if (kCanBeWeak) { DCHECK(!IsCleared()); - return HeapObject::cast(Object(DecompressTaggedPointer( + return HeapObject::cast(Object(CompressionScheme::DecompressTaggedPointer( isolate, static_cast<Tagged_t>(ptr_) & ~kWeakHeapObjectMask))); } else { DCHECK(!HAS_WEAK_HEAP_OBJECT_TAG(ptr_)); - return HeapObject::cast( - Object(DecompressTaggedPointer(isolate, static_cast<Tagged_t>(ptr_)))); + return HeapObject::cast(Object(CompressionScheme::DecompressTaggedPointer( + isolate, static_cast<Tagged_t>(ptr_)))); } } @@ -250,7 +252,8 @@ Object TaggedImpl<kRefType, StorageType>::GetHeapObjectOrSmi( if (kIsFull) return GetHeapObjectOrSmi(); // Implementation for compressed pointers. if (IsSmi()) { - return Object(DecompressTaggedSigned(static_cast<Tagged_t>(ptr_))); + return Object( + CompressionScheme::DecompressTaggedSigned(static_cast<Tagged_t>(ptr_))); } return GetHeapObject(isolate); } diff --git a/deps/v8/src/objects/tagged-impl.h b/deps/v8/src/objects/tagged-impl.h index 7840a087e21f5b..f4445b4ecf3340 100644 --- a/deps/v8/src/objects/tagged-impl.h +++ b/deps/v8/src/objects/tagged-impl.h @@ -30,6 +30,10 @@ bool V8_EXPORT_PRIVATE CheckObjectComparisonAllowed(Address a, Address b); template <HeapObjectReferenceType kRefType, typename StorageType> class TaggedImpl { public: + // Compressed TaggedImpl are never used for external Code pointers, so + // we can use this shorter alias for calling decompression functions. 
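
// [Editor's note, speculative] The comment above hints at why the scheme is
// a template parameter at all: a field holding a pointer into a different
// cage (e.g. external Code pointers) could select a different scheme, along
// the lines of (names invented for illustration)
//
//   using CodePointerField =
//       TaggedField<Object, kSomeOffset, SomeCodeCompressionScheme>;
//
// TaggedImpl, by contrast, never holds external Code pointers, so it can
// hard-wire V8HeapCompressionScheme via the alias that follows.
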
+ using CompressionScheme = V8HeapCompressionScheme; + static_assert(std::is_same<StorageType, Address>::value || std::is_same<StorageType, Tagged_t>::value, "StorageType must be either Address or Tagged_t"); diff --git a/deps/v8/src/objects/tagged-value-inl.h b/deps/v8/src/objects/tagged-value-inl.h index f409a4006bc529..4ca8739367dcdf 100644 --- a/deps/v8/src/objects/tagged-value-inl.h +++ b/deps/v8/src/objects/tagged-value-inl.h @@ -21,7 +21,7 @@ namespace internal { inline StrongTaggedValue::StrongTaggedValue(Object o) : #ifdef V8_COMPRESS_POINTERS - TaggedImpl(CompressTagged(o.ptr())) + TaggedImpl(CompressionScheme::CompressTagged(o.ptr())) #else TaggedImpl(o.ptr()) #endif @@ -30,7 +30,7 @@ inline StrongTaggedValue::StrongTaggedValue(Object o) Object StrongTaggedValue::ToObject(Isolate* isolate, StrongTaggedValue object) { #ifdef V8_COMPRESS_POINTERS - return Object(DecompressTaggedAny(isolate, object.ptr())); + return Object(CompressionScheme::DecompressTaggedAny(isolate, object.ptr())); #else return Object(object.ptr()); #endif @@ -39,7 +39,7 @@ Object StrongTaggedValue::ToObject(Isolate* isolate, StrongTaggedValue object) { inline TaggedValue::TaggedValue(MaybeObject o) : #ifdef V8_COMPRESS_POINTERS - TaggedImpl(CompressTagged(o.ptr())) + TaggedImpl(CompressionScheme::CompressTagged(o.ptr())) #else TaggedImpl(o.ptr()) #endif @@ -48,7 +48,8 @@ inline TaggedValue::TaggedValue(MaybeObject o) MaybeObject TaggedValue::ToMaybeObject(Isolate* isolate, TaggedValue object) { #ifdef V8_COMPRESS_POINTERS - return MaybeObject(DecompressTaggedAny(isolate, object.ptr())); + return MaybeObject( + CompressionScheme::DecompressTaggedAny(isolate, object.ptr())); #else return MaybeObject(object.ptr()); #endif diff --git a/deps/v8/src/objects/template-objects.cc b/deps/v8/src/objects/template-objects.cc index 0bcca301d970e7..12eb1d18ee4675 100644 --- a/deps/v8/src/objects/template-objects.cc +++ b/deps/v8/src/objects/template-objects.cc @@ -44,33 +44,10 @@ Handle<JSArray> TemplateObjectDescription::GetTemplateObject( // Create the raw object from the {raw_strings}. Handle<FixedArray> raw_strings(description->raw_strings(), isolate); - Handle<JSArray> raw_object = isolate->factory()->NewJSArrayWithElements( - raw_strings, PACKED_ELEMENTS, raw_strings->length(), - AllocationType::kOld); - - // Create the template object from the {cooked_strings}. Handle<FixedArray> cooked_strings(description->cooked_strings(), isolate); - Handle<JSArray> template_object = isolate->factory()->NewJSArrayWithElements( - cooked_strings, PACKED_ELEMENTS, cooked_strings->length(), - AllocationType::kOld); - - // Freeze the {raw_object}. - JSObject::SetIntegrityLevel(raw_object, FROZEN, kThrowOnError).ToChecked(); - - // Install a "raw" data property for {raw_object} on {template_object}. - PropertyDescriptor raw_desc; - raw_desc.set_value(raw_object); - raw_desc.set_configurable(false); - raw_desc.set_enumerable(false); - raw_desc.set_writable(false); - JSArray::DefineOwnProperty(isolate, template_object, - isolate->factory()->raw_string(), &raw_desc, - Just(kThrowOnError)) - .ToChecked(); - - // Freeze the {template_object} as well. - JSObject::SetIntegrityLevel(template_object, FROZEN, kThrowOnError) - .ToChecked(); + Handle<JSArray> template_object = + isolate->factory()->NewJSArrayForTemplateLiteralArray(cooked_strings, + raw_strings); // Insert the template object into the template weakmap. 
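
// [Editor's note] NewJSArrayForTemplateLiteralArray is assumed to fold
// together everything the deleted lines above did by hand: build the cooked
// array, build and freeze the raw array, install it as the non-writable,
// non-enumerable, non-configurable "raw" property, and freeze the result.
// That is the ECMAScript template object a tagged template's callee
// receives: for tag`a${x}b`, a frozen ["a", "b"] with a frozen .raw array
// attached.
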
Handle<HeapObject> previous_cached_templates = handle( diff --git a/deps/v8/src/objects/type-hints.cc b/deps/v8/src/objects/type-hints.cc index 93924a1b951ace..a099ea12cca0b6 100644 --- a/deps/v8/src/objects/type-hints.cc +++ b/deps/v8/src/objects/type-hints.cc @@ -23,6 +23,8 @@ std::ostream& operator<<(std::ostream& os, BinaryOperationHint hint) { return os << "String"; case BinaryOperationHint::kBigInt: return os << "BigInt"; + case BinaryOperationHint::kBigInt64: + return os << "BigInt64"; case BinaryOperationHint::kAny: return os << "Any"; } diff --git a/deps/v8/src/objects/type-hints.h b/deps/v8/src/objects/type-hints.h index 2638c128bd7aea..d9ed88057760f9 100644 --- a/deps/v8/src/objects/type-hints.h +++ b/deps/v8/src/objects/type-hints.h @@ -20,6 +20,7 @@ enum class BinaryOperationHint : uint8_t { kNumberOrOddball, kString, kBigInt, + kBigInt64, kAny }; diff --git a/deps/v8/src/objects/value-serializer.cc b/deps/v8/src/objects/value-serializer.cc index 3745f66da5975b..61a7cae8e8a900 100644 --- a/deps/v8/src/objects/value-serializer.cc +++ b/deps/v8/src/objects/value-serializer.cc @@ -1096,7 +1096,7 @@ Maybe<bool> ValueSerializer::WriteWasmMemory(Handle<WasmMemoryObject> object) { #endif // V8_ENABLE_WEBASSEMBLY Maybe<bool> ValueSerializer::WriteSharedObject(Handle<HeapObject> object) { - if (!delegate_ || isolate_->shared_isolate() == nullptr) { + if (!delegate_ || !isolate_->has_shared_heap()) { return ThrowDataCloneError(MessageTemplate::kDataCloneError, object); } @@ -1340,6 +1340,9 @@ Maybe<T> ValueDeserializer::ReadVarintLoop() { // Since {value} is not modified in this branch we can safely skip the // DCHECK when fuzzing. DCHECK_IMPLIES(!v8_flags.fuzzing, !has_another_byte); + // For consistency with the fast unrolled loop in ReadVarint we return + // after we have read sizeof(T) + 1 bytes. + return Just(value); } position_++; } while (has_another_byte); @@ -1476,6 +1479,12 @@ MaybeHandle<Object> ValueDeserializer::ReadObject() { isolate_->Throw(*isolate_->factory()->NewError( MessageTemplate::kDataCloneDeserializationError)); } +#if defined(DEBUG) && defined(VERIFY_HEAP) + if (!result.is_null() && v8_flags.enable_slow_asserts && + v8_flags.verify_heap) { + object->ObjectVerify(isolate_); + } +#endif return result; } @@ -2100,7 +2109,7 @@ bool ValueDeserializer::ValidateAndSetJSArrayBufferViewFlags( if (!v8_flags.harmony_rab_gsab) { return false; } - if (!buffer.is_resizable()) { + if (!buffer.is_resizable_by_js()) { return false; } if (is_backed_by_rab && buffer.is_shared()) { diff --git a/deps/v8/src/objects/visitors.h b/deps/v8/src/objects/visitors.h index 49ec0bfb2bb297..742188fd8f91c2 100644 --- a/deps/v8/src/objects/visitors.h +++ b/deps/v8/src/objects/visitors.h @@ -161,9 +161,6 @@ class ObjectVisitor { // Visit pointer embedded into a code object. virtual void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) = 0; - // Visits a runtime entry in the instruction stream. - virtual void VisitRuntimeEntry(Code host, RelocInfo* rinfo) {} - // Visits an external reference embedded into a code object.
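
// [Editor's sketch] Referring back to the ReadVarintLoop change in
// value-serializer.cc above: the new early return makes the slow loop stop
// after the same number of bytes as the unrolled fast path. A self-contained
// base-128 varint decoder with that cap might look like the following
// (illustrative only; V8's real ReadVarint/ReadVarintLoop track position_
// and end_ and carry additional DCHECKs):
#include <cstddef>
#include <cstdint>
#include <optional>
#include <type_traits>

template <typename T>
std::optional<T> DecodeVarint(const uint8_t* data, size_t size) {
  static_assert(std::is_unsigned<T>::value, "varints decode to unsigned types");
  T value = 0;
  for (size_t i = 0; i < size; ++i) {
    // Each byte contributes its low 7 bits, least-significant group first.
    value |= static_cast<T>(data[i] & 0x7f) << (i * 7);
    // A clear high bit marks the last byte of the varint.
    if ((data[i] & 0x80) == 0) return value;
    // Cap mirroring the patch comment: stop consuming continuation bytes
    // once sizeof(T) + 1 bytes have been read.
    if (i + 1 == sizeof(T) + 1) return value;
  }
  return std::nullopt;  // input ended while a continuation bit was set
}
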
virtual void VisitExternalReference(Code host, RelocInfo* rinfo) {} diff --git a/deps/v8/src/parsing/parse-info.cc b/deps/v8/src/parsing/parse-info.cc index f773cd5d41bb45..9c0eee6d00bd0e 100644 --- a/deps/v8/src/parsing/parse-info.cc +++ b/deps/v8/src/parsing/parse-info.cc @@ -28,19 +28,18 @@ UnoptimizedCompileFlags::UnoptimizedCompileFlags(Isolate* isolate, function_kind_(FunctionKind::kNormalFunction), function_syntax_kind_(FunctionSyntaxKind::kDeclaration), parsing_while_debugging_(ParsingWhileDebugging::kNo) { - set_collect_type_profile(isolate->is_collecting_type_profile()); set_coverage_enabled(!isolate->is_best_effort_code_coverage()); set_block_coverage_enabled(isolate->is_block_code_coverage()); - set_might_always_turbofan(FLAG_always_turbofan || - FLAG_prepare_always_turbofan); - set_allow_natives_syntax(FLAG_allow_natives_syntax); + set_might_always_turbofan(v8_flags.always_turbofan || + v8_flags.prepare_always_turbofan); + set_allow_natives_syntax(v8_flags.allow_natives_syntax); set_allow_lazy_compile(true); - set_collect_source_positions(!FLAG_enable_lazy_source_positions || + set_collect_source_positions(!v8_flags.enable_lazy_source_positions || isolate->NeedsDetailedOptimizedCodeLineInfo()); set_post_parallel_compile_tasks_for_eager_toplevel( - FLAG_parallel_compile_tasks_for_eager_toplevel); + v8_flags.parallel_compile_tasks_for_eager_toplevel); set_post_parallel_compile_tasks_for_lazy( - FLAG_parallel_compile_tasks_for_lazy); + v8_flags.parallel_compile_tasks_for_lazy); } // static @@ -60,15 +59,6 @@ UnoptimizedCompileFlags UnoptimizedCompileFlags::ForFunctionCompile( #endif // V8_ENABLE_WEBASSEMBLY flags.set_is_repl_mode(shared.is_repl_mode()); - // CollectTypeProfile uses its own feedback slots. If we have existing - // FeedbackMetadata, we can only collect type profile if the feedback vector - // has the appropriate slots. - flags.set_collect_type_profile( - isolate->is_collecting_type_profile() && - (shared.HasFeedbackMetadata() - ? shared.feedback_metadata().HasTypeProfileSlot() - : script.IsUserJavaScript())); - // Do not support re-parsing top-level function of a wrapped script. DCHECK_IMPLIES(flags.is_toplevel(), !script.is_wrapped()); @@ -82,11 +72,11 @@ UnoptimizedCompileFlags UnoptimizedCompileFlags::ForScriptCompile( flags.SetFlagsForFunctionFromScript(script); flags.SetFlagsForToplevelCompile( - isolate->is_collecting_type_profile(), script.IsUserJavaScript(), - flags.outer_language_mode(), construct_repl_mode(script.is_repl_mode()), + script.IsUserJavaScript(), flags.outer_language_mode(), + construct_repl_mode(script.is_repl_mode()), script.origin_options().IsModule() ? 
ScriptType::kModule : ScriptType::kClassic, - FLAG_lazy); + v8_flags.lazy); if (script.is_wrapped()) { flags.set_function_syntax_kind(FunctionSyntaxKind::kWrapped); } @@ -99,8 +89,7 @@ UnoptimizedCompileFlags UnoptimizedCompileFlags::ForToplevelCompile( Isolate* isolate, bool is_user_javascript, LanguageMode language_mode, REPLMode repl_mode, ScriptType type, bool lazy) { UnoptimizedCompileFlags flags(isolate, isolate->GetNextScriptId()); - flags.SetFlagsForToplevelCompile(isolate->is_collecting_type_profile(), - is_user_javascript, language_mode, repl_mode, + flags.SetFlagsForToplevelCompile(is_user_javascript, language_mode, repl_mode, type, lazy); LOG(isolate, ScriptEvent(V8FileLogger::ScriptEventType::kReserveId, @@ -143,13 +132,11 @@ void UnoptimizedCompileFlags::SetFlagsFromFunction(T function) { } void UnoptimizedCompileFlags::SetFlagsForToplevelCompile( - bool is_collecting_type_profile, bool is_user_javascript, - LanguageMode language_mode, REPLMode repl_mode, ScriptType type, - bool lazy) { + bool is_user_javascript, LanguageMode language_mode, REPLMode repl_mode, + ScriptType type, bool lazy) { set_is_toplevel(true); set_allow_lazy_parsing(lazy); set_allow_lazy_compile(lazy); - set_collect_type_profile(is_user_javascript && is_collecting_type_profile); set_outer_language_mode( stricter_language_mode(outer_language_mode(), language_mode)); set_is_repl_mode((repl_mode == REPLMode::kYes)); @@ -278,8 +265,7 @@ Handle<Script> ParseInfo::CreateScript( } else if (flags().is_eval()) { raw_script.set_compilation_type(Script::COMPILATION_TYPE_EVAL); } - CheckFlagsForToplevelCompileFromScript(raw_script, - isolate->is_collecting_type_profile()); + CheckFlagsForToplevelCompileFromScript(raw_script); return script; } @@ -309,12 +295,9 @@ void ParseInfo::set_character_stream( character_stream_.swap(character_stream); } -void ParseInfo::CheckFlagsForToplevelCompileFromScript( - Script script, bool is_collecting_type_profile) { +void ParseInfo::CheckFlagsForToplevelCompileFromScript(Script script) { CheckFlagsForFunctionFromScript(script); DCHECK(flags().is_toplevel()); - DCHECK_EQ(flags().collect_type_profile(), - is_collecting_type_profile && script.IsUserJavaScript()); DCHECK_EQ(flags().is_repl_mode(), script.is_repl_mode()); if (script.is_wrapped()) { diff --git a/deps/v8/src/parsing/parse-info.h b/deps/v8/src/parsing/parse-info.h index ebf567b1c1fb0a..e2e3bdf9d1847d 100644 --- a/deps/v8/src/parsing/parse-info.h +++ b/deps/v8/src/parsing/parse-info.h @@ -48,7 +48,6 @@ class Zone; V(is_module, bool, 1, _) \ V(allow_lazy_parsing, bool, 1, _) \ V(is_lazy_compile, bool, 1, _) \ - V(collect_type_profile, bool, 1, _) \ V(coverage_enabled, bool, 1, _) \ V(block_coverage_enabled, bool, 1, _) \ V(is_asm_wasm_broken, bool, 1, _) \ @@ -140,8 +139,7 @@ class V8_EXPORT_PRIVATE UnoptimizedCompileFlags { // SharedFunctionInfo |function| template <typename T> void SetFlagsFromFunction(T function); - void SetFlagsForToplevelCompile(bool is_collecting_type_profile, - bool is_user_javascript, + void SetFlagsForToplevelCompile(bool is_user_javascript, LanguageMode language_mode, REPLMode repl_mode, ScriptType type, bool lazy); @@ -345,8 +343,7 @@ class V8_EXPORT_PRIVATE ParseInfo { ReusableUnoptimizedCompileState* reusable_state, uintptr_t stack_limit, RuntimeCallStats* runtime_call_stats); - void CheckFlagsForToplevelCompileFromScript(Script script, - bool is_collecting_type_profile); + void CheckFlagsForToplevelCompileFromScript(Script script); //------------- Inputs to parsing and scope analysis 
----------------------- const UnoptimizedCompileFlags flags_; diff --git a/deps/v8/src/parsing/parser-base.h b/deps/v8/src/parsing/parser-base.h index 786d502ff86263..aff9c5f481fdce 100644 --- a/deps/v8/src/parsing/parser-base.h +++ b/deps/v8/src/parsing/parser-base.h @@ -459,7 +459,7 @@ class ParserBase { } void set_next_function_is_likely_called() { - next_function_is_likely_called_ = !FLAG_max_lazy; + next_function_is_likely_called_ = !v8_flags.max_lazy; } void RecordFunctionOrEvalCall() { contains_function_or_eval_ = true; } @@ -3735,7 +3735,7 @@ ParserBase<Impl>::ParseImportExpressions() { AcceptINScope scope(this, true); ExpressionT specifier = ParseAssignmentExpressionCoverGrammar(); - if (FLAG_harmony_import_assertions && Check(Token::COMMA)) { + if (v8_flags.harmony_import_assertions && Check(Token::COMMA)) { if (Check(Token::RPAREN)) { // A trailing comma allowed after the specifier. return factory()->NewImportCallExpression(specifier, pos); @@ -4737,7 +4737,7 @@ typename ParserBase<Impl>::ExpressionT ParserBase<Impl>::ParseClassLiteral( if (Check(Token::SEMICOLON)) continue; // Either we're parsing a `static { }` initialization block or a property. - if (FLAG_harmony_class_static_blocks && peek() == Token::STATIC && + if (v8_flags.harmony_class_static_blocks && peek() == Token::STATIC && PeekAhead() == Token::LBRACE) { BlockT static_block = ParseClassStaticBlock(&class_info); impl()->AddClassStaticBlock(static_block, &class_info); diff --git a/deps/v8/src/parsing/parser.cc b/deps/v8/src/parsing/parser.cc index bb5e65ec5240a4..aba8ca6271d9da 100644 --- a/deps/v8/src/parsing/parser.cc +++ b/deps/v8/src/parsing/parser.cc @@ -379,7 +379,7 @@ Expression* Parser::NewV8Intrinsic(const AstRawString* name, Runtime::FunctionForName(name->raw_data(), name->length()); // Be more permissive when fuzzing. Intrinsics are not supported. - if (FLAG_fuzzing) { + if (v8_flags.fuzzing) { return NewV8RuntimeFunctionForFuzzing(function, args, pos); } @@ -413,7 +413,7 @@ Expression* Parser::NewV8Intrinsic(const AstRawString* name, Expression* Parser::NewV8RuntimeFunctionForFuzzing( const Runtime::Function* function, const ScopedPtrList<Expression>& args, int pos) { - CHECK(FLAG_fuzzing); + CHECK(v8_flags.fuzzing); // Intrinsics are not supported for fuzzing. Only allow allowlisted runtime // functions. Also prevent later errors due to too few arguments and just @@ -1363,7 +1363,7 @@ ImportAssertions* Parser::ParseImportAssertClause() { auto import_assertions = zone()->New<ImportAssertions>(zone()); - if (!FLAG_harmony_import_assertions) { + if (!v8_flags.harmony_import_assertions) { return import_assertions; } diff --git a/deps/v8/src/profiler/cpu-profiler.cc b/deps/v8/src/profiler/cpu-profiler.cc index 154edae60a7456..f8641063a8857e 100644 --- a/deps/v8/src/profiler/cpu-profiler.cc +++ b/deps/v8/src/profiler/cpu-profiler.cc @@ -90,7 +90,7 @@ ProfilingScope::ProfilingScope(Isolate* isolate, ProfilerListener* listener) // callbacks on the heap. 
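
// [Editor's note] The recurring FLAG_foo -> v8_flags.foo substitutions
// throughout this patch are one mechanical migration: flags become members
// of a single flags object rather than free FLAG_ globals. A hypothetical
// miniature of the pattern (names and defaults invented for illustration):
//
//   struct FlagValues {
//     bool prof_browser_mode = false;
//     int cpu_profiler_sampling_interval = 1000;
//   };
//   extern FlagValues v8_flags;
//
// so a call site changes from `FLAG_prof_browser_mode` to
// `v8_flags.prof_browser_mode`, as in the hunk just below.
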
DCHECK(isolate_->heap()->HasBeenSetUp()); - if (!FLAG_prof_browser_mode) { + if (!v8_flags.prof_browser_mode) { logger->LogCodeObjects(); } logger->LogCompiledFunctions(); @@ -511,7 +511,7 @@ CpuProfiler::CpuProfiler(Isolate* isolate, CpuProfilingNamingMode naming_mode, naming_mode_(naming_mode), logging_mode_(logging_mode), base_sampling_interval_(base::TimeDelta::FromMicroseconds( - FLAG_cpu_profiler_sampling_interval)), + v8_flags.cpu_profiler_sampling_interval)), code_observer_(test_code_observer), profiles_(test_profiles), symbolizer_(test_symbolizer), diff --git a/deps/v8/src/profiler/heap-snapshot-generator.cc b/deps/v8/src/profiler/heap-snapshot-generator.cc index 3921dc7647938f..edbdb4c91a0e15 100644 --- a/deps/v8/src/profiler/heap-snapshot-generator.cc +++ b/deps/v8/src/profiler/heap-snapshot-generator.cc @@ -547,7 +547,7 @@ bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) { // Size of an object can change during its life, so to keep information // about the object in entries_ consistent, we have to adjust size when the // object is migrated. - if (FLAG_heap_profiler_trace_objects) { + if (v8_flags.heap_profiler_trace_objects) { PrintF("Move object from %p to %p old size %6d new size %6d\n", reinterpret_cast<void*>(from), reinterpret_cast<void*>(to), entries_.at(from_entry_info_index).size, object_size); @@ -586,7 +586,7 @@ SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr, static_cast<int>(reinterpret_cast<intptr_t>(entry->value)); EntryInfo& entry_info = entries_.at(entry_index); entry_info.accessed = accessed; - if (FLAG_heap_profiler_trace_objects) { + if (v8_flags.heap_profiler_trace_objects) { PrintF("Update object size : %p with old size %d and new size %d\n", reinterpret_cast<void*>(addr), entry_info.size, size); } @@ -622,7 +622,7 @@ void HeapObjectsMap::AddMergedNativeEntry(NativeObject addr, void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); } void HeapObjectsMap::UpdateHeapObjectsMap() { - if (FLAG_heap_profiler_trace_objects) { + if (v8_flags.heap_profiler_trace_objects) { PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n", entries_map_.occupancy()); } @@ -634,14 +634,14 @@ void HeapObjectsMap::UpdateHeapObjectsMap() { obj = iterator.Next()) { int object_size = obj.Size(cage_base); FindOrAddEntry(obj.address(), object_size); - if (FLAG_heap_profiler_trace_objects) { + if (v8_flags.heap_profiler_trace_objects) { PrintF("Update object : %p %6d. Next address is %p\n", reinterpret_cast<void*>(obj.address()), object_size, reinterpret_cast<void*>(obj.address() + object_size)); } } RemoveDeadEntries(); - if (FLAG_heap_profiler_trace_objects) { + if (v8_flags.heap_profiler_trace_objects) { PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n", entries_map_.occupancy()); } @@ -877,7 +877,8 @@ HeapEntry* V8HeapExplorer::AddEntry(HeapObject object) { HeapEntry* V8HeapExplorer::AddEntry(HeapObject object, HeapEntry::Type type, const char* name) { - if (FLAG_heap_profiler_show_hidden_objects && type == HeapEntry::kHidden) { + if (v8_flags.heap_profiler_show_hidden_objects && + type == HeapEntry::kHidden) { type = HeapEntry::kNative; } PtrComprCageBase cage_base(isolate()); @@ -2094,7 +2095,7 @@ bool V8HeapExplorer::IterateAndExtractReferences( // objects, and fails DCHECKs if we attempt to. Read-only objects can // never retain read-write objects, so there is no risk in skipping // verification for them. 
- if (FLAG_heap_snapshot_verify && + if (v8_flags.heap_snapshot_verify && !BasicMemoryChunk::FromHeapObject(obj)->InReadOnlySpace()) { verifier = std::make_unique<HeapEntryVerifier>(generator, obj); } @@ -2643,7 +2644,7 @@ bool NativeObjectsExplorer::IterateAndExtractReferences( HeapSnapshotGenerator* generator) { generator_ = generator; - if (FLAG_heap_profiler_use_embedder_graph && + if (v8_flags.heap_profiler_use_embedder_graph && snapshot_->profiler()->HasBuildEmbedderGraphCallback()) { v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate_)); DisallowGarbageCollection no_gc; @@ -2726,7 +2727,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() { #ifdef VERIFY_HEAP Heap* debug_heap = heap_; - if (FLAG_verify_heap) { + if (v8_flags.verify_heap) { HeapVerifier::VerifyHeap(debug_heap); } #endif @@ -2734,7 +2735,7 @@ bool HeapSnapshotGenerator::GenerateSnapshot() { InitProgressCounter(); #ifdef VERIFY_HEAP - if (FLAG_verify_heap) { + if (v8_flags.verify_heap) { HeapVerifier::VerifyHeap(debug_heap); } #endif diff --git a/deps/v8/src/profiler/heap-snapshot-generator.h b/deps/v8/src/profiler/heap-snapshot-generator.h index 50d97de2986659..7750c2b5c24e82 100644 --- a/deps/v8/src/profiler/heap-snapshot-generator.h +++ b/deps/v8/src/profiler/heap-snapshot-generator.h @@ -591,7 +591,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface { HeapEntry* result = entries_map_.emplace(ptr, allocator->AllocateEntry(ptr)).first->second; #ifdef V8_ENABLE_HEAP_SNAPSHOT_VERIFY - if (FLAG_heap_snapshot_verify) { + if (v8_flags.heap_snapshot_verify) { reverse_entries_map_.emplace(result, ptr); } #endif @@ -602,7 +602,7 @@ class HeapSnapshotGenerator : public SnapshottingProgressReportingInterface { HeapThing FindHeapThingForHeapEntry(HeapEntry* entry) { // The reverse lookup map is only populated if the verification flag is // enabled. - DCHECK(FLAG_heap_snapshot_verify); + DCHECK(v8_flags.heap_snapshot_verify); auto it = reverse_entries_map_.find(entry); return it == reverse_entries_map_.end() ? nullptr : it->second; diff --git a/deps/v8/src/profiler/sampling-heap-profiler.cc b/deps/v8/src/profiler/sampling-heap-profiler.cc index b38d235e988e61..50a32dd4d51a5c 100644 --- a/deps/v8/src/profiler/sampling-heap-profiler.cc +++ b/deps/v8/src/profiler/sampling-heap-profiler.cc @@ -25,7 +25,7 @@ namespace internal { // Let u be a uniformly distributed random number between 0 and 1, then // next_sample = (- ln u) / λ intptr_t SamplingHeapProfiler::Observer::GetNextSampleInterval(uint64_t rate) { - if (FLAG_sampling_heap_profiler_suppress_randomness) + if (v8_flags.sampling_heap_profiler_suppress_randomness) return static_cast<intptr_t>(rate); double u = random_->NextDouble(); double next = (-base::ieee754::log(u)) * rate; @@ -96,8 +96,8 @@ void SamplingHeapProfiler::OnWeakCallback( const WeakCallbackInfo<Sample>& data) { Sample* sample = data.GetParameter(); Heap* heap = reinterpret_cast<Isolate*>(data.GetIsolate())->heap(); - bool is_minor_gc = - heap->current_or_last_garbage_collector() == GarbageCollector::SCAVENGER; + bool is_minor_gc = Heap::IsYoungGenerationCollector( + heap->current_or_last_garbage_collector()); bool should_keep_sample = is_minor_gc ? 
(sample->profiler->flags_ & diff --git a/deps/v8/src/profiler/strings-storage.cc b/deps/v8/src/profiler/strings-storage.cc index 592d101e6831d9..a1bae849bcb878 100644 --- a/deps/v8/src/profiler/strings-storage.cc +++ b/deps/v8/src/profiler/strings-storage.cc @@ -81,8 +81,8 @@ const char* StringsStorage::GetSymbol(Symbol sym) { return "<symbol>"; } String description = String::cast(sym.description()); - int length = - std::min(FLAG_heap_snapshot_string_limit.value(), description.length()); + int length = std::min(v8_flags.heap_snapshot_string_limit.value(), + description.length()); auto data = description.ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &length); if (sym.is_private_name()) { @@ -98,7 +98,7 @@ const char* StringsStorage::GetName(Name name) { if (name.IsString()) { String str = String::cast(name); int length = - std::min(FLAG_heap_snapshot_string_limit.value(), str.length()); + std::min(v8_flags.heap_snapshot_string_limit.value(), str.length()); int actual_length = 0; std::unique_ptr<char[]> data = str.ToCString( DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length); @@ -117,7 +117,7 @@ const char* StringsStorage::GetConsName(const char* prefix, Name name) { if (name.IsString()) { String str = String::cast(name); int length = - std::min(FLAG_heap_snapshot_string_limit.value(), str.length()); + std::min(v8_flags.heap_snapshot_string_limit.value(), str.length()); int actual_length = 0; std::unique_ptr<char[]> data = str.ToCString( DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length, &actual_length); diff --git a/deps/v8/src/profiler/symbolizer.cc b/deps/v8/src/profiler/symbolizer.cc index 6c9d92b2c19376..8528b62693af8d 100644 --- a/deps/v8/src/profiler/symbolizer.cc +++ b/deps/v8/src/profiler/symbolizer.cc @@ -161,7 +161,7 @@ Symbolizer::SymbolizedSample Symbolizer::SymbolizeTickSample( } } - if (FLAG_prof_browser_mode) { + if (v8_flags.prof_browser_mode) { bool no_symbolized_entries = true; for (auto e : stack_trace) { if (e.code_entry != nullptr) { diff --git a/deps/v8/src/regexp/arm/regexp-macro-assembler-arm.cc b/deps/v8/src/regexp/arm/regexp-macro-assembler-arm.cc index 1988a1759db29c..2658068b6f94b9 100644 --- a/deps/v8/src/regexp/arm/regexp-macro-assembler-arm.cc +++ b/deps/v8/src/regexp/arm/regexp-macro-assembler-arm.cc @@ -529,8 +529,8 @@ void RegExpMacroAssemblerARM::CheckBitInTable( BranchOrBacktrack(ne, on_bit_set); } -bool RegExpMacroAssemblerARM::CheckSpecialCharacterClass( - StandardCharacterSet type, Label* on_no_match) { +bool RegExpMacroAssemblerARM::CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check // TODO(jgruber): No custom implementation (yet): s(UC16), S(UC16). diff --git a/deps/v8/src/regexp/arm/regexp-macro-assembler-arm.h b/deps/v8/src/regexp/arm/regexp-macro-assembler-arm.h index ab550d53768ea2..edf7650bd527ef 100644 --- a/deps/v8/src/regexp/arm/regexp-macro-assembler-arm.h +++ b/deps/v8/src/regexp/arm/regexp-macro-assembler-arm.h @@ -57,8 +57,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerARM // Checks whether the given offset from the current position is before // the end of the string. 
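
// [Editor's sketch] The "(c - min) <= (max - min)" comment repeated in the
// CheckSpecialClassRanges implementations above is the standard unsigned
// range-check trick: in unsigned arithmetic any c below min wraps around to
// a huge value, so a single compare replaces min <= c && c <= max.
static inline bool InRange(unsigned c, unsigned min, unsigned max) {
  return (c - min) <= (max - min);  // assumes min <= max
}
// Example: InRange(c, '0', '9') tests for an ASCII digit with one branch.
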
void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/regexp/arm64/regexp-macro-assembler-arm64.cc b/deps/v8/src/regexp/arm64/regexp-macro-assembler-arm64.cc index ec7808d44ef241..2e462ba7485736 100644 --- a/deps/v8/src/regexp/arm64/regexp-macro-assembler-arm64.cc +++ b/deps/v8/src/regexp/arm64/regexp-macro-assembler-arm64.cc @@ -640,7 +640,7 @@ void RegExpMacroAssemblerARM64::CheckBitInTable( CompareAndBranchOrBacktrack(w11, 0, ne, on_bit_set); } -bool RegExpMacroAssemblerARM64::CheckSpecialCharacterClass( +bool RegExpMacroAssemblerARM64::CheckSpecialClassRanges( StandardCharacterSet type, Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check diff --git a/deps/v8/src/regexp/arm64/regexp-macro-assembler-arm64.h b/deps/v8/src/regexp/arm64/regexp-macro-assembler-arm64.h index bda529c7a61861..3801591b64ce8e 100644 --- a/deps/v8/src/regexp/arm64/regexp-macro-assembler-arm64.h +++ b/deps/v8/src/regexp/arm64/regexp-macro-assembler-arm64.h @@ -61,8 +61,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerARM64 // Checks whether the given offset from the current position is before // the end of the string. void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void BindJumpTarget(Label* label = nullptr) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; diff --git a/deps/v8/src/regexp/experimental/experimental-compiler.cc b/deps/v8/src/regexp/experimental/experimental-compiler.cc index 86bcf183f18569..ce6e0c75cacfe0 100644 --- a/deps/v8/src/regexp/experimental/experimental-compiler.cc +++ b/deps/v8/src/regexp/experimental/experimental-compiler.cc @@ -65,7 +65,13 @@ class CanBeHandledVisitor final : private RegExpVisitor { return nullptr; } - void* VisitCharacterClass(RegExpCharacterClass* node, void*) override { + void* VisitClassRanges(RegExpClassRanges* node, void*) override { + return nullptr; + } + + void* VisitClassSetExpression(RegExpClassSetExpression* node, + void*) override { + result_ = false; return nullptr; } @@ -385,7 +391,7 @@ class CompileVisitor : private RegExpVisitor { return nullptr; } - void* VisitCharacterClass(RegExpCharacterClass* node, void*) override { + void* VisitClassRanges(RegExpClassRanges* node, void*) override { // A character class is compiled as Disjunction over its `CharacterRange`s. ZoneList<CharacterRange>* ranges = node->ranges(zone_); CharacterRange::Canonicalize(ranges); @@ -419,6 +425,12 @@ class CompileVisitor : private RegExpVisitor { return nullptr; } + void* VisitClassSetExpression(RegExpClassSetExpression* node, + void*) override { + // TODO(v8:11935): Add support. 
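
// [Editor's note] The two hunks above divide the work between visitors:
// CanBeHandledVisitor::VisitClassSetExpression sets result_ = false, so
// patterns using class-set expressions are reported as unsupported by the
// experimental linear-time engine (and presumably handled by the regular
// backtracking engine instead); CompileVisitor can therefore treat such
// nodes as unreachable, hence the line below.
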
+ UNREACHABLE(); + } + void* VisitAtom(RegExpAtom* node, void*) override { for (base::uc16 c : node->data()) { assembler_.ConsumeRange(c, c); diff --git a/deps/v8/src/regexp/ia32/regexp-macro-assembler-ia32.cc b/deps/v8/src/regexp/ia32/regexp-macro-assembler-ia32.cc index 093931fa9dc3a4..600234542042ce 100644 --- a/deps/v8/src/regexp/ia32/regexp-macro-assembler-ia32.cc +++ b/deps/v8/src/regexp/ia32/regexp-macro-assembler-ia32.cc @@ -567,7 +567,7 @@ void RegExpMacroAssemblerIA32::CheckBitInTable( BranchOrBacktrack(not_equal, on_bit_set); } -bool RegExpMacroAssemblerIA32::CheckSpecialCharacterClass( +bool RegExpMacroAssemblerIA32::CheckSpecialClassRanges( StandardCharacterSet type, Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check diff --git a/deps/v8/src/regexp/ia32/regexp-macro-assembler-ia32.h b/deps/v8/src/regexp/ia32/regexp-macro-assembler-ia32.h index 3933a68b1ff5e6..96d31634298309 100644 --- a/deps/v8/src/regexp/ia32/regexp-macro-assembler-ia32.h +++ b/deps/v8/src/regexp/ia32/regexp-macro-assembler-ia32.h @@ -58,8 +58,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerIA32 // Checks whether the given offset from the current position is before // the end of the string. void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/regexp/loong64/regexp-macro-assembler-loong64.cc b/deps/v8/src/regexp/loong64/regexp-macro-assembler-loong64.cc index a55ea6557a39ae..35fd95bd0f2d21 100644 --- a/deps/v8/src/regexp/loong64/regexp-macro-assembler-loong64.cc +++ b/deps/v8/src/regexp/loong64/regexp-macro-assembler-loong64.cc @@ -496,7 +496,7 @@ void RegExpMacroAssemblerLOONG64::CheckBitInTable(Handle<ByteArray> table, BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg)); } -bool RegExpMacroAssemblerLOONG64::CheckSpecialCharacterClass( +bool RegExpMacroAssemblerLOONG64::CheckSpecialClassRanges( StandardCharacterSet type, Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check. diff --git a/deps/v8/src/regexp/loong64/regexp-macro-assembler-loong64.h b/deps/v8/src/regexp/loong64/regexp-macro-assembler-loong64.h index a141936613aaa3..fe40a4e74fb8b8 100644 --- a/deps/v8/src/regexp/loong64/regexp-macro-assembler-loong64.h +++ b/deps/v8/src/regexp/loong64/regexp-macro-assembler-loong64.h @@ -56,8 +56,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerLOONG64 // Checks whether the given offset from the current position is before // the end of the string. 
void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/regexp/mips64/regexp-macro-assembler-mips64.cc b/deps/v8/src/regexp/mips64/regexp-macro-assembler-mips64.cc index 598083dcbb2394..456e166adefc72 100644 --- a/deps/v8/src/regexp/mips64/regexp-macro-assembler-mips64.cc +++ b/deps/v8/src/regexp/mips64/regexp-macro-assembler-mips64.cc @@ -545,7 +545,7 @@ void RegExpMacroAssemblerMIPS::CheckBitInTable( BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg)); } -bool RegExpMacroAssemblerMIPS::CheckSpecialCharacterClass( +bool RegExpMacroAssemblerMIPS::CheckSpecialClassRanges( StandardCharacterSet type, Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check. diff --git a/deps/v8/src/regexp/mips64/regexp-macro-assembler-mips64.h b/deps/v8/src/regexp/mips64/regexp-macro-assembler-mips64.h index 9b8c7c26d8d5ab..449084b0dab139 100644 --- a/deps/v8/src/regexp/mips64/regexp-macro-assembler-mips64.h +++ b/deps/v8/src/regexp/mips64/regexp-macro-assembler-mips64.h @@ -56,8 +56,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerMIPS // Checks whether the given offset from the current position is before // the end of the string. void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/regexp/ppc/regexp-macro-assembler-ppc.cc b/deps/v8/src/regexp/ppc/regexp-macro-assembler-ppc.cc index 483c4f395a4dd7..4fdad878947d4a 100644 --- a/deps/v8/src/regexp/ppc/regexp-macro-assembler-ppc.cc +++ b/deps/v8/src/regexp/ppc/regexp-macro-assembler-ppc.cc @@ -562,8 +562,8 @@ void RegExpMacroAssemblerPPC::CheckBitInTable(Handle<ByteArray> table, BranchOrBacktrack(ne, on_bit_set); } -bool RegExpMacroAssemblerPPC::CheckSpecialCharacterClass( - StandardCharacterSet type, Label* on_no_match) { +bool RegExpMacroAssemblerPPC::CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check // TODO(jgruber): No custom implementation (yet): s(UC16), S(UC16). diff --git a/deps/v8/src/regexp/ppc/regexp-macro-assembler-ppc.h b/deps/v8/src/regexp/ppc/regexp-macro-assembler-ppc.h index 5760809d96215d..db2783ff7217bb 100644 --- a/deps/v8/src/regexp/ppc/regexp-macro-assembler-ppc.h +++ b/deps/v8/src/regexp/ppc/regexp-macro-assembler-ppc.h @@ -57,8 +57,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerPPC // Checks whether the given offset from the current position is before // the end of the string. 
void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/regexp/property-sequences.cc b/deps/v8/src/regexp/property-sequences.cc deleted file mode 100644 index 643bde954b6a17..00000000000000 --- a/deps/v8/src/regexp/property-sequences.cc +++ /dev/null @@ -1,1246 +0,0 @@ -// Copyright 2018 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifdef V8_INTL_SUPPORT - -#include "src/regexp/property-sequences.h" - -namespace v8 { -namespace internal { - -/* -Generated from following Node.js source: - -package.json - -``` -{ - "private": true, - "dependencies": { - "unicode-12.0.0": "^0.7.9" - } -} -``` - -generate-unicode-sequence-property-data.js - -``` -const toHex = (symbol) => { - return '0x' + symbol.codePointAt(0).toString(16) - .toUpperCase().padStart(6, '0'); -}; - -const generateData = (property) => { - const sequences = - require(`unicode-12.0.0/Sequence_Property/${ property }/index.js`); - const id = property.replace(/_/g, '') + 's'; - const buffer = []; - for (const sequence of sequences) { - const symbols = [...sequence]; - const codePoints = symbols.map(symbol => toHex(symbol)); - buffer.push(' ' + codePoints.join(', ') + ', 0,'); - } - const output = - `const base::uc32 UnicodePropertySequences::k${ id }[] = {\n` + - `${ buffer.join('\n') }\n 0 // null-terminating the list\n};\n`; - return output; -}; - -const properties = [ - 'Emoji_Flag_Sequence', - 'Emoji_Tag_Sequence', - 'Emoji_ZWJ_Sequence', -]; - -for (const property of properties) { - console.log(generateData(property)); -} -``` -*/ - -// clang-format off -const base::uc32 UnicodePropertySequences::kEmojiFlagSequences[] = { - 0x01F1E6, 0x01F1E8, 0, - 0x01F1FF, 0x01F1FC, 0, - 0x01F1E6, 0x01F1EA, 0, - 0x01F1E6, 0x01F1EB, 0, - 0x01F1E6, 0x01F1EC, 0, - 0x01F1E6, 0x01F1EE, 0, - 0x01F1E6, 0x01F1F1, 0, - 0x01F1E6, 0x01F1F2, 0, - 0x01F1E6, 0x01F1F4, 0, - 0x01F1E6, 0x01F1F6, 0, - 0x01F1E6, 0x01F1F7, 0, - 0x01F1E6, 0x01F1F8, 0, - 0x01F1E6, 0x01F1F9, 0, - 0x01F1E6, 0x01F1FA, 0, - 0x01F1E6, 0x01F1FC, 0, - 0x01F1E6, 0x01F1FD, 0, - 0x01F1E6, 0x01F1FF, 0, - 0x01F1E7, 0x01F1E6, 0, - 0x01F1E7, 0x01F1E7, 0, - 0x01F1E7, 0x01F1E9, 0, - 0x01F1E7, 0x01F1EA, 0, - 0x01F1E7, 0x01F1EB, 0, - 0x01F1E7, 0x01F1EC, 0, - 0x01F1E7, 0x01F1ED, 0, - 0x01F1E7, 0x01F1EE, 0, - 0x01F1E7, 0x01F1EF, 0, - 0x01F1E7, 0x01F1F1, 0, - 0x01F1E7, 0x01F1F2, 0, - 0x01F1E7, 0x01F1F3, 0, - 0x01F1E7, 0x01F1F4, 0, - 0x01F1E7, 0x01F1F6, 0, - 0x01F1E7, 0x01F1F7, 0, - 0x01F1E7, 0x01F1F8, 0, - 0x01F1E7, 0x01F1F9, 0, - 0x01F1E7, 0x01F1FB, 0, - 0x01F1E7, 0x01F1FC, 0, - 0x01F1E7, 0x01F1FE, 0, - 0x01F1E7, 0x01F1FF, 0, - 0x01F1E8, 0x01F1E6, 0, - 0x01F1E8, 0x01F1E8, 0, - 0x01F1E8, 0x01F1E9, 0, - 0x01F1E8, 0x01F1EB, 0, - 0x01F1E8, 0x01F1EC, 0, - 0x01F1E8, 0x01F1ED, 0, - 0x01F1E8, 0x01F1EE, 0, - 0x01F1E8, 0x01F1F0, 0, - 0x01F1E8, 0x01F1F1, 0, - 0x01F1E8, 0x01F1F2, 0, - 0x01F1E8, 0x01F1F3, 0, - 0x01F1E8, 0x01F1F4, 0, - 0x01F1E8, 0x01F1F5, 0, - 0x01F1E8, 0x01F1F7, 0, - 0x01F1E8, 0x01F1FA, 0, - 0x01F1E8, 0x01F1FB, 0, - 0x01F1E8, 0x01F1FC, 0, - 0x01F1E8, 0x01F1FD, 0, - 0x01F1E8, 0x01F1FE, 0, - 0x01F1E8, 0x01F1FF, 0, - 0x01F1E9, 0x01F1EA, 0, - 0x01F1E9, 0x01F1EC, 
0, - 0x01F1E9, 0x01F1EF, 0, - 0x01F1E9, 0x01F1F0, 0, - 0x01F1E9, 0x01F1F2, 0, - 0x01F1E9, 0x01F1F4, 0, - 0x01F1E9, 0x01F1FF, 0, - 0x01F1EA, 0x01F1E6, 0, - 0x01F1EA, 0x01F1E8, 0, - 0x01F1EA, 0x01F1EA, 0, - 0x01F1EA, 0x01F1EC, 0, - 0x01F1EA, 0x01F1ED, 0, - 0x01F1EA, 0x01F1F7, 0, - 0x01F1EA, 0x01F1F8, 0, - 0x01F1EA, 0x01F1F9, 0, - 0x01F1EA, 0x01F1FA, 0, - 0x01F1EB, 0x01F1EE, 0, - 0x01F1EB, 0x01F1EF, 0, - 0x01F1EB, 0x01F1F0, 0, - 0x01F1EB, 0x01F1F2, 0, - 0x01F1EB, 0x01F1F4, 0, - 0x01F1EB, 0x01F1F7, 0, - 0x01F1EC, 0x01F1E6, 0, - 0x01F1EC, 0x01F1E7, 0, - 0x01F1EC, 0x01F1E9, 0, - 0x01F1EC, 0x01F1EA, 0, - 0x01F1EC, 0x01F1EB, 0, - 0x01F1EC, 0x01F1EC, 0, - 0x01F1EC, 0x01F1ED, 0, - 0x01F1EC, 0x01F1EE, 0, - 0x01F1EC, 0x01F1F1, 0, - 0x01F1EC, 0x01F1F2, 0, - 0x01F1EC, 0x01F1F3, 0, - 0x01F1EC, 0x01F1F5, 0, - 0x01F1EC, 0x01F1F6, 0, - 0x01F1EC, 0x01F1F7, 0, - 0x01F1EC, 0x01F1F8, 0, - 0x01F1EC, 0x01F1F9, 0, - 0x01F1EC, 0x01F1FA, 0, - 0x01F1EC, 0x01F1FC, 0, - 0x01F1EC, 0x01F1FE, 0, - 0x01F1ED, 0x01F1F0, 0, - 0x01F1ED, 0x01F1F2, 0, - 0x01F1ED, 0x01F1F3, 0, - 0x01F1ED, 0x01F1F7, 0, - 0x01F1ED, 0x01F1F9, 0, - 0x01F1ED, 0x01F1FA, 0, - 0x01F1EE, 0x01F1E8, 0, - 0x01F1EE, 0x01F1E9, 0, - 0x01F1EE, 0x01F1EA, 0, - 0x01F1EE, 0x01F1F1, 0, - 0x01F1EE, 0x01F1F2, 0, - 0x01F1EE, 0x01F1F3, 0, - 0x01F1EE, 0x01F1F4, 0, - 0x01F1EE, 0x01F1F6, 0, - 0x01F1EE, 0x01F1F7, 0, - 0x01F1EE, 0x01F1F8, 0, - 0x01F1EE, 0x01F1F9, 0, - 0x01F1EF, 0x01F1EA, 0, - 0x01F1EF, 0x01F1F2, 0, - 0x01F1EF, 0x01F1F4, 0, - 0x01F1EF, 0x01F1F5, 0, - 0x01F1F0, 0x01F1EA, 0, - 0x01F1F0, 0x01F1EC, 0, - 0x01F1F0, 0x01F1ED, 0, - 0x01F1F0, 0x01F1EE, 0, - 0x01F1F0, 0x01F1F2, 0, - 0x01F1F0, 0x01F1F3, 0, - 0x01F1F0, 0x01F1F5, 0, - 0x01F1F0, 0x01F1F7, 0, - 0x01F1F0, 0x01F1FC, 0, - 0x01F1E6, 0x01F1E9, 0, - 0x01F1F0, 0x01F1FF, 0, - 0x01F1F1, 0x01F1E6, 0, - 0x01F1F1, 0x01F1E7, 0, - 0x01F1F1, 0x01F1E8, 0, - 0x01F1F1, 0x01F1EE, 0, - 0x01F1F1, 0x01F1F0, 0, - 0x01F1F1, 0x01F1F7, 0, - 0x01F1F1, 0x01F1F8, 0, - 0x01F1F1, 0x01F1F9, 0, - 0x01F1F1, 0x01F1FA, 0, - 0x01F1F1, 0x01F1FB, 0, - 0x01F1F1, 0x01F1FE, 0, - 0x01F1F2, 0x01F1E6, 0, - 0x01F1F2, 0x01F1E8, 0, - 0x01F1F2, 0x01F1E9, 0, - 0x01F1F2, 0x01F1EA, 0, - 0x01F1F2, 0x01F1EB, 0, - 0x01F1F2, 0x01F1EC, 0, - 0x01F1F2, 0x01F1ED, 0, - 0x01F1F2, 0x01F1F0, 0, - 0x01F1F2, 0x01F1F1, 0, - 0x01F1F2, 0x01F1F2, 0, - 0x01F1F2, 0x01F1F3, 0, - 0x01F1F2, 0x01F1F4, 0, - 0x01F1F2, 0x01F1F5, 0, - 0x01F1F2, 0x01F1F6, 0, - 0x01F1F2, 0x01F1F7, 0, - 0x01F1F2, 0x01F1F8, 0, - 0x01F1F2, 0x01F1F9, 0, - 0x01F1F2, 0x01F1FA, 0, - 0x01F1F2, 0x01F1FB, 0, - 0x01F1F2, 0x01F1FC, 0, - 0x01F1F2, 0x01F1FD, 0, - 0x01F1F2, 0x01F1FE, 0, - 0x01F1F2, 0x01F1FF, 0, - 0x01F1F3, 0x01F1E6, 0, - 0x01F1F3, 0x01F1E8, 0, - 0x01F1F3, 0x01F1EA, 0, - 0x01F1F3, 0x01F1EB, 0, - 0x01F1F3, 0x01F1EC, 0, - 0x01F1F3, 0x01F1EE, 0, - 0x01F1F3, 0x01F1F1, 0, - 0x01F1F3, 0x01F1F4, 0, - 0x01F1F3, 0x01F1F5, 0, - 0x01F1F3, 0x01F1F7, 0, - 0x01F1F3, 0x01F1FA, 0, - 0x01F1F3, 0x01F1FF, 0, - 0x01F1F4, 0x01F1F2, 0, - 0x01F1F5, 0x01F1E6, 0, - 0x01F1F5, 0x01F1EA, 0, - 0x01F1F5, 0x01F1EB, 0, - 0x01F1F5, 0x01F1EC, 0, - 0x01F1F5, 0x01F1ED, 0, - 0x01F1F5, 0x01F1F0, 0, - 0x01F1F5, 0x01F1F1, 0, - 0x01F1F5, 0x01F1F2, 0, - 0x01F1F5, 0x01F1F3, 0, - 0x01F1F5, 0x01F1F7, 0, - 0x01F1F5, 0x01F1F8, 0, - 0x01F1F5, 0x01F1F9, 0, - 0x01F1F5, 0x01F1FC, 0, - 0x01F1F5, 0x01F1FE, 0, - 0x01F1F6, 0x01F1E6, 0, - 0x01F1F7, 0x01F1EA, 0, - 0x01F1F7, 0x01F1F4, 0, - 0x01F1F7, 0x01F1F8, 0, - 0x01F1F7, 0x01F1FA, 0, - 0x01F1F7, 0x01F1FC, 0, - 0x01F1F8, 0x01F1E6, 0, - 0x01F1F8, 0x01F1E7, 0, - 0x01F1F8, 0x01F1E8, 0, - 0x01F1F8, 0x01F1E9, 0, - 
0x01F1F8, 0x01F1EA, 0, - 0x01F1F8, 0x01F1EC, 0, - 0x01F1F8, 0x01F1ED, 0, - 0x01F1F8, 0x01F1EE, 0, - 0x01F1F8, 0x01F1EF, 0, - 0x01F1F8, 0x01F1F0, 0, - 0x01F1F8, 0x01F1F1, 0, - 0x01F1F8, 0x01F1F2, 0, - 0x01F1F8, 0x01F1F3, 0, - 0x01F1F8, 0x01F1F4, 0, - 0x01F1F8, 0x01F1F7, 0, - 0x01F1F8, 0x01F1F8, 0, - 0x01F1F8, 0x01F1F9, 0, - 0x01F1F8, 0x01F1FB, 0, - 0x01F1F8, 0x01F1FD, 0, - 0x01F1F8, 0x01F1FE, 0, - 0x01F1F8, 0x01F1FF, 0, - 0x01F1F9, 0x01F1E6, 0, - 0x01F1F9, 0x01F1E8, 0, - 0x01F1F9, 0x01F1E9, 0, - 0x01F1F9, 0x01F1EB, 0, - 0x01F1F9, 0x01F1EC, 0, - 0x01F1F9, 0x01F1ED, 0, - 0x01F1F9, 0x01F1EF, 0, - 0x01F1F9, 0x01F1F0, 0, - 0x01F1F9, 0x01F1F1, 0, - 0x01F1F9, 0x01F1F2, 0, - 0x01F1F9, 0x01F1F3, 0, - 0x01F1F9, 0x01F1F4, 0, - 0x01F1F9, 0x01F1F7, 0, - 0x01F1F9, 0x01F1F9, 0, - 0x01F1F9, 0x01F1FB, 0, - 0x01F1F9, 0x01F1FC, 0, - 0x01F1F9, 0x01F1FF, 0, - 0x01F1FA, 0x01F1E6, 0, - 0x01F1FA, 0x01F1EC, 0, - 0x01F1FA, 0x01F1F2, 0, - 0x01F1FA, 0x01F1F3, 0, - 0x01F1FA, 0x01F1F8, 0, - 0x01F1FA, 0x01F1FE, 0, - 0x01F1FA, 0x01F1FF, 0, - 0x01F1FB, 0x01F1E6, 0, - 0x01F1FB, 0x01F1E8, 0, - 0x01F1FB, 0x01F1EA, 0, - 0x01F1FB, 0x01F1EC, 0, - 0x01F1FB, 0x01F1EE, 0, - 0x01F1FB, 0x01F1F3, 0, - 0x01F1FB, 0x01F1FA, 0, - 0x01F1FC, 0x01F1EB, 0, - 0x01F1FC, 0x01F1F8, 0, - 0x01F1FD, 0x01F1F0, 0, - 0x01F1FE, 0x01F1EA, 0, - 0x01F1FE, 0x01F1F9, 0, - 0x01F1FF, 0x01F1E6, 0, - 0x01F1FF, 0x01F1F2, 0, - 0x01F1F0, 0x01F1FE, 0, - 0 // null-terminating the list -}; - -const base::uc32 UnicodePropertySequences::kEmojiTagSequences[] = { - 0x01F3F4, 0x0E0067, 0x0E0062, 0x0E0065, 0x0E006E, 0x0E0067, 0x0E007F, 0, - 0x01F3F4, 0x0E0067, 0x0E0062, 0x0E0073, 0x0E0063, 0x0E0074, 0x0E007F, 0, - 0x01F3F4, 0x0E0067, 0x0E0062, 0x0E0077, 0x0E006C, 0x0E0073, 0x0E007F, 0, - 0 // null-terminating the list -}; - -const base::uc32 UnicodePropertySequences::kEmojiZWJSequences[] = { - 0x01F468, 0x00200D, 0x002764, 0x00FE0F, 0x00200D, 0x01F468, 0, - 0x01F441, 0x00FE0F, 0x00200D, 0x01F5E8, 0x00FE0F, 0, - 0x01F468, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F466, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F467, 0, - 0x01F468, 0x00200D, 0x01F467, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F467, 0x00200D, 0x01F467, 0, - 0x01F468, 0x00200D, 0x01F468, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F468, 0x00200D, 0x01F466, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F468, 0x00200D, 0x01F467, 0, - 0x01F468, 0x00200D, 0x01F468, 0x00200D, 0x01F467, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F468, 0x00200D, 0x01F467, 0x00200D, 0x01F467, 0, - 0x01F468, 0x00200D, 0x01F469, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F469, 0x00200D, 0x01F466, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F469, 0x00200D, 0x01F467, 0, - 0x01F468, 0x00200D, 0x01F469, 0x00200D, 0x01F467, 0x00200D, 0x01F466, 0, - 0x01F468, 0x00200D, 0x01F469, 0x00200D, 0x01F467, 0x00200D, 0x01F467, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FB, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FB, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FC, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FB, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FC, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FD, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FB, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FC, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FD, 0, - 0x01F468, 
0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FE, 0, - 0x01F469, 0x00200D, 0x002764, 0x00FE0F, 0x00200D, 0x01F468, 0, - 0x01F469, 0x00200D, 0x002764, 0x00FE0F, 0x00200D, 0x01F469, 0, - 0x01F469, 0x00200D, 0x002764, 0x00FE0F, 0x00200D, 0x01F48B, 0x00200D, - 0x01F468, 0, - 0x01F469, 0x00200D, 0x002764, 0x00FE0F, 0x00200D, 0x01F48B, 0x00200D, - 0x01F469, 0, - 0x01F469, 0x00200D, 0x01F466, 0, - 0x01F469, 0x00200D, 0x01F466, 0x00200D, 0x01F466, 0, - 0x01F469, 0x00200D, 0x01F467, 0, - 0x01F469, 0x00200D, 0x01F467, 0x00200D, 0x01F466, 0, - 0x01F469, 0x00200D, 0x01F467, 0x00200D, 0x01F467, 0, - 0x01F469, 0x00200D, 0x01F469, 0x00200D, 0x01F466, 0, - 0x01F469, 0x00200D, 0x01F469, 0x00200D, 0x01F466, 0x00200D, 0x01F466, 0, - 0x01F469, 0x00200D, 0x01F469, 0x00200D, 0x01F467, 0, - 0x01F469, 0x00200D, 0x01F469, 0x00200D, 0x01F467, 0x00200D, 0x01F466, 0, - 0x01F469, 0x00200D, 0x01F469, 0x00200D, 0x01F467, 0x00200D, 0x01F467, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FC, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FD, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FE, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FF, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FB, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FD, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FE, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FF, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FB, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FB, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FC, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FE, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FF, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FB, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FC, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FB, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FC, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FD, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FF, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FB, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FC, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FD, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FB, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FC, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FD, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F468, 0x01F3FE, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FB, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FC, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FD, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F469, 0x01F3FE, 0, - 0x01F9D1, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0, - 0x01F9D1, 0x01F3FB, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FB, 0, - 0x01F9D1, 0x01F3FC, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FB, 0, - 0x01F9D1, 0x01F3FC, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FC, 0, - 0x01F9D1, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FB, 0, - 0x01F9D1, 
0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FC, 0, - 0x01F9D1, 0x01F3FD, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FD, 0, - 0x01F9D1, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FB, 0, - 0x01F9D1, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FC, 0, - 0x01F9D1, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FD, 0, - 0x01F9D1, 0x01F3FE, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FE, 0, - 0x01F9D1, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FB, 0, - 0x01F9D1, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FC, 0, - 0x01F9D1, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FD, 0, - 0x01F9D1, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FE, 0, - 0x01F9D1, 0x01F3FF, 0x00200D, 0x01F91D, 0x00200D, 0x01F9D1, 0x01F3FF, 0, - 0x01F468, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F468, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F468, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F468, 0x00200D, 0x01F33E, 0, - 0x01F468, 0x00200D, 0x01F373, 0, - 0x01F468, 0x00200D, 0x01F393, 0, - 0x01F468, 0x00200D, 0x01F3A4, 0, - 0x01F468, 0x00200D, 0x01F3A8, 0, - 0x01F468, 0x00200D, 0x01F3EB, 0, - 0x01F468, 0x00200D, 0x01F3ED, 0, - 0x01F468, 0x00200D, 0x01F4BB, 0, - 0x01F468, 0x00200D, 0x01F4BC, 0, - 0x01F468, 0x00200D, 0x01F527, 0, - 0x01F468, 0x00200D, 0x01F52C, 0, - 0x01F468, 0x00200D, 0x01F680, 0, - 0x01F468, 0x00200D, 0x01F692, 0, - 0x01F468, 0x00200D, 0x01F9AF, 0, - 0x01F468, 0x00200D, 0x01F9BC, 0, - 0x01F468, 0x00200D, 0x01F9BD, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F33E, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F373, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F393, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F3A4, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F3A8, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F3EB, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F3ED, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F4BB, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F4BC, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F527, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F52C, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F680, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F692, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F9AF, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F9BC, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F9BD, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F33E, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F373, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F393, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F3A4, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F3A8, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F3EB, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F3ED, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F4BB, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F4BC, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F527, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F52C, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F680, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F692, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F9AF, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F9BC, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F9BD, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F33E, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F373, 0, - 0x01F468, 0x01F3FD, 
0x00200D, 0x01F393, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F3A4, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F3A8, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F3EB, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F3ED, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F4BB, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F4BC, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F527, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F52C, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F680, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F692, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F9AF, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F9BC, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F9BD, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F33E, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F373, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F393, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F3A4, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F3A8, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F3EB, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F3ED, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F4BB, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F4BC, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F527, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F52C, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F680, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F692, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F9AF, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F9BC, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F9BD, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F33E, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F373, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F393, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F3A4, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F3A8, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F3EB, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F3ED, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F4BB, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F4BC, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F527, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F52C, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F680, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F692, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F9AF, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F9BC, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F9BD, 0, - 0x01F469, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F469, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F469, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F469, 0x00200D, 0x01F33E, 0, - 0x01F469, 0x00200D, 0x01F373, 0, - 0x01F469, 0x00200D, 0x01F393, 0, - 0x01F469, 0x00200D, 0x01F3A4, 0, - 0x01F469, 0x00200D, 0x01F3A8, 0, - 0x01F469, 0x00200D, 0x01F3EB, 0, - 0x01F469, 0x00200D, 0x01F3ED, 0, - 0x01F469, 0x00200D, 0x01F4BB, 0, - 0x01F469, 0x00200D, 0x01F4BC, 0, - 0x01F469, 0x00200D, 0x01F527, 0, - 0x01F469, 0x00200D, 0x01F52C, 0, - 0x01F469, 0x00200D, 0x01F680, 0, - 0x01F469, 0x00200D, 0x01F692, 0, - 0x01F469, 0x00200D, 0x01F9AF, 0, - 0x01F469, 0x00200D, 0x01F9BC, 0, - 0x01F469, 0x00200D, 0x01F9BD, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F33E, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F373, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F393, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F3A4, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F3A8, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F3EB, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F3ED, 0, - 
0x01F469, 0x01F3FB, 0x00200D, 0x01F4BB, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F4BC, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F527, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F52C, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F680, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F692, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F9AF, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F9BC, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F9BD, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F33E, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F373, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F393, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F3A4, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F3A8, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F3EB, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F3ED, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F4BB, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F4BC, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F527, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F52C, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F680, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F692, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F9AF, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F9BC, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F9BD, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F33E, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F373, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F393, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F3A4, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F3A8, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F3EB, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F3ED, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F4BB, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F4BC, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F527, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F52C, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F680, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F692, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F9AF, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F9BC, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F9BD, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F33E, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F373, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F393, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F3A4, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F3A8, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F3EB, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F3ED, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F4BB, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F4BC, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F527, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F52C, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F680, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F692, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F9AF, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F9BC, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F9BD, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x002695, 0x00FE0F, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x002696, 0x00FE0F, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x002708, 0x00FE0F, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F33E, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F373, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F393, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F3A4, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F3A8, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F3EB, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F3ED, 0, - 0x01F469, 
0x01F3FF, 0x00200D, 0x01F4BB, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F4BC, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F527, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F52C, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F680, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F692, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F9AF, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F9BC, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F9BD, 0, - 0x0026F9, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x0026F9, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x0026F9, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x0026F9, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x0026F9, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x0026F9, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x0026F9, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x0026F9, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x0026F9, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x0026F9, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x0026F9, 0x00FE0F, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x0026F9, 0x00FE0F, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C3, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C3, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C3, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C4, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C4, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3C4, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CA, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CA, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CA, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CB, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CB, 0x00FE0F, 0x00200D, 
0x002640, 0x00FE0F, 0, - 0x01F3CB, 0x00FE0F, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CC, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F3CC, 0x00FE0F, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F3CC, 0x00FE0F, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F46E, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F46E, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F46E, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F46E, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F46E, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F46E, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F46E, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F46E, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F46E, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F46E, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F46E, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F46E, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F46F, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F46F, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F471, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F471, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F471, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F471, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F471, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F471, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F471, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F471, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F471, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F471, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F471, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F471, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F473, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F473, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F473, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F473, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F473, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F473, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F473, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F473, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F473, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F473, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F473, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F473, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F477, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F477, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F477, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F477, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F477, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F477, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F477, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F477, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F477, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F477, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F477, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F477, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F481, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F481, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F481, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F481, 0x01F3FB, 
0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F481, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F481, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F481, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F481, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F481, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F481, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F481, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F481, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F482, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F482, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F468, 0x00200D, 0x002764, 0x00FE0F, 0x00200D, 0x01F48B, 0x00200D, - 0x01F468, 0, - 0x01F482, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F482, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F482, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F482, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F482, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F482, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F482, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F482, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F482, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F486, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F486, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F486, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F486, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F486, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F486, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F486, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F486, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F486, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F486, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F486, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F486, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F487, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F487, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F487, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F487, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F487, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F487, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F487, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F487, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F487, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F487, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F487, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F487, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F575, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F575, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F575, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F575, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F575, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F575, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F575, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F575, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F575, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F575, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F575, 0x00FE0F, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F575, 0x00FE0F, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F645, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F645, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F645, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F645, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F645, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F645, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F645, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F645, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F645, 0x01F3FE, 0x00200D, 0x002640, 
0x00FE0F, 0, - 0x01F645, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F645, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F645, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F646, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F646, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F646, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F646, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F646, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F646, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F646, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F646, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F646, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F646, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F646, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F646, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F647, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F647, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F647, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F647, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F647, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F647, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F647, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F647, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F647, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F647, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F647, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F647, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64B, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64B, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64B, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64B, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64B, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64B, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64B, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64B, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64B, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64B, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64B, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64B, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64D, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64D, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64D, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64D, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64D, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64D, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64D, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64D, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64D, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64D, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64D, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64D, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64E, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64E, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64E, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64E, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64E, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64E, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64E, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64E, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64E, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64E, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F64E, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F64E, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6A3, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6A3, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FB, 0x00200D, 
0x002642, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6A3, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B4, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B4, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B4, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B5, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B5, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B5, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B6, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B6, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F6B6, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F926, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F926, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F926, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F926, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F926, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F926, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F926, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F926, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F926, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F926, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F926, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F926, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F937, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F937, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F937, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F937, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F937, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F937, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F937, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F937, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F937, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F937, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 
0x01F937, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F937, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F938, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F938, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F938, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F938, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F938, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F938, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F938, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F938, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F938, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F938, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F938, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F938, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F939, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F939, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F939, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F939, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F939, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F939, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F939, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F939, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F939, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F939, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F939, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F939, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93C, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93C, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93D, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93D, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93D, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93D, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93D, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93D, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93D, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93D, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93D, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93D, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93D, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93D, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93E, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93E, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93E, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93E, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93E, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93E, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93E, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93E, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93E, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93E, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F93E, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F93E, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B8, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B8, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B8, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B9, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B9, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B9, 
0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9B9, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CD, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CE, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9CF, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D6, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D6, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D6, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D7, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D7, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FE, 0x00200D, 
0x002642, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D7, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D8, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D8, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D8, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D9, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D9, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9D9, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DA, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DA, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DA, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DB, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FC, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DC, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FB, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FC, 
0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FC, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FD, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FD, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DD, 0x01F3FF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DE, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DE, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F9DF, 0x00200D, 0x002640, 0x00FE0F, 0, - 0x01F9DF, 0x00200D, 0x002642, 0x00FE0F, 0, - 0x01F468, 0x00200D, 0x01F9B0, 0, - 0x01F468, 0x00200D, 0x01F9B1, 0, - 0x01F468, 0x00200D, 0x01F9B2, 0, - 0x01F468, 0x00200D, 0x01F9B3, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F9B0, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F9B1, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F9B2, 0, - 0x01F468, 0x01F3FB, 0x00200D, 0x01F9B3, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F9B0, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F9B1, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F9B2, 0, - 0x01F468, 0x01F3FC, 0x00200D, 0x01F9B3, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F9B0, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F9B1, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F9B2, 0, - 0x01F468, 0x01F3FD, 0x00200D, 0x01F9B3, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F9B0, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F9B1, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F9B2, 0, - 0x01F468, 0x01F3FE, 0x00200D, 0x01F9B3, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F9B0, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F9B1, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F9B2, 0, - 0x01F468, 0x01F3FF, 0x00200D, 0x01F9B3, 0, - 0x01F469, 0x00200D, 0x01F9B0, 0, - 0x01F469, 0x00200D, 0x01F9B1, 0, - 0x01F469, 0x00200D, 0x01F9B2, 0, - 0x01F469, 0x00200D, 0x01F9B3, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F9B0, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F9B1, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F9B2, 0, - 0x01F469, 0x01F3FB, 0x00200D, 0x01F9B3, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F9B0, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F9B1, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F9B2, 0, - 0x01F469, 0x01F3FC, 0x00200D, 0x01F9B3, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F9B0, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F9B1, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F9B2, 0, - 0x01F469, 0x01F3FD, 0x00200D, 0x01F9B3, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F9B0, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F9B1, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F9B2, 0, - 0x01F469, 0x01F3FE, 0x00200D, 0x01F9B3, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F9B0, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F9B1, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F9B2, 0, - 0x01F469, 0x01F3FF, 0x00200D, 0x01F9B3, 0, - 0x01F3F3, 0x00FE0F, 0x00200D, 0x01F308, 0, - 0x01F3F4, 0x00200D, 0x002620, 0x00FE0F, 0, - 0x01F415, 0x00200D, 0x01F9BA, 0, - 0x01F482, 0x01F3FB, 0x00200D, 0x002640, 0x00FE0F, 0, - 0 // null-terminating the list -}; -// clang-format on - -} // namespace internal -} // namespace v8 - -#endif // V8_INTL_SUPPORT diff --git a/deps/v8/src/regexp/property-sequences.h b/deps/v8/src/regexp/property-sequences.h deleted file mode 100644 index 26470974991376..00000000000000 --- a/deps/v8/src/regexp/property-sequences.h +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2018 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
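// ---------------------------------------------------------------------------
// (A minimal sketch, not part of the patch: each table removed above stores
// one emoji sequence per entry as a run of code points ended by a 0 sentinel,
// with an extra 0 terminating the whole list -- hence the repeated
// "null-terminating the list" comments. A standalone walker for such a
// doubly-null-terminated table might look as follows; the function name and
// the use of uint32_t in place of base::uc32 are illustrative assumptions.)
#include <cstdint>
// Returns true if |seq| (|seq_len| code points, none of them 0) occurs as a
// complete entry in the doubly-null-terminated |list|.
bool ListContainsSequence(const uint32_t* list, const uint32_t* seq,
                          int seq_len) {
  for (const uint32_t* entry = list; *entry != 0;) {
    int i = 0;
    while (i < seq_len && entry[i] == seq[i]) i++;   // compare prefix
    if (i == seq_len && entry[i] == 0) return true;  // exact-length match
    while (*entry != 0) entry++;  // skip to this entry's 0 sentinel...
    entry++;                      // ...and past it, onto the next entry
  }
  return false;
}
// ---------------------------------------------------------------------------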
- -#ifndef V8_REGEXP_PROPERTY_SEQUENCES_H_ -#define V8_REGEXP_PROPERTY_SEQUENCES_H_ - -#ifdef V8_INTL_SUPPORT - -#include "src/base/strings.h" -#include "src/common/globals.h" - -namespace v8 { -namespace internal { - -class UnicodePropertySequences : public AllStatic { - public: - static const base::uc32 kEmojiFlagSequences[]; - static const base::uc32 kEmojiTagSequences[]; - static const base::uc32 kEmojiZWJSequences[]; -}; - -} // namespace internal -} // namespace v8 - -#endif // V8_INTL_SUPPORT - -#endif // V8_REGEXP_PROPERTY_SEQUENCES_H_ diff --git a/deps/v8/src/regexp/regexp-ast.cc b/deps/v8/src/regexp/regexp-ast.cc index 6315057f029c1a..ef0f153c689bed 100644 --- a/deps/v8/src/regexp/regexp-ast.cc +++ b/deps/v8/src/regexp/regexp-ast.cc @@ -182,9 +182,7 @@ void RegExpUnparser::VisitCharacterRange(CharacterRange that) { } } - -void* RegExpUnparser::VisitCharacterClass(RegExpCharacterClass* that, - void* data) { +void* RegExpUnparser::VisitClassRanges(RegExpClassRanges* that, void* data) { if (that->is_negated()) os_ << "^"; os_ << "["; for (int i = 0; i < that->ranges(zone_)->length(); i++) { @@ -195,6 +193,28 @@ void* RegExpUnparser::VisitCharacterClass(RegExpCharacterClass* that, return nullptr; } +void* RegExpUnparser::VisitClassSetExpression(RegExpClassSetExpression* that, + void* data) { + switch (that->operation()) { + case RegExpClassSetExpression::OperationType::kUnion: + os_ << "++"; + break; + case RegExpClassSetExpression::OperationType::kIntersection: + os_ << "&&"; + break; + case RegExpClassSetExpression::OperationType::kSubtraction: + os_ << "--"; + break; + } + if (that->is_negated()) os_ << "^"; + os_ << "["; + for (int i = 0; i < that->operands()->length(); i++) { + if (i > 0) os_ << " "; + that->operands()->at(i)->Accept(this, data); + } + os_ << "]"; + return nullptr; +} void* RegExpUnparser::VisitAssertion(RegExpAssertion* that, void* data) { switch (that->assertion_type()) { diff --git a/deps/v8/src/regexp/regexp-ast.h b/deps/v8/src/regexp/regexp-ast.h index 9716920d726b52..718ecb2b5040af 100644 --- a/deps/v8/src/regexp/regexp-ast.h +++ b/deps/v8/src/regexp/regexp-ast.h @@ -18,7 +18,8 @@ namespace internal { VISIT(Disjunction) \ VISIT(Alternative) \ VISIT(Assertion) \ - VISIT(CharacterClass) \ + VISIT(ClassRanges) \ + VISIT(ClassSetExpression) \ VISIT(Atom) \ VISIT(Quantifier) \ VISIT(Capture) \ @@ -117,29 +118,49 @@ class CharacterRange { StandardCharacterSet standard_character_set, ZoneList<CharacterRange>* ranges, bool add_unicode_case_equivalents, Zone* zone); + // Add case equivalents to ranges. Only used for /i, not for /ui or /vi, as + // the semantics for unicode mode are slightly different. + // See https://tc39.es/ecma262/#sec-runtime-semantics-canonicalize-ch Note 4. V8_EXPORT_PRIVATE static void AddCaseEquivalents( Isolate* isolate, Zone* zone, ZoneList<CharacterRange>* ranges, bool is_one_byte); + // Add case equivalent code points to ranges. Only used for /ui and /vi, not + // for /i, as the semantics for non-unicode mode are slightly different. + // See https://tc39.es/ecma262/#sec-runtime-semantics-canonicalize-ch Note 4. 
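// (Illustrative aside on the Note 4 difference cited above, based on the
// spec rather than on this patch: the classic observable case is U+017F
// LATIN SMALL LETTER LONG S. Without a unicode flag, Canonicalize()
// upper-cases it to ASCII 'S' but then keeps the original character, because
// a code point >= 128 may not canonicalize to one < 128, so
//   /[s]/i.test("\u017F")  -> false
// whereas with /u (and /v) simple case folding maps U+017F to 's', so
//   /[s]/ui.test("\u017F") -> true.)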
+ static void AddUnicodeCaseEquivalents(ZoneList<CharacterRange>* ranges, + Zone* zone); bool Contains(base::uc32 i) const { return from_ <= i && i <= to_; } base::uc32 from() const { return from_; } base::uc32 to() const { return to_; } bool IsEverything(base::uc32 max) const { return from_ == 0 && to_ >= max; } bool IsSingleton() const { return from_ == to_; } + // Whether a range list is in canonical form: Ranges ordered by from value, // and ranges non-overlapping and non-adjacent. - V8_EXPORT_PRIVATE static bool IsCanonical(ZoneList<CharacterRange>* ranges); + V8_EXPORT_PRIVATE static bool IsCanonical( + const ZoneList<CharacterRange>* ranges); // Convert range list to canonical form. The characters covered by the ranges // will still be the same, but no character is in more than one range, and // adjacent ranges are merged. The resulting list may be shorter than the // original, but cannot be longer. static void Canonicalize(ZoneList<CharacterRange>* ranges); // Negate the contents of a character range in canonical form. - static void Negate(ZoneList<CharacterRange>* src, + static void Negate(const ZoneList<CharacterRange>* src, ZoneList<CharacterRange>* dst, Zone* zone); - + // Intersect the contents of two character ranges in canonical form. + static void Intersect(const ZoneList<CharacterRange>* lhs, + const ZoneList<CharacterRange>* rhs, + ZoneList<CharacterRange>* dst, Zone* zone); + // Subtract the contents of |to_remove| from the contents of |src|. + static void Subtract(const ZoneList<CharacterRange>* src, + const ZoneList<CharacterRange>* to_remove, + ZoneList<CharacterRange>* dst, Zone* zone); // Remove all ranges outside the one-byte range. static void ClampToOneByte(ZoneList<CharacterRange>* ranges); + // Checks if two ranges (both need to be canonical) are equal. + static bool Equals(const ZoneList<CharacterRange>* lhs, + const ZoneList<CharacterRange>* rhs); private: CharacterRange(base::uc32 from, base::uc32 to) : from_(from), to_(to) {} @@ -150,6 +171,13 @@ class CharacterRange { base::uc32 to_ = 0; }; +inline bool operator==(const CharacterRange& lhs, const CharacterRange& rhs) { + return lhs.from() == rhs.from() && lhs.to() == rhs.to(); +} +inline bool operator!=(const CharacterRange& lhs, const CharacterRange& rhs) { + return !operator==(lhs, rhs); +} + #define DECL_BOILERPLATE(Name) \ void* Accept(RegExpVisitor* visitor, void* data) override; \ RegExpNode* ToNode(RegExpCompiler* compiler, RegExpNode* on_success) \ @@ -271,7 +299,7 @@ class CharacterSet final { base::Optional<StandardCharacterSet> standard_set_type_; }; -class RegExpCharacterClass final : public RegExpTree { +class RegExpClassRanges final : public RegExpTree { public: // NEGATED: The character class is negated and should match everything but // the specified ranges. @@ -281,22 +309,21 @@ class RegExpCharacterClass final : public RegExpTree { NEGATED = 1 << 0, CONTAINS_SPLIT_SURROGATE = 1 << 1, }; - using CharacterClassFlags = base::Flags<Flag>; + using ClassRangesFlags = base::Flags<Flag>; - RegExpCharacterClass( - Zone* zone, ZoneList<CharacterRange>* ranges, - CharacterClassFlags character_class_flags = CharacterClassFlags()) - : set_(ranges), character_class_flags_(character_class_flags) { + RegExpClassRanges(Zone* zone, ZoneList<CharacterRange>* ranges, + ClassRangesFlags class_ranges_flags = ClassRangesFlags()) + : set_(ranges), class_ranges_flags_(class_ranges_flags) { // Convert the empty set of ranges to the negated Everything() range. 
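// (Aside: the XOR below encodes both degenerate classes without ever keeping
// an empty range list around, which downstream passes do not expect:
//   []  (empty, not negated) -> ranges = {Everything()}, NEGATED set
//                               => matches nothing
//   [^] (empty, negated)     -> ranges = {Everything()}, NEGATED cleared
//                               => matches everything
// The "matches nothing" form is the same one reused later as a 'fail' node.)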
if (ranges->is_empty()) { ranges->Add(CharacterRange::Everything(), zone); - character_class_flags_ ^= NEGATED; + class_ranges_flags_ ^= NEGATED; } } - explicit RegExpCharacterClass(StandardCharacterSet standard_set_type) - : set_(standard_set_type), character_class_flags_() {} + explicit RegExpClassRanges(StandardCharacterSet standard_set_type) + : set_(standard_set_type), class_ranges_flags_() {} - DECL_BOILERPLATE(CharacterClass); + DECL_BOILERPLATE(ClassRanges); bool IsTextElement() override { return true; } int min_match() override { return 1; } @@ -319,14 +346,55 @@ class RegExpCharacterClass final : public RegExpTree { CharacterSet character_set() const { return set_; } ZoneList<CharacterRange>* ranges(Zone* zone) { return set_.ranges(zone); } - bool is_negated() const { return (character_class_flags_ & NEGATED) != 0; } + bool is_negated() const { return (class_ranges_flags_ & NEGATED) != 0; } bool contains_split_surrogate() const { - return (character_class_flags_ & CONTAINS_SPLIT_SURROGATE) != 0; + return (class_ranges_flags_ & CONTAINS_SPLIT_SURROGATE) != 0; } private: CharacterSet set_; - CharacterClassFlags character_class_flags_; + ClassRangesFlags class_ranges_flags_; +}; + +class RegExpClassSetExpression final : public RegExpTree { + public: + enum class OperationType { kUnion, kIntersection, kSubtraction }; + + RegExpClassSetExpression(OperationType op, bool is_negated, + ZoneList<RegExpTree*>* operands) + : operation_(op), is_negated_(is_negated), operands_(operands) {} + + DECL_BOILERPLATE(ClassSetExpression); + + bool IsTextElement() override { return true; } + // At least 1 character is consumed. + int min_match() override { return 1; } + // Up to two code points might be consumed. + int max_match() override { return 2; } + + OperationType operation() const { return operation_; } + bool is_negated() const { return is_negated_; } + const ZoneList<RegExpTree*>* operands() const { return operands_; } + + private: + RegExpClassRanges* ToCharacterClass(Zone* zone); + + // Recursively evaluates the tree rooted at |root|, computing the valid + // CharacterRanges after applying all set operations and storing the result in + // |result_ranges|. |temp_ranges| is a list used for intermediate results, + // passed as a parameter to avoid allocating new lists all the time. + static void ComputeCharacterRanges(RegExpTree* root, + ZoneList<CharacterRange>* result_ranges, + ZoneList<CharacterRange>* temp_ranges, + Zone* zone); + + const OperationType operation_; + const bool is_negated_; + ZoneList<RegExpTree*>* operands_ = nullptr; +#ifdef ENABLE_SLOW_DCHECKS + // Cache ranges for each node during computation for (slow) DCHECKs.
+ ZoneList<CharacterRange>* ranges_ = nullptr; +#endif }; class RegExpAtom final : public RegExpTree { @@ -349,10 +417,10 @@ class RegExpAtom final : public RegExpTree { class TextElement final { public: - enum TextType { ATOM, CHAR_CLASS }; + enum TextType { ATOM, CLASS_RANGES }; static TextElement Atom(RegExpAtom* atom); - static TextElement CharClass(RegExpCharacterClass* char_class); + static TextElement ClassRanges(RegExpClassRanges* class_ranges); int cp_offset() const { return cp_offset_; } void set_cp_offset(int cp_offset) { cp_offset_ = cp_offset; } @@ -367,9 +435,9 @@ class TextElement final { return reinterpret_cast<RegExpAtom*>(tree()); } - RegExpCharacterClass* char_class() const { - DCHECK(text_type() == CHAR_CLASS); - return reinterpret_cast<RegExpCharacterClass*>(tree()); + RegExpClassRanges* class_ranges() const { + DCHECK(text_type() == CLASS_RANGES); + return reinterpret_cast<RegExpClassRanges*>(tree()); } private: diff --git a/deps/v8/src/regexp/regexp-compiler-tonode.cc b/deps/v8/src/regexp/regexp-compiler-tonode.cc index da01f246d97296..22f53831cda2bf 100644 --- a/deps/v8/src/regexp/regexp-compiler-tonode.cc +++ b/deps/v8/src/regexp/regexp-compiler-tonode.cc @@ -93,7 +93,7 @@ bool CompareRanges(ZoneList<CharacterRange>* ranges, const int* special_class, } // namespace -bool RegExpCharacterClass::is_standard(Zone* zone) { +bool RegExpClassRanges::is_standard(Zone* zone) { // TODO(lrn): Remove need for this function, by not throwing away information // along the way. if (is_negated()) { @@ -419,9 +419,23 @@ RegExpNode* UnanchoredAdvance(RegExpCompiler* compiler, return TextNode::CreateForCharacterRanges(zone, range, false, on_success); } -void AddUnicodeCaseEquivalents(ZoneList<CharacterRange>* ranges, Zone* zone) { +} // namespace + +// TODO(pthier, v8:11935): We use this method to implement +// MaybeSimpleCaseFolding +// TODO(v8:11935): Change to permalink once proposal is in stage 4. +// https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#sec-maybesimplecasefolding +// which is slightly different. The main difference is that we retain original +// characters and add case equivalents, whereas according to the spec original +// characters should be replaced with their case equivalent. +// This shouldn't make a difference for correctness, but we could potentially +// create smaller character classes for unicode sets. + +// static
void CharacterRange::AddUnicodeCaseEquivalents(ZoneList<CharacterRange>* ranges, + Zone* zone) { #ifdef V8_INTL_SUPPORT - DCHECK(CharacterRange::IsCanonical(ranges)); + DCHECK(IsCanonical(ranges)); // Micro-optimization to avoid passing large ranges to UnicodeSet::closeOver. // See also https://crbug.com/v8/6727. @@ -444,32 +458,40 @@ void AddUnicodeCaseEquivalents(ZoneList<CharacterRange>* ranges, Zone* zone) { // we end up with only simple and common case mappings. set.removeAllStrings(); for (int i = 0; i < set.getRangeCount(); i++) { - ranges->Add(CharacterRange::Range(set.getRangeStart(i), set.getRangeEnd(i)), - zone); + ranges->Add(Range(set.getRangeStart(i), set.getRangeEnd(i)), zone); } // No errors and everything we collected has been ranges.
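// (Aside: "canonical" throughout these changes means sorted by |from| and
// with ranges neither overlapping nor adjacent; e.g. {[c-f], [a-d], [g-h]}
// canonicalizes to the single range [a-h]. Given canonical inputs, the new
// set operations can be linear merge walks. A standalone sketch of an
// intersection in that style follows; Range, the std::vector-based
// signature, and the function name are illustrative assumptions, not the
// patch's CharacterRange::Intersect.)
#include <algorithm>
#include <cstdint>
#include <vector>
struct Range { uint32_t from, to; };  // inclusive bounds, like CharacterRange
std::vector<Range> IntersectCanonical(const std::vector<Range>& lhs,
                                      const std::vector<Range>& rhs) {
  std::vector<Range> out;
  size_t i = 0, j = 0;
  while (i < lhs.size() && j < rhs.size()) {
    uint32_t from = std::max(lhs[i].from, rhs[j].from);
    uint32_t to = std::min(lhs[i].to, rhs[j].to);
    if (from <= to) out.push_back({from, to});  // emit the overlap, if any
    // Drop whichever input range ends first; it cannot overlap anything
    // later in the other (sorted, disjoint) list.
    if (lhs[i].to < rhs[j].to) i++; else j++;
  }
  return out;  // canonical again: sorted, disjoint, non-adjacent
}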
- CharacterRange::Canonicalize(ranges); + Canonicalize(ranges); #endif // V8_INTL_SUPPORT } -} // namespace - -RegExpNode* RegExpCharacterClass::ToNode(RegExpCompiler* compiler, - RegExpNode* on_success) { +RegExpNode* RegExpClassRanges::ToNode(RegExpCompiler* compiler, + RegExpNode* on_success) { set_.Canonicalize(); Zone* const zone = compiler->zone(); ZoneList<CharacterRange>* ranges = this->ranges(zone); if (NeedsUnicodeCaseEquivalents(compiler->flags())) { - AddUnicodeCaseEquivalents(ranges, zone); + CharacterRange::AddUnicodeCaseEquivalents(ranges, zone); } - if (!IsUnicode(compiler->flags()) || compiler->one_byte() || + if (!IsEitherUnicode(compiler->flags()) || compiler->one_byte() || contains_split_surrogate()) { return zone->New<TextNode>(this, compiler->read_backward(), on_success); } if (is_negated()) { + // With /v, character classes are never negated. + // TODO(v8:11935): Change permalink once proposal is in stage 4. + // https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#sec-compileatom + // Atom :: CharacterClass + // 4. Assert: cc.[[Invert]] is false. + // Instead the complement is created when evaluating the class set. + // The only exception is the "nothing range" (negated everything), which is + // internally created for an empty set. + DCHECK_IMPLIES( + IsUnicodeSets(compiler->flags()), + ranges->length() == 1 && ranges->first().IsEverything(kMaxCodePoint)); ZoneList<CharacterRange>* negated = zone->New<ZoneList<CharacterRange>>(2, zone); CharacterRange::Negate(ranges, negated, zone); @@ -478,7 +500,7 @@ RegExpNode* RegExpCharacterClass::ToNode(RegExpCompiler* compiler, if (ranges->length() == 0) { // The empty character class is used as a 'fail' node. - RegExpCharacterClass* fail = zone->New<RegExpCharacterClass>(zone, ranges); + RegExpClassRanges* fail = zone->New<RegExpClassRanges>(zone, ranges); return zone->New<TextNode>(fail, compiler->read_backward(), on_success); } @@ -505,6 +527,11 @@ RegExpNode* RegExpCharacterClass::ToNode(RegExpCompiler* compiler, return result; } +RegExpNode* RegExpClassSetExpression::ToNode(RegExpCompiler* compiler, + RegExpNode* on_success) { + return ToCharacterClass(compiler->zone())->ToNode(compiler, on_success); +} + namespace { int CompareFirstChar(RegExpTree* const* a, RegExpTree* const* b) { @@ -770,7 +797,7 @@ void RegExpDisjunction::FixSingleCharacterDisjunctions( continue; } const RegExpFlags flags = compiler->flags(); - DCHECK_IMPLIES(IsUnicode(flags), + DCHECK_IMPLIES(IsEitherUnicode(flags), !unibrow::Utf16::IsLeadSurrogate(atom->data().at(0))); bool contains_trail_surrogate = unibrow::Utf16::IsTrailSurrogate(atom->data().at(0)); @@ -783,7 +810,7 @@ void RegExpDisjunction::FixSingleCharacterDisjunctions( if (!alternative->IsAtom()) break; RegExpAtom* const alt_atom = alternative->AsAtom(); if (alt_atom->length() != 1) break; - DCHECK_IMPLIES(IsUnicode(flags), + DCHECK_IMPLIES(IsEitherUnicode(flags), !unibrow::Utf16::IsLeadSurrogate(alt_atom->data().at(0))); contains_trail_surrogate |= unibrow::Utf16::IsTrailSurrogate(alt_atom->data().at(0)); @@ -799,12 +826,12 @@ void RegExpDisjunction::FixSingleCharacterDisjunctions( DCHECK_EQ(old_atom->length(), 1); ranges->Add(CharacterRange::Singleton(old_atom->data().at(0)), zone); } - RegExpCharacterClass::CharacterClassFlags character_class_flags; - if (IsUnicode(flags) && contains_trail_surrogate) { - character_class_flags = RegExpCharacterClass::CONTAINS_SPLIT_SURROGATE; + RegExpClassRanges::ClassRangesFlags class_ranges_flags; + if (IsEitherUnicode(flags) && 
contains_trail_surrogate) { + class_ranges_flags = RegExpClassRanges::CONTAINS_SPLIT_SURROGATE; } alternatives->at(write_posn++) = - zone->New<RegExpCharacterClass>(zone, ranges, character_class_flags); + zone->New<RegExpClassRanges>(zone, ranges, class_ranges_flags); } else { // Just copy any trivial alternatives. for (int j = first_in_run; j < i; j++) { @@ -922,8 +949,8 @@ RegExpNode* RegExpAssertion::ToNode(RegExpCompiler* compiler, zone->New<ZoneList<CharacterRange>>(3, zone); CharacterRange::AddClassEscape(StandardCharacterSet::kLineTerminator, newline_ranges, false, zone); - RegExpCharacterClass* newline_atom = zone->New<RegExpCharacterClass>( - StandardCharacterSet::kLineTerminator); + RegExpClassRanges* newline_atom = + zone->New<RegExpClassRanges>(StandardCharacterSet::kLineTerminator); TextNode* newline_matcher = zone->New<TextNode>(newline_atom, false, ActionNode::PositiveSubmatchSuccess( @@ -1110,7 +1137,7 @@ class AssertionSequenceRewriter final { // negated '*' (everything) range serves the purpose. ZoneList<CharacterRange>* ranges = zone_->New<ZoneList<CharacterRange>>(0, zone_); - RegExpCharacterClass* cc = zone_->New<RegExpCharacterClass>(zone_, ranges); + RegExpClassRanges* cc = zone_->New<RegExpClassRanges>(zone_, ranges); terms_->Set(from, cc); // Zero out the rest. @@ -1359,7 +1386,7 @@ void CharacterRange::AddCaseEquivalents(Isolate* isolate, Zone* zone, #endif // V8_INTL_SUPPORT } -bool CharacterRange::IsCanonical(ZoneList<CharacterRange>* ranges) { +bool CharacterRange::IsCanonical(const ZoneList<CharacterRange>* ranges) { DCHECK_NOT_NULL(ranges); int n = ranges->length(); if (n <= 1) return true; @@ -1463,6 +1490,128 @@ void CharacterSet::Canonicalize() { CharacterRange::Canonicalize(ranges_); } +RegExpClassRanges* RegExpClassSetExpression::ToCharacterClass(Zone* zone) { + ZoneList<CharacterRange>* result_ranges = + zone->template New<ZoneList<CharacterRange>>(2, zone); + ZoneList<CharacterRange>* temp_ranges = + zone->template New<ZoneList<CharacterRange>>(2, zone); + ComputeCharacterRanges(this, result_ranges, temp_ranges, zone); + return zone->template New<RegExpClassRanges>(zone, result_ranges); +} + +// static +void RegExpClassSetExpression::ComputeCharacterRanges( + RegExpTree* root, ZoneList<CharacterRange>* result_ranges, + ZoneList<CharacterRange>* temp_ranges, Zone* zone) { + DCHECK_EQ(temp_ranges->length(), 0); + DCHECK(root->IsClassRanges() || root->IsClassSetExpression()); + if (root->IsClassRanges()) { + DCHECK(!root->AsClassRanges()->is_negated()); + ZoneList<CharacterRange>* ranges = root->AsClassRanges()->ranges(zone); + CharacterRange::Canonicalize(ranges); + result_ranges->AddAll(*ranges, zone); + return; + } + RegExpClassSetExpression* node = root->AsClassSetExpression(); + switch (node->operation()) { + case OperationType::kUnion: { + ZoneList<CharacterRange>* op_ranges = + zone->template New<ZoneList<CharacterRange>>(2, zone); + for (int i = 0; i < node->operands()->length(); i++) { + RegExpTree* op = node->operands()->at(i); + ComputeCharacterRanges(op, op_ranges, temp_ranges, zone); + result_ranges->AddAll(*op_ranges, zone); + op_ranges->Rewind(0); + } + CharacterRange::Canonicalize(result_ranges); + break; + } + case OperationType::kIntersection: { + ZoneList<CharacterRange>* op_ranges = + zone->template New<ZoneList<CharacterRange>>(2, zone); + ComputeCharacterRanges(node->operands()->at(0), op_ranges, temp_ranges, + zone); + result_ranges->AddAll(*op_ranges, zone); + op_ranges->Rewind(0); + for (int i = 1; i < node->operands()->length(); 
i++) { + ComputeCharacterRanges(node->operands()->at(i), op_ranges, temp_ranges, + zone); + CharacterRange::Intersect(result_ranges, op_ranges, temp_ranges, zone); + std::swap(*result_ranges, *temp_ranges); + temp_ranges->Rewind(0); + op_ranges->Rewind(0); + } + break; + } + case OperationType::kSubtraction: { + ZoneList<CharacterRange>* op_ranges = + zone->template New<ZoneList<CharacterRange>>(2, zone); + ComputeCharacterRanges(node->operands()->at(0), op_ranges, temp_ranges, + zone); + result_ranges->AddAll(*op_ranges, zone); + op_ranges->Rewind(0); + for (int i = 1; i < node->operands()->length(); i++) { + ComputeCharacterRanges(node->operands()->at(i), op_ranges, temp_ranges, + zone); + CharacterRange::Subtract(result_ranges, op_ranges, temp_ranges, zone); + std::swap(*result_ranges, *temp_ranges); + temp_ranges->Rewind(0); + op_ranges->Rewind(0); + } +#ifdef ENABLE_SLOW_DCHECKS + // Check that the result is equal to subtracting the union of all RHS + // operands from the LHS operand. + // TODO(pthier): It is unclear whether this variant is faster or slower + // than subtracting multiple ranges in practice. + ZoneList<CharacterRange>* lhs_range = + node->operands()->at(0)->IsClassRanges() + ? node->operands()->at(0)->AsClassRanges()->ranges(zone) + : node->operands()->at(0)->AsClassSetExpression()->ranges_; + ZoneList<CharacterRange>* rhs_union = + zone->template New<ZoneList<CharacterRange>>(2, zone); + for (int i = 1; i < node->operands()->length(); i++) { + ZoneList<CharacterRange>* op_range = + node->operands()->at(i)->IsClassRanges() + ? node->operands()->at(i)->AsClassRanges()->ranges(zone) + : node->operands()->at(i)->AsClassSetExpression()->ranges_; + rhs_union->AddAll(*op_range, zone); + } + CharacterRange::Canonicalize(rhs_union); + ZoneList<CharacterRange>* ranges_check = + zone->template New<ZoneList<CharacterRange>>(2, zone); + CharacterRange::Subtract(lhs_range, rhs_union, ranges_check, zone); + DCHECK(CharacterRange::Equals(result_ranges, ranges_check)); + + // Check that the result is equal to intersecting the LHS operand with the + // complemented union of all RHS operands + ZoneList<CharacterRange>* rhs_union_negated = + zone->template New<ZoneList<CharacterRange>>(rhs_union->length(), + zone); + CharacterRange::Negate(rhs_union, rhs_union_negated, zone); + ranges_check->Rewind(0); + CharacterRange::Intersect(lhs_range, rhs_union_negated, ranges_check, + zone); + DCHECK(CharacterRange::Equals(result_ranges, ranges_check)); +#endif + break; + } + } + + if (node->is_negated()) { + CharacterRange::Negate(result_ranges, temp_ranges, zone); + std::swap(*result_ranges, *temp_ranges); + temp_ranges->Rewind(0); + } + + DCHECK_EQ(temp_ranges->length(), 0); + +#ifdef ENABLE_SLOW_DCHECKS + // Cache results for DCHECKs. 
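+ // A hand-worked instance of the identity verified above: [a-z] minus + // {[d-f], [x]} and [a-z] intersected with the complement of {[d-f], [x]} + // must both yield {[a-c], [g-w], [y-z]}.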
+ node->ranges_ = + zone->template New<ZoneList<CharacterRange>>(*result_ranges, zone); +#endif +} + // static void CharacterRange::Canonicalize(ZoneList<CharacterRange>* character_ranges) { if (character_ranges->length() <= 1) return; @@ -1500,7 +1649,7 @@ void CharacterRange::Canonicalize(ZoneList<CharacterRange>* character_ranges) { } // static -void CharacterRange::Negate(ZoneList<CharacterRange>* ranges, +void CharacterRange::Negate(const ZoneList<CharacterRange>* ranges, ZoneList<CharacterRange>* negated_ranges, Zone* zone) { DCHECK(CharacterRange::IsCanonical(ranges)); @@ -1523,6 +1672,128 @@ void CharacterRange::Negate(ZoneList<CharacterRange>* ranges, } } +// static +void CharacterRange::Intersect(const ZoneList<CharacterRange>* lhs, + const ZoneList<CharacterRange>* rhs, + ZoneList<CharacterRange>* intersection, + Zone* zone) { + DCHECK(CharacterRange::IsCanonical(lhs)); + DCHECK(CharacterRange::IsCanonical(rhs)); + DCHECK_EQ(0, intersection->length()); + int lhs_index = 0; + int rhs_index = 0; + while (lhs_index < lhs->length() && rhs_index < rhs->length()) { + // Skip non-overlapping ranges. + if (lhs->at(lhs_index).to() < rhs->at(rhs_index).from()) { + lhs_index++; + continue; + } + if (rhs->at(rhs_index).to() < lhs->at(lhs_index).from()) { + rhs_index++; + continue; + } + + base::uc32 from = + std::max(lhs->at(lhs_index).from(), rhs->at(rhs_index).from()); + base::uc32 to = std::min(lhs->at(lhs_index).to(), rhs->at(rhs_index).to()); + intersection->Add(CharacterRange::Range(from, to), zone); + if (to == lhs->at(lhs_index).to()) { + lhs_index++; + } else { + rhs_index++; + } + } + + DCHECK(IsCanonical(intersection)); +} + +namespace { + +// Advance |index| and set |from| and |to| to the new range, if not out of +// bounds of |range|, otherwise |from| is set to a code point beyond the legal +// unicode character range. +void SafeAdvanceRange(const ZoneList<CharacterRange>* range, int* index, + base::uc32* from, base::uc32* to) { + ++(*index); + if (*index < range->length()) { + *from = range->at(*index).from(); + *to = range->at(*index).to(); + } else { + *from = kMaxCodePoint + 1; + } +} + +} // namespace + +// static +void CharacterRange::Subtract(const ZoneList<CharacterRange>* src, + const ZoneList<CharacterRange>* to_remove, + ZoneList<CharacterRange>* result, Zone* zone) { + DCHECK(CharacterRange::IsCanonical(src)); + DCHECK(CharacterRange::IsCanonical(to_remove)); + DCHECK_EQ(0, result->length()); + int src_index = 0; + int to_remove_index = 0; + base::uc32 from = src->at(src_index).from(); + base::uc32 to = src->at(src_index).to(); + while (src_index < src->length() && to_remove_index < to_remove->length()) { + CharacterRange remove_range = to_remove->at(to_remove_index); + if (remove_range.to() < from) { + // (a) Non-overlapping case, ignore current to_remove range. + // |-------| + // |-------| + to_remove_index++; + } else if (to < remove_range.from()) { + // (b) Non-overlapping case, add full current range to result. + // |-------| + // |-------| + result->Add(CharacterRange::Range(from, to), zone); + SafeAdvanceRange(src, &src_index, &from, &to); + } else if (from >= remove_range.from() && to <= remove_range.to()) { + // (c) Current to_remove range fully covers current range. + // |---| + // |-------| + SafeAdvanceRange(src, &src_index, &from, &to); + } else if (from < remove_range.from() && to > remove_range.to()) { + // (d) Split current range. 
+ // |-------| + // |---| + result->Add(CharacterRange::Range(from, remove_range.from() - 1), zone); + from = remove_range.to() + 1; + to_remove_index++; + } else if (from < remove_range.from()) { + // (e) End current range. + // |-------| + // |-------| + to = remove_range.from() - 1; + result->Add(CharacterRange::Range(from, to), zone); + SafeAdvanceRange(src, &src_index, &from, &to); + } else if (to > remove_range.to()) { + // (f) Modify start of current range. + // |-------| + // |-------| + from = remove_range.to() + 1; + to_remove_index++; + } else { + UNREACHABLE(); + } + } + // The last range needs special treatment after |to_remove| is exhausted, as + // |from| might have been modified by the last |to_remove| range and |to| was + // not yet known (i.e. cases d and f). + if (from <= to) { + result->Add(CharacterRange::Range(from, to), zone); + } + src_index++; + + // Add remaining ranges after |to_remove| is exhausted. + for (; src_index < src->length(); src_index++) { + result->Add(src->at(src_index), zone); + } + + DCHECK(IsCanonical(result)); +} + // static void CharacterRange::ClampToOneByte(ZoneList<CharacterRange>* ranges) { DCHECK(IsCanonical(ranges)); @@ -1544,6 +1815,20 @@ void CharacterRange::ClampToOneByte(ZoneList<CharacterRange>* ranges) { ranges->Rewind(n); } +// static +bool CharacterRange::Equals(const ZoneList<CharacterRange>* lhs, + const ZoneList<CharacterRange>* rhs) { + DCHECK(IsCanonical(lhs)); + DCHECK(IsCanonical(rhs)); + if (lhs->length() != rhs->length()) return false; + + for (int i = 0; i < lhs->length(); i++) { + if (lhs->at(i) != rhs->at(i)) return false; + } + + return true; +} + namespace { // Scoped object to keep track of how much we unroll quantifier loops in the diff --git a/deps/v8/src/regexp/regexp-compiler.cc b/deps/v8/src/regexp/regexp-compiler.cc index 93cbd9620a0f61..6b76c8ab1ad969 100644 --- a/deps/v8/src/regexp/regexp-compiler.cc +++ b/deps/v8/src/regexp/regexp-compiler.cc @@ -196,8 +196,8 @@ void RegExpAtom::AppendToText(RegExpText* text, Zone* zone) { text->AddElement(TextElement::Atom(this), zone); } -void RegExpCharacterClass::AppendToText(RegExpText* text, Zone* zone) { - text->AddElement(TextElement::CharClass(this), zone); +void RegExpClassRanges::AppendToText(RegExpText* text, Zone* zone) { + text->AddElement(TextElement::ClassRanges(this), zone); } void RegExpText::AppendToText(RegExpText* text, Zone* zone) { @@ -209,8 +209,8 @@ TextElement TextElement::Atom(RegExpAtom* atom) { return TextElement(ATOM, atom); } -TextElement TextElement::CharClass(RegExpCharacterClass* char_class) { - return TextElement(CHAR_CLASS, char_class); +TextElement TextElement::ClassRanges(RegExpClassRanges* class_ranges) { + return TextElement(CLASS_RANGES, class_ranges); } int TextElement::length() const { @@ -218,7 +218,7 @@ int TextElement::length() const { case ATOM: return atom()->length(); - case CHAR_CLASS: + case CLASS_RANGES: return 1; } UNREACHABLE(); @@ -1215,11 +1215,11 @@ void GenerateBranches(RegExpMacroAssembler* masm, ZoneList<base::uc32>* ranges, } } -void EmitCharClass(RegExpMacroAssembler* macro_assembler, - RegExpCharacterClass* cc, bool one_byte, Label* on_failure, - int cp_offset, bool check_offset, bool preloaded, - Zone* zone) { - ZoneList<CharacterRange>* ranges = cc->ranges(zone); +void EmitClassRanges(RegExpMacroAssembler* macro_assembler, + RegExpClassRanges* cr, bool one_byte, Label* on_failure, + int cp_offset, bool check_offset, bool preloaded, + Zone* zone) { + ZoneList<CharacterRange>* ranges = cr->ranges(zone); 
CharacterRange::Canonicalize(ranges); // Now that all processing (like case-insensitivity) is done, clamp the @@ -1228,7 +1228,7 @@ void EmitCharClass(RegExpMacroAssembler* macro_assembler, const int ranges_length = ranges->length(); if (ranges_length == 0) { - if (!cc->is_negated()) { + if (!cr->is_negated()) { macro_assembler->GoTo(on_failure); } if (check_offset) { @@ -1239,7 +1239,7 @@ void EmitCharClass(RegExpMacroAssembler* macro_assembler, const base::uc32 max_char = MaxCodeUnit(one_byte); if (ranges_length == 1 && ranges->at(0).IsEverything(max_char)) { - if (cc->is_negated()) { + if (cr->is_negated()) { macro_assembler->GoTo(on_failure); } else { // This is a common case hit by non-anchored expressions. @@ -1254,8 +1254,8 @@ void EmitCharClass(RegExpMacroAssembler* macro_assembler, macro_assembler->LoadCurrentCharacter(cp_offset, on_failure, check_offset); } - if (cc->is_standard(zone) && macro_assembler->CheckSpecialCharacterClass( - cc->standard_type(), on_failure)) { + if (cr->is_standard(zone) && macro_assembler->CheckSpecialClassRanges( + cr->standard_type(), on_failure)) { return; } @@ -1266,7 +1266,7 @@ void EmitCharClass(RegExpMacroAssembler* macro_assembler, // Note the flipped logic below (we check InRange if negated, NotInRange if // not negated); this is necessary since the method falls through on // failure whereas we want to fall through on success. - if (cc->is_negated()) { + if (cr->is_negated()) { if (macro_assembler->CheckCharacterInRangeArray(ranges, on_failure)) { return; } @@ -1283,7 +1283,7 @@ void EmitCharClass(RegExpMacroAssembler* macro_assembler, ZoneList<base::uc32>* range_boundaries = zone->New<ZoneList<base::uc32>>(ranges_length * 2, zone); - bool zeroth_entry_is_failure = !cc->is_negated(); + bool zeroth_entry_is_failure = !cr->is_negated(); for (int i = 0; i < ranges_length; i++) { CharacterRange& range = ranges->at(i); @@ -1659,7 +1659,7 @@ void TextNode::GetQuickCheckDetails(QuickCheckDetails* details, } else { QuickCheckDetails::Position* pos = details->positions(characters_filled_in); - RegExpCharacterClass* tree = elm.char_class(); + RegExpClassRanges* tree = elm.class_ranges(); ZoneList<CharacterRange>* ranges = tree->ranges(zone()); if (tree->is_negated() || ranges->is_empty()) { // A quick check uses multi-character mask and compare. There is no @@ -1887,13 +1887,13 @@ RegExpNode* TextNode::FilterOneByte(int depth, RegExpFlags flags) { writable_quarks[j] = c; } } else { - DCHECK(elm.text_type() == TextElement::CHAR_CLASS); - RegExpCharacterClass* cc = elm.char_class(); - ZoneList<CharacterRange>* ranges = cc->ranges(zone()); + DCHECK(elm.text_type() == TextElement::CLASS_RANGES); + RegExpClassRanges* cr = elm.class_ranges(); + ZoneList<CharacterRange>* ranges = cr->ranges(zone()); CharacterRange::Canonicalize(ranges); // Now they are in order so we only need to look at the first. int range_count = ranges->length(); - if (cc->is_negated()) { + if (cr->is_negated()) { if (range_count != 0 && ranges->at(0).from() == 0 && ranges->at(0).to() >= String::kMaxOneByteCharCode) { // This will be handled in a later filter. @@ -2091,7 +2091,7 @@ namespace { // Check for [0-9A-Z_a-z]. void EmitWordCheck(RegExpMacroAssembler* assembler, Label* word, Label* non_word, bool fall_through_on_word) { - if (assembler->CheckSpecialCharacterClass( + if (assembler->CheckSpecialClassRanges( fall_through_on_word ? StandardCharacterSet::kWord : StandardCharacterSet::kNotWord, fall_through_on_word ? 
non_word : word)) { @@ -2139,8 +2139,8 @@ void EmitHat(RegExpCompiler* compiler, RegExpNode* on_success, Trace* trace) { const bool can_skip_bounds_check = !may_be_at_or_before_subject_string_start; assembler->LoadCurrentCharacter(new_trace.cp_offset() - 1, new_trace.backtrack(), can_skip_bounds_check); - if (!assembler->CheckSpecialCharacterClass( - StandardCharacterSet::kLineTerminator, new_trace.backtrack())) { + if (!assembler->CheckSpecialClassRanges(StandardCharacterSet::kLineTerminator, + new_trace.backtrack())) { // Newline means \n, \r, 0x2028 or 0x2029. if (!compiler->one_byte()) { assembler->CheckCharacterAfterAnd(0x2028, 0xFFFE, &ok); @@ -2388,14 +2388,14 @@ void TextNode::TextEmitPass(RegExpCompiler* compiler, TextEmitPassType pass, if (bounds_checked) UpdateBoundsCheck(cp_offset + j, checked_up_to); } } else { - DCHECK_EQ(TextElement::CHAR_CLASS, elm.text_type()); + DCHECK_EQ(TextElement::CLASS_RANGES, elm.text_type()); if (pass == CHARACTER_CLASS_MATCH) { if (first_element_checked && i == 0) continue; if (DeterminedAlready(quick_check, elm.cp_offset())) continue; - RegExpCharacterClass* cc = elm.char_class(); + RegExpClassRanges* cr = elm.class_ranges(); bool bounds_check = *checked_up_to < cp_offset || read_backward(); - EmitCharClass(assembler, cc, one_byte, backtrack, cp_offset, - bounds_check, preloaded, zone()); + EmitClassRanges(assembler, cr, one_byte, backtrack, cp_offset, + bounds_check, preloaded, zone()); UpdateBoundsCheck(cp_offset, checked_up_to); } } @@ -2422,8 +2422,8 @@ TextNode* TextNode::CreateForCharacterRanges(Zone* zone, RegExpNode* on_success) { DCHECK_NOT_NULL(ranges); // TODO(jgruber): There's no fundamental need to create this - // RegExpCharacterClass; we could refactor to avoid the allocation. - return zone->New<TextNode>(zone->New<RegExpCharacterClass>(zone, ranges), + // RegExpClassRanges; we could refactor to avoid the allocation. 
+ return zone->New<TextNode>(zone->New<RegExpClassRanges>(zone, ranges), read_backward, on_success); } @@ -2432,11 +2432,11 @@ TextNode* TextNode::CreateForSurrogatePair( bool read_backward, RegExpNode* on_success) { ZoneList<CharacterRange>* lead_ranges = CharacterRange::List(zone, lead); ZoneList<TextElement>* elms = zone->New<ZoneList<TextElement>>(2, zone); - elms->Add(TextElement::CharClass( - zone->New<RegExpCharacterClass>(zone, lead_ranges)), - zone); - elms->Add(TextElement::CharClass( - zone->New<RegExpCharacterClass>(zone, trail_ranges)), + elms->Add( + TextElement::ClassRanges(zone->New<RegExpClassRanges>(zone, lead_ranges)), + zone); + elms->Add(TextElement::ClassRanges( + zone->New<RegExpClassRanges>(zone, trail_ranges)), zone); return zone->New<TextNode>(elms, read_backward, on_success); } @@ -2446,11 +2446,11 @@ TextNode* TextNode::CreateForSurrogatePair( bool read_backward, RegExpNode* on_success) { ZoneList<CharacterRange>* trail_ranges = CharacterRange::List(zone, trail); ZoneList<TextElement>* elms = zone->New<ZoneList<TextElement>>(2, zone); - elms->Add(TextElement::CharClass( - zone->New<RegExpCharacterClass>(zone, lead_ranges)), - zone); - elms->Add(TextElement::CharClass( - zone->New<RegExpCharacterClass>(zone, trail_ranges)), + elms->Add( + TextElement::ClassRanges(zone->New<RegExpClassRanges>(zone, lead_ranges)), + zone); + elms->Add(TextElement::ClassRanges( + zone->New<RegExpClassRanges>(zone, trail_ranges)), zone); return zone->New<TextNode>(elms, read_backward, on_success); } @@ -2534,12 +2534,12 @@ void TextNode::MakeCaseIndependent(Isolate* isolate, bool is_one_byte, int element_count = elements()->length(); for (int i = 0; i < element_count; i++) { TextElement elm = elements()->at(i); - if (elm.text_type() == TextElement::CHAR_CLASS) { - RegExpCharacterClass* cc = elm.char_class(); + if (elm.text_type() == TextElement::CLASS_RANGES) { + RegExpClassRanges* cr = elm.class_ranges(); // None of the standard character classes is different in the case // independent case and it slows us down if we don't know that. - if (cc->is_standard(zone())) continue; - ZoneList<CharacterRange>* ranges = cc->ranges(zone()); + if (cr->is_standard(zone())) continue; + ZoneList<CharacterRange>* ranges = cr->ranges(zone()); CharacterRange::AddCaseEquivalents(isolate, zone(), ranges, is_one_byte); } } @@ -2552,8 +2552,8 @@ RegExpNode* TextNode::GetSuccessorOfOmnivorousTextNode( if (read_backward()) return nullptr; if (elements()->length() != 1) return nullptr; TextElement elm = elements()->at(0); - if (elm.text_type() != TextElement::CHAR_CLASS) return nullptr; - RegExpCharacterClass* node = elm.char_class(); + if (elm.text_type() != TextElement::CLASS_RANGES) return nullptr; + RegExpClassRanges* node = elm.class_ranges(); ZoneList<CharacterRange>* ranges = node->ranges(zone()); CharacterRange::Canonicalize(ranges); if (node->is_negated()) { @@ -3479,7 +3479,7 @@ void BackReferenceNode::Emit(RegExpCompiler* compiler, Trace* trace) { DCHECK_EQ(start_reg_ + 1, end_reg_); if (IsIgnoreCase(flags_)) { - bool unicode = IsUnicode(flags_); + bool unicode = IsEitherUnicode(flags_); assembler->CheckNotBackReferenceIgnoreCase(start_reg_, read_backward(), unicode, trace->backtrack()); } else { @@ -3490,7 +3490,7 @@ void BackReferenceNode::Emit(RegExpCompiler* compiler, Trace* trace) { if (read_backward()) trace->set_at_start(Trace::UNKNOWN); // Check that the back reference does not end inside a surrogate pair. 
- if (IsUnicode(flags_) && !compiler->one_byte()) { + if (IsEitherUnicode(flags_) && !compiler->one_byte()) { assembler->CheckNotInSurrogatePair(trace->cp_offset(), trace->backtrack()); } on_success()->Emit(compiler, trace); @@ -3856,10 +3856,10 @@ void TextNode::FillInBMInfo(Isolate* isolate, int initial_offset, int budget, } } } else { - DCHECK_EQ(TextElement::CHAR_CLASS, text.text_type()); - RegExpCharacterClass* char_class = text.char_class(); - ZoneList<CharacterRange>* ranges = char_class->ranges(zone()); - if (char_class->is_negated()) { + DCHECK_EQ(TextElement::CLASS_RANGES, text.text_type()); + RegExpClassRanges* class_ranges = text.class_ranges(); + ZoneList<CharacterRange>* ranges = class_ranges->ranges(zone()); + if (class_ranges->is_negated()) { bm->SetAll(offset); } else { for (int k = 0; k < ranges->length(); k++) { @@ -3919,8 +3919,8 @@ RegExpNode* RegExpCompiler::PreprocessRegExp(RegExpCompileData* data, // this expression is anchored at the beginning or sticky. RegExpNode* loop_node = RegExpQuantifier::ToNode( 0, RegExpTree::kInfinity, false, - zone()->New<RegExpCharacterClass>(StandardCharacterSet::kEverything), - this, captured_body, data->contains_anchor); + zone()->New<RegExpClassRanges>(StandardCharacterSet::kEverything), this, + captured_body, data->contains_anchor); if (data->contains_anchor) { // Unroll loop once, to take care of the case that might start @@ -3928,7 +3928,7 @@ RegExpNode* RegExpCompiler::PreprocessRegExp(RegExpCompileData* data, ChoiceNode* first_step_node = zone()->New<ChoiceNode>(2, zone()); first_step_node->AddAlternative(GuardedAlternative(captured_body)); first_step_node->AddAlternative(GuardedAlternative(zone()->New<TextNode>( - zone()->New<RegExpCharacterClass>(StandardCharacterSet::kEverything), + zone()->New<RegExpClassRanges>(StandardCharacterSet::kEverything), false, loop_node))); node = first_step_node; } else { @@ -3942,7 +3942,7 @@ RegExpNode* RegExpCompiler::PreprocessRegExp(RegExpCompileData* data, if (node != nullptr) { node = node->FilterOneByte(RegExpCompiler::kMaxRecursion, flags); } - } else if (IsUnicode(flags) && (IsGlobal(flags) || IsSticky(flags))) { + } else if (IsEitherUnicode(flags) && (IsGlobal(flags) || IsSticky(flags))) { node = OptionallyStepBackToLeadSurrogate(node); } diff --git a/deps/v8/src/regexp/regexp-compiler.h b/deps/v8/src/regexp/regexp-compiler.h index 421fc9457c047d..39b1c946fb5b91 100644 --- a/deps/v8/src/regexp/regexp-compiler.h +++ b/deps/v8/src/regexp/regexp-compiler.h @@ -51,9 +51,9 @@ constexpr int kPatternTooShortForBoyerMoore = 2; } // namespace regexp_compiler_constants inline bool NeedsUnicodeCaseEquivalents(RegExpFlags flags) { - // Both unicode and ignore_case flags are set. We need to use ICU to find - // the closure over case equivalents. - return IsUnicode(flags) && IsIgnoreCase(flags); + // Both unicode (or unicode sets) and ignore_case flags are set. We need to + // use ICU to find the closure over case equivalents. 
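+ // For example, /[k]/ui must also match U+212A (KELVIN SIGN), an + // equivalence only ICU's UnicodeSet::closeOver discovers.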
+ return IsEitherUnicode(flags) && IsIgnoreCase(flags); } // Details of a quick mask-compare check that can look ahead in the diff --git a/deps/v8/src/regexp/regexp-dotprinter.cc b/deps/v8/src/regexp/regexp-dotprinter.cc index bf651963af8d34..3ce86cd619e829 100644 --- a/deps/v8/src/regexp/regexp-dotprinter.cc +++ b/deps/v8/src/regexp/regexp-dotprinter.cc @@ -135,8 +135,8 @@ void DotPrinterImpl::VisitText(TextNode* that) { } break; } - case TextElement::CHAR_CLASS: { - RegExpCharacterClass* node = elm.char_class(); + case TextElement::CLASS_RANGES: { + RegExpClassRanges* node = elm.class_ranges(); os_ << "["; if (node->is_negated()) os_ << "^"; for (int j = 0; j < node->ranges(zone)->length(); j++) { diff --git a/deps/v8/src/regexp/regexp-error.h b/deps/v8/src/regexp/regexp-error.h index 6485e74bb65204..5c4ea28d7da13c 100644 --- a/deps/v8/src/regexp/regexp-error.h +++ b/deps/v8/src/regexp/regexp-error.h @@ -42,7 +42,9 @@ namespace internal { T(InvalidClassPropertyName, "Invalid property name in character class") \ T(InvalidCharacterClass, "Invalid character class") \ T(UnterminatedCharacterClass, "Unterminated character class") \ - T(OutOfOrderCharacterClass, "Range out of order in character class") + T(OutOfOrderCharacterClass, "Range out of order in character class") \ + T(InvalidClassSetOperation, "Invalid set operation in character class") \ + T(InvalidCharacterInClass, "Invalid character in character class") enum class RegExpError : uint32_t { #define TEMPLATE(NAME, STRING) k##NAME, diff --git a/deps/v8/src/regexp/regexp-flags.h b/deps/v8/src/regexp/regexp-flags.h index 4f9bff2d25afb6..5eddfec6baa384 100644 --- a/deps/v8/src/regexp/regexp-flags.h +++ b/deps/v8/src/regexp/regexp-flags.h @@ -57,6 +57,10 @@ DEFINE_OPERATORS_FOR_FLAGS(RegExpFlags) REGEXP_FLAG_LIST(V) #undef V +constexpr bool IsEitherUnicode(RegExpFlags f) { + return IsUnicode(f) || IsUnicodeSets(f); +} + // clang-format off #define V(Lower, Camel, LowerCamel, Char, Bit) \ c == Char ? RegExpFlag::k##Camel : diff --git a/deps/v8/src/regexp/regexp-macro-assembler-tracer.cc b/deps/v8/src/regexp/regexp-macro-assembler-tracer.cc index 916cedb640d369..50a467a9801083 100644 --- a/deps/v8/src/regexp/regexp-macro-assembler-tracer.cc +++ b/deps/v8/src/regexp/regexp-macro-assembler-tracer.cc @@ -392,11 +392,10 @@ void RegExpMacroAssemblerTracer::CheckPosition(int cp_offset, assembler_->CheckPosition(cp_offset, on_outside_input); } -bool RegExpMacroAssemblerTracer::CheckSpecialCharacterClass( +bool RegExpMacroAssemblerTracer::CheckSpecialClassRanges( StandardCharacterSet type, Label* on_no_match) { - bool supported = assembler_->CheckSpecialCharacterClass(type, - on_no_match); - PrintF(" CheckSpecialCharacterClass(type='%c', label[%08x]): %s;\n", + bool supported = assembler_->CheckSpecialClassRanges(type, on_no_match); + PrintF(" CheckSpecialClassRanges(type='%c', label[%08x]): %s;\n", static_cast<char>(type), LabelToInt(on_no_match), supported ? 
"true" : "false"); return supported; diff --git a/deps/v8/src/regexp/regexp-macro-assembler-tracer.h b/deps/v8/src/regexp/regexp-macro-assembler-tracer.h index 82e6b9e89e963a..5f9da291448add 100644 --- a/deps/v8/src/regexp/regexp-macro-assembler-tracer.h +++ b/deps/v8/src/regexp/regexp-macro-assembler-tracer.h @@ -54,8 +54,8 @@ class RegExpMacroAssemblerTracer: public RegExpMacroAssembler { Label* on_not_in_range) override; void CheckBitInTable(Handle<ByteArray> table, Label* on_bit_set) override; void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/regexp/regexp-macro-assembler.h b/deps/v8/src/regexp/regexp-macro-assembler.h index e27e90b8d0d342..e75489398fe87e 100644 --- a/deps/v8/src/regexp/regexp-macro-assembler.h +++ b/deps/v8/src/regexp/regexp-macro-assembler.h @@ -114,8 +114,8 @@ class RegExpMacroAssembler { // character. Returns false if the type of special character class does // not have custom support. // May clobber the current loaded character. - virtual bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) { + virtual bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) { return false; } diff --git a/deps/v8/src/regexp/regexp-nodes.h b/deps/v8/src/regexp/regexp-nodes.h index 121980b41108be..b3d6e1aa80f885 100644 --- a/deps/v8/src/regexp/regexp-nodes.h +++ b/deps/v8/src/regexp/regexp-nodes.h @@ -397,12 +397,11 @@ class TextNode : public SeqRegExpNode { TextNode(ZoneList<TextElement>* elms, bool read_backward, RegExpNode* on_success) : SeqRegExpNode(on_success), elms_(elms), read_backward_(read_backward) {} - TextNode(RegExpCharacterClass* that, bool read_backward, - RegExpNode* on_success) + TextNode(RegExpClassRanges* that, bool read_backward, RegExpNode* on_success) : SeqRegExpNode(on_success), elms_(zone()->New<ZoneList<TextElement>>(1, zone())), read_backward_(read_backward) { - elms_->Add(TextElement::CharClass(that), zone()); + elms_->Add(TextElement::ClassRanges(that), zone()); } // Create TextNode for a single character class for the given ranges. static TextNode* CreateForCharacterRanges(Zone* zone, diff --git a/deps/v8/src/regexp/regexp-parser.cc b/deps/v8/src/regexp/regexp-parser.cc index 44652f6767d898..9bc0df663aea19 100644 --- a/deps/v8/src/regexp/regexp-parser.cc +++ b/deps/v8/src/regexp/regexp-parser.cc @@ -7,7 +7,6 @@ #include "src/base/small-vector.h" #include "src/execution/isolate.h" #include "src/objects/string-inl.h" -#include "src/regexp/property-sequences.h" #include "src/regexp/regexp-ast.h" #include "src/regexp/regexp-macro-assembler.h" #include "src/regexp/regexp.h" @@ -33,6 +32,16 @@ enum class InClassEscapeState { kNotInClass, }; +// The production used to derive ClassSetOperand. +enum class ClassSetOperandType { + kClassSetCharacter, + kClassStringDisjunction, + kNestedClass, + kCharacterClassEscape, // \ CharacterClassEscape is a special nested class, + // as we can fold it directly into another range. + kClassSetRange +}; + // Accumulates RegExp atoms and assertions into lists of terms and alternatives. class RegExpBuilder { public: @@ -48,8 +57,8 @@ class RegExpBuilder { // "Adds" an empty expression. 
Does nothing except consume a // following quantifier void AddEmpty(); - void AddCharacterClass(RegExpCharacterClass* cc); - void AddCharacterClassForDesugaring(base::uc32 c); + void AddClassRanges(RegExpClassRanges* cc); + void AddClassRangesForDesugaring(base::uc32 c); void AddAtom(RegExpTree* tree); void AddTerm(RegExpTree* tree); void AddAssertion(RegExpTree* tree); @@ -71,10 +80,15 @@ class RegExpBuilder { void FlushPendingSurrogate(); void FlushCharacters(); void FlushTerms(); - bool NeedsDesugaringForUnicode(RegExpCharacterClass* cc); + bool NeedsDesugaringForUnicode(RegExpClassRanges* cc); bool NeedsDesugaringForIgnoreCase(base::uc32 c); + bool IsUnicodeMode() const { + // Either /v or /u enables UnicodeMode + // TODO(v8:11935): Change permalink once proposal is in stage 4. + // https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#sec-parsepattern + return IsUnicode(flags_) || IsUnicodeSets(flags_); + } Zone* zone() const { return zone_; } - bool unicode() const { return IsUnicode(flags_); } Zone* const zone_; bool pending_empty_ = false; @@ -211,16 +225,8 @@ class RegExpParserImpl final { const ZoneVector<char>& name_1, const ZoneVector<char>& name_2); - RegExpTree* ParseCharacterClass(const RegExpBuilder* state); - - base::uc32 ParseOctalLiteral(); - - // Tries to parse the input as a back reference. If successful it - // stores the result in the output parameter and returns true. If - // it fails it will push back the characters read so the same characters - // can be reparsed. - bool ParseBackReferenceIndex(int* index_out); - + RegExpTree* ParseClassRanges(ZoneList<CharacterRange>* ranges, + bool add_unicode_case_equivalents); // Parse inside a class. Either add escaped class to the range, or return // false and pass parsed single character through |char_out|. void ParseClassEscape(ZoneList<CharacterRange>* ranges, Zone* zone, @@ -232,10 +238,35 @@ class RegExpParserImpl final { ZoneList<CharacterRange>* ranges, Zone* zone, bool add_unicode_case_equivalents); + RegExpTree* ParseClassStringDisjunction(); + RegExpTree* ParseClassSetOperand(const RegExpBuilder* builder, + ClassSetOperandType* type_out); + RegExpTree* ParseClassSetOperand(const RegExpBuilder* builder, + ClassSetOperandType* type_out, + ZoneList<CharacterRange>* ranges); // Parses and returns a single escaped character. base::uc32 ParseCharacterEscape(InClassEscapeState in_class_escape_state, bool* is_escaped_unicode_character); + RegExpTree* ParseClassUnion(const RegExpBuilder* builder, bool is_negated, + RegExpTree* first_operand, + ClassSetOperandType first_operand_type, + ZoneList<CharacterRange>* ranges); + RegExpTree* ParseClassIntersection(const RegExpBuilder* builder, + bool is_negated, + RegExpTree* first_operand); + RegExpTree* ParseClassSubtraction(const RegExpBuilder* builder, + bool is_negated, RegExpTree* first_operand); + RegExpTree* ParseCharacterClass(const RegExpBuilder* state); + + base::uc32 ParseOctalLiteral(); + + // Tries to parse the input as a back reference. If successful it + // stores the result in the output parameter and returns true. If + // it fails it will push back the characters read so the same characters + // can be reparsed.
+ bool ParseBackReferenceIndex(int* index_out); + RegExpTree* ReportError(RegExpError error); void Advance(); void Advance(int dist); @@ -250,9 +281,20 @@ int captures_started() const { return captures_started_; } int position() const { return next_pos_ - 1; } bool failed() const { return failed_; } - bool unicode() const { return IsUnicode(top_level_flags_) || force_unicode_; } + bool IsUnicodeMode() const { + // Either /v or /u enables UnicodeMode + // TODO(v8:11935): Change permalink once proposal is in stage 4. + // https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#sec-parsepattern + return IsUnicode(top_level_flags_) || IsUnicodeSets(top_level_flags_) || + force_unicode_; + } + bool unicode_sets() const { return IsUnicodeSets(top_level_flags_); } + bool ignore_case() const { return IsIgnoreCase(top_level_flags_); } static bool IsSyntaxCharacterOrSlash(base::uc32 c); + static bool IsClassSetSyntaxCharacter(base::uc32 c); + static bool IsClassSetReservedPunctuator(base::uc32 c); + bool IsClassSetReservedDoublePunctuator(base::uc32 c); static const base::uc32 kEndMarker = (1 << 21); @@ -346,13 +388,7 @@ class RegExpParserImpl final { bool failed_; const uintptr_t stack_limit_; - friend bool RegExpParser::ParseRegExpFromHeapString(Isolate*, Zone*, - Handle<String>, - RegExpFlags, - RegExpCompileData*); - friend bool RegExpParser::VerifyRegExpSyntax<CharT>( - Zone*, uintptr_t, const CharT*, int, RegExpFlags, RegExpCompileData*, - const DisallowGarbageCollection&); + friend class v8::internal::RegExpParser; }; template <class CharT> @@ -398,8 +434,8 @@ inline base::uc32 RegExpParserImpl<base::uc16>::ReadNext() { base::uc16 c0 = InputAt(position); base::uc32 result = c0; position++; - // Read the whole surrogate pair in case of unicode flag, if possible. - if (unicode() && position < input_length() && + // Read the whole surrogate pair in unicode mode, if possible. + if (IsUnicodeMode() && position < input_length() && unibrow::Utf16::IsLeadSurrogate(c0)) { base::uc16 c1 = InputAt(position); if (unibrow::Utf16::IsTrailSurrogate(c1)) { @@ -442,7 +478,7 @@ void RegExpParserImpl<CharT>::Advance() { template <class CharT> void RegExpParserImpl<CharT>::RewindByOneCodepoint() { - if (current() == kEndMarker) return; + if (!has_more()) return; // Rewinds by one code point, i.e.: two code units if `current` is outside // the basic multilingual plane (= composed of a lead and trail surrogate), // or one code unit otherwise.
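+ // Hand-evaluated example: with /u, the code units 0xD83D 0xDCA9 are + // combined by ReadNext() into the single code point U+1F4A9, and + // RewindByOneCodepoint() later steps back over both units at once.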
@@ -464,6 +500,7 @@ void RegExpParserImpl<CharT>::Advance(int dist) { Advance(); } +// static template <class CharT> bool RegExpParserImpl<CharT>::IsSyntaxCharacterOrSlash(base::uc32 c) { switch (c) { @@ -489,6 +526,86 @@ bool RegExpParserImpl<CharT>::IsSyntaxCharacterOrSlash(base::uc32 c) { return false; } +// static +template <class CharT> +bool RegExpParserImpl<CharT>::IsClassSetSyntaxCharacter(base::uc32 c) { + switch (c) { + case '(': + case ')': + case '[': + case ']': + case '{': + case '}': + case '/': + case '-': + case '\\': + case '|': + return true; + default: + break; + } + return false; +} + +// static +template <class CharT> +bool RegExpParserImpl<CharT>::IsClassSetReservedPunctuator(base::uc32 c) { + switch (c) { + case '&': + case '-': + case '!': + case '#': + case '%': + case ',': + case ':': + case ';': + case '<': + case '=': + case '>': + case '@': + case '`': + case '~': + return true; + default: + break; + } + return false; +} + +template <class CharT> +bool RegExpParserImpl<CharT>::IsClassSetReservedDoublePunctuator(base::uc32 c) { +#define DOUBLE_PUNCTUATOR_CASE(Char) \ + case Char: \ + return Next() == Char + + switch (c) { + DOUBLE_PUNCTUATOR_CASE('&'); + DOUBLE_PUNCTUATOR_CASE('!'); + DOUBLE_PUNCTUATOR_CASE('#'); + DOUBLE_PUNCTUATOR_CASE('$'); + DOUBLE_PUNCTUATOR_CASE('%'); + DOUBLE_PUNCTUATOR_CASE('*'); + DOUBLE_PUNCTUATOR_CASE('+'); + DOUBLE_PUNCTUATOR_CASE(','); + DOUBLE_PUNCTUATOR_CASE('.'); + DOUBLE_PUNCTUATOR_CASE(':'); + DOUBLE_PUNCTUATOR_CASE(';'); + DOUBLE_PUNCTUATOR_CASE('<'); + DOUBLE_PUNCTUATOR_CASE('='); + DOUBLE_PUNCTUATOR_CASE('>'); + DOUBLE_PUNCTUATOR_CASE('?'); + DOUBLE_PUNCTUATOR_CASE('@'); + DOUBLE_PUNCTUATOR_CASE('^'); + DOUBLE_PUNCTUATOR_CASE('`'); + DOUBLE_PUNCTUATOR_CASE('~'); + default: + break; + } +#undef DOUBLE_PUNCTUATOR_CASE + + return false; +} + template <class CharT> RegExpTree* RegExpParserImpl<CharT>::ReportError(RegExpError error) { if (failed_) return nullptr; // Do not overwrite any existing error. @@ -635,9 +752,9 @@ RegExpTree* RegExpParserImpl<CharT>::ParseDisjunction() { StandardCharacterSet::kNotLineTerminator, ranges, false, zone()); } - RegExpCharacterClass* cc = - zone()->template New<RegExpCharacterClass>(zone(), ranges); - builder->AddCharacterClass(cc); + RegExpClassRanges* cc = + zone()->template New<RegExpClassRanges>(zone(), ranges); + builder->AddClassRanges(cc); break; } case '(': { @@ -647,7 +764,12 @@ } case '[': { RegExpTree* cc = ParseCharacterClass(builder CHECK_FAILED); - builder->AddCharacterClass(cc->AsCharacterClass()); + if (cc->IsClassRanges()) { + builder->AddClassRanges(cc->AsClassRanges()); + } else { + DCHECK(cc->IsClassSetExpression()); + builder->AddTerm(cc); + } break; } // Atom :: @@ -697,9 +819,9 @@ } break; } - // With /u, no identity escapes except for syntax characters + // With /u and /v, no identity escapes except for syntax characters // are allowed. Otherwise, all identity escapes are allowed. - if (unicode()) { + if (IsUnicodeMode()) { return ReportError(RegExpError::kInvalidEscape); } base::uc32 first_digit = Next(); @@ -712,8 +834,8 @@ } case '0': { Advance(); - if (unicode() && Next() >= '0' && Next() <= '9') { - // With /u, decimal escape with leading 0 are not parsed as octal. + if (IsUnicodeMode() && Next() >= '0' && Next() <= '9') { + // Decimal escapes with a leading 0 are not parsed as octal.
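+ // e.g. /\00/u is rejected here, while without /u the same pattern + // matches the NUL character via the legacy octal escape.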
return ReportError(RegExpError::kInvalidDecimalEscape); } base::uc32 octal = ParseOctalLiteral(); @@ -744,17 +866,17 @@ RegExpTree* RegExpParserImpl<CharT>::ParseDisjunction() { ZoneList<CharacterRange>* ranges = zone()->template New<ZoneList<CharacterRange>>(2, zone()); bool add_unicode_case_equivalents = - unicode() && builder->ignore_case(); + IsUnicodeMode() && ignore_case(); bool parsed_character_class_escape = TryParseCharacterClassEscape( next, InClassEscapeState::kNotInClass, ranges, zone(), add_unicode_case_equivalents CHECK_FAILED); if (parsed_character_class_escape) { - RegExpCharacterClass* cc = - zone()->template New<RegExpCharacterClass>(zone(), ranges); - builder->AddCharacterClass(cc); + RegExpClassRanges* cc = + zone()->template New<RegExpClassRanges>(zone(), ranges); + builder->AddClassRanges(cc); } else { - CHECK(!unicode()); + CHECK(!IsUnicodeMode()); Advance(2); builder->AddCharacter(next); // IdentityEscape. } @@ -770,7 +892,7 @@ RegExpTree* RegExpParserImpl<CharT>::ParseDisjunction() { // in all other cases. const bool has_named_captures = HasNamedCaptures(InClassEscapeState::kNotInClass CHECK_FAILED); - if (unicode() || has_named_captures) { + if (IsUnicodeMode() || has_named_captures) { Advance(2); ParseNamedBackReference(builder, state CHECK_FAILED); break; @@ -801,7 +923,7 @@ RegExpTree* RegExpParserImpl<CharT>::ParseDisjunction() { } case '}': case ']': - if (unicode()) { + if (IsUnicodeMode()) { return ReportError(RegExpError::kLoneQuantifierBrackets); } V8_FALLTHROUGH; @@ -840,8 +962,8 @@ RegExpTree* RegExpParserImpl<CharT>::ParseDisjunction() { return ReportError(RegExpError::kRangeOutOfOrder); } break; - } else if (unicode()) { - // With /u, incomplete quantifiers are not allowed. + } else if (IsUnicodeMode()) { + // Incomplete quantifiers are not allowed. return ReportError(RegExpError::kIncompleteQuantifier); } continue; @@ -954,7 +1076,8 @@ bool IsSpecialClassEscape(base::uc32 c) { // characters. // // Important: The scanner has to be in a consistent state when calling -// ScanForCaptures, e.g. not in the middle of an escape sequence '\['. +// ScanForCaptures, e.g. not in the middle of an escape sequence '\[' or while +// parsing a nested class. template <class CharT> void RegExpParserImpl<CharT>::ScanForCaptures( InClassEscapeState in_class_escape_state) { @@ -964,6 +1087,9 @@ void RegExpParserImpl<CharT>::ScanForCaptures( int capture_count = captures_started(); // When we start inside a character class, skip everything inside the class. if (in_class_escape_state == InClassEscapeState::kInClass) { + // \k is always invalid within a class in unicode mode, thus we should never + // call ScanForCaptures within a class. + DCHECK(!IsUnicodeMode()); int c; while ((c = current()) != kEndMarker) { Advance(); @@ -983,13 +1109,19 @@ void RegExpParserImpl<CharT>::ScanForCaptures( Advance(); break; case '[': { + int class_nest_level = 0; int c; while ((c = current()) != kEndMarker) { Advance(); if (c == '\\') { Advance(); - } else { - if (c == ']') break; + } else if (c == '[') { + // With /v, '[' inside a class is treated as a nested class. + // Without /v, '[' is a normal character. 
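+ // e.g. for /[a[b]c]/v the class ends only at the final ']', while + // without /v it already ends at the first ']' (after 'b').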
+ if (unicode_sets()) class_nest_level++; + } else if (c == ']') { + if (class_nest_level == 0) break; + class_nest_level--; } } break; @@ -1046,8 +1178,9 @@ bool RegExpParserImpl<CharT>::ParseBackReferenceIndex(int* index_out) { } } if (value > captures_started()) { - if (!is_scanned_for_captures_) + if (!is_scanned_for_captures_) { ScanForCaptures(InClassEscapeState::kNotInClass); + } if (value > capture_count_) { Reset(start); return false; @@ -1351,7 +1484,7 @@ bool RegExpParserImpl<CharT>::ParseIntervalQuantifier(int* min_out, template <class CharT> base::uc32 RegExpParserImpl<CharT>::ParseOctalLiteral() { - DCHECK(('0' <= current() && current() <= '7') || current() == kEndMarker); + DCHECK(('0' <= current() && current() <= '7') || !has_more()); // For compatibility with some other browsers (not all), we parse // up to three octal digits with a value below 256. // ES#prod-annexB-LegacyOctalEscapeSequence @@ -1392,7 +1525,7 @@ bool RegExpParserImpl<CharT>::ParseUnicodeEscape(base::uc32* value) { // Accept both \uxxxx and \u{xxxxxx} (if harmony unicode escapes are // allowed). In the latter case, the number of hex digits between { } is // arbitrary. \ and u have already been read. - if (current() == '{' && unicode()) { + if (current() == '{' && IsUnicodeMode()) { int start = position(); Advance(); if (ParseUnlimitedLengthHexNumber(0x10FFFF, value)) { @@ -1406,7 +1539,7 @@ bool RegExpParserImpl<CharT>::ParseUnicodeEscape(base::uc32* value) { } // \u but no {, or \u{...} escapes not allowed. bool result = ParseHexEscape(4, value); - if (result && unicode() && unibrow::Utf16::IsLeadSurrogate(*value) && + if (result && IsUnicodeMode() && unibrow::Utf16::IsLeadSurrogate(*value) && current() == '\\') { // Attempt to read trail surrogate. int start = position(); @@ -1461,6 +1594,7 @@ bool IsExactPropertyValueAlias(const char* property_value_name, bool LookupPropertyValueName(UProperty property, const char* property_value_name, bool negate, + bool needs_case_folding, ZoneList<CharacterRange>* result, Zone* zone) { UProperty property_for_lookup = property; if (property_for_lookup == UCHAR_SCRIPT_EXTENSIONS) { @@ -1485,6 +1619,7 @@ bool LookupPropertyValueName(UProperty property, bool success = ec == U_ZERO_ERROR && !set.isEmpty(); if (success) { + if (needs_case_folding) set.closeOver(USET_CASE_INSENSITIVE); set.removeAllStrings(); if (negate) set.complement(); for (int i = 0; i < set.getRangeCount(); i++) { @@ -1503,7 +1638,8 @@ inline bool NameEquals(const char* name, const char (&literal)[N]) { bool LookupSpecialPropertyValueName(const char* name, ZoneList<CharacterRange>* result, - bool negate, Zone* zone) { + bool negate, bool needs_case_folding, + Zone* zone) { if (NameEquals(name, "Any")) { if (negate) { // Leave the list of character ranges empty, since the negation of 'Any' @@ -1517,7 +1653,7 @@ bool LookupSpecialPropertyValueName(const char* name, zone); } else if (NameEquals(name, "Assigned")) { return LookupPropertyValueName(UCHAR_GENERAL_CATEGORY, "Unassigned", - !negate, result, zone); + !negate, needs_case_folding, result, zone); } else { return false; } @@ -1646,23 +1782,29 @@ template <class CharT> bool RegExpParserImpl<CharT>::AddPropertyClassRange( ZoneList<CharacterRange>* add_to, bool negate, const ZoneVector<char>& name_1, const ZoneVector<char>& name_2) { + // With /vi, we need to apply case folding to property values. + // TODO(v8:11935): Change permalink once proposal is in stage 4. 
+ // See + // https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-maybesimplecasefolding + const bool needs_case_folding = unicode_sets() && ignore_case(); if (name_2.empty()) { // First attempt to interpret as general category property value name. const char* name = name_1.data(); if (LookupPropertyValueName(UCHAR_GENERAL_CATEGORY_MASK, name, negate, - add_to, zone())) { + needs_case_folding, add_to, zone())) { return true; } // Interpret "Any", "ASCII", and "Assigned". - if (LookupSpecialPropertyValueName(name, add_to, negate, zone())) { + if (LookupSpecialPropertyValueName(name, add_to, negate, needs_case_folding, + zone())) { return true; } // Then attempt to interpret as binary property name with value name 'Y'. UProperty property = u_getPropertyEnum(name); if (!IsSupportedBinaryProperty(property)) return false; if (!IsExactPropertyAlias(name, property)) return false; - return LookupPropertyValueName(property, negate ? "N" : "Y", false, add_to, - zone()); + return LookupPropertyValueName(property, negate ? "N" : "Y", false, + needs_case_folding, add_to, zone()); } else { // Both property name and value name are specified. Attempt to interpret // the property name as enumerated property. @@ -1677,8 +1819,8 @@ bool RegExpParserImpl<CharT>::AddPropertyClassRange( property != UCHAR_SCRIPT_EXTENSIONS) { return false; } - return LookupPropertyValueName(property, value_name, negate, add_to, - zone()); + return LookupPropertyValueName(property, value_name, negate, + needs_case_folding, add_to, zone()); } } @@ -1760,14 +1902,14 @@ base::uc32 RegExpParserImpl<CharT>::ParseCharacterEscape( // 0x00-0x1F. return controlLetter & 0x1F; } - if (unicode()) { - // With /u, invalid escapes are not treated as identity escapes. + if (IsUnicodeMode()) { + // With /u and /v, invalid escapes are not treated as identity escapes. ReportError(RegExpError::kInvalidUnicodeEscape); return 0; } if (in_class_escape_state == InClassEscapeState::kInClass) { // Inside a character class, we also accept digits and underscore as - // control characters, unless with /u. See Annex B: + // control characters, unless with /u or /v. See Annex B: // ES#prod-annexB-ClassControlLetter if ((controlLetter >= '0' && controlLetter <= '9') || controlLetter == '_') { @@ -1800,8 +1942,9 @@ base::uc32 RegExpParserImpl<CharT>::ParseCharacterEscape( // a back reference (and therefore either \0 or not valid according // to the specification) as a 1..3 digit octal character code. // ES#prod-annexB-LegacyOctalEscapeSequence - if (unicode()) { - // With /u, decimal escape is not interpreted as octal character code. + if (IsUnicodeMode()) { + // With /u or /v, decimal escape is not interpreted as octal character + // code. ReportError(RegExpError::kInvalidClassEscape); return 0; } @@ -1812,8 +1955,8 @@ base::uc32 RegExpParserImpl<CharT>::ParseCharacterEscape( Advance(); base::uc32 value; if (ParseHexEscape(2, &value)) return value; - if (unicode()) { - // With /u, invalid escapes are not treated as identity escapes. + if (IsUnicodeMode()) { + // With /u or /v, invalid escapes are not treated as identity escapes. ReportError(RegExpError::kInvalidEscape); return 0; } @@ -1830,8 +1973,8 @@ base::uc32 RegExpParserImpl<CharT>::ParseCharacterEscape( *is_escaped_unicode_character = true; return value; } - if (unicode()) { - // With /u, invalid escapes are not treated as identity escapes. + if (IsUnicodeMode()) { + // With /u or /v, invalid escapes are not treated as identity escapes. 
ReportError(RegExpError::kInvalidUnicodeEscape); return 0; } @@ -1848,11 +1991,19 @@ base::uc32 RegExpParserImpl<CharT>::ParseCharacterEscape( // // * With /u, no identity escapes except for syntax characters are // allowed. - // * Without /u: + // * With /v, no identity escapes except for syntax characters and + // ClassSetReservedPunctuators (if within a class) are allowed. + // * Without /u or /v: // * '\c' is not an IdentityEscape. // * '\k' is not an IdentityEscape when named captures exist. // * Otherwise, all identity escapes are allowed. - if (unicode()) { + if (unicode_sets() && in_class_escape_state == InClassEscapeState::kInClass) { + if (IsClassSetReservedPunctuator(c)) { + Advance(); + return c; + } + } + if (IsUnicodeMode()) { if (!IsSyntaxCharacterOrSlash(c)) { ReportError(RegExpError::kInvalidEscape); return 0; @@ -1860,7 +2011,7 @@ base::uc32 RegExpParserImpl<CharT>::ParseCharacterEscape( Advance(); return c; } - DCHECK(!unicode()); + DCHECK(!IsUnicodeMode()); if (c == 'c') { ReportError(RegExpError::kInvalidEscape); return 0; @@ -1875,6 +2026,53 @@ base::uc32 RegExpParserImpl<CharT>::ParseCharacterEscape( return c; } +// TODO(v8:11935): Change permalink once proposal is in stage 4. +// https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-ClassRanges +template <class CharT> +RegExpTree* RegExpParserImpl<CharT>::ParseClassRanges( + ZoneList<CharacterRange>* ranges, bool add_unicode_case_equivalents) { + base::uc32 char_1, char_2; + bool is_class_1, is_class_2; + while (has_more() && current() != ']') { + ParseClassEscape(ranges, zone(), add_unicode_case_equivalents, &char_1, + &is_class_1 CHECK_FAILED); + // ClassAtom + if (current() == '-') { + Advance(); + if (!has_more()) { + // If we reach the end we break out of the loop and let the + // following code report an error. + break; + } else if (current() == ']') { + if (!is_class_1) ranges->Add(CharacterRange::Singleton(char_1), zone()); + ranges->Add(CharacterRange::Singleton('-'), zone()); + break; + } + ParseClassEscape(ranges, zone(), add_unicode_case_equivalents, &char_2, + &is_class_2 CHECK_FAILED); + if (is_class_1 || is_class_2) { + // Either end is an escaped character class. Treat the '-' verbatim. + if (IsUnicodeMode()) { + // ES2015 21.2.2.15.1 step 1. + return ReportError(RegExpError::kInvalidCharacterClass); + } + if (!is_class_1) ranges->Add(CharacterRange::Singleton(char_1), zone()); + ranges->Add(CharacterRange::Singleton('-'), zone()); + if (!is_class_2) ranges->Add(CharacterRange::Singleton(char_2), zone()); + continue; + } + // ES2015 21.2.2.15.1 step 6. 
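+ // e.g. /[z-a]/ reaches this check with char_1 == 'z' > char_2 == 'a' + // and is rejected as out of order.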
+ if (char_1 > char_2) { + return ReportError(RegExpError::kOutOfOrderCharacterClass); + } + ranges->Add(CharacterRange::Range(char_1, char_2), zone()); + } else { + if (!is_class_1) ranges->Add(CharacterRange::Singleton(char_1), zone()); + } + } + return nullptr; +} + // https://tc39.es/ecma262/#prod-ClassEscape template <class CharT> void RegExpParserImpl<CharT>::ParseClassEscape( @@ -1897,7 +2095,7 @@ void RegExpParserImpl<CharT>::ParseClassEscape( Advance(2); return; case '-': - if (unicode()) { + if (IsUnicodeMode()) { *char_out = next; Advance(2); return; @@ -1943,7 +2141,7 @@ bool RegExpParserImpl<CharT>::TryParseCharacterClassEscape( return true; case 'p': case 'P': { - if (!unicode()) return false; + if (!IsUnicodeMode()) return false; bool negate = next == 'P'; Advance(2); ZoneVector<char> name_1(zone); @@ -1961,65 +2159,335 @@ bool RegExpParserImpl<CharT>::TryParseCharacterClassEscape( } } +// TODO(v8:11935): Change permalink once proposal is in stage 4. +// https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-ClassStringDisjunction template <class CharT> -RegExpTree* RegExpParserImpl<CharT>::ParseCharacterClass( - const RegExpBuilder* builder) { - DCHECK_EQ(current(), '['); - Advance(); - bool is_negated = false; - if (current() == '^') { - is_negated = true; - Advance(); +RegExpTree* RegExpParserImpl<CharT>::ParseClassStringDisjunction() { + DCHECK(unicode_sets()); + DCHECK_EQ(current(), '\\'); + DCHECK_EQ(Next(), 'q'); + Advance(2); + if (current() != '{') { + // Identity escape of 'q' is not allowed in unicode mode. + return ReportError(RegExpError::kInvalidEscape); } + Advance(); + + // TODO(pthier, v8:11935): Implement. + return ReportError(RegExpError::kInvalidCharacterClass); +} + +// TODO(v8:11935): Change permalink once proposal is in stage 4. +// https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-ClassSetOperand +// Tree returned based on type_out: +// * kClassStringDisjunction: RegExpAlternative | RegExpAtom +// * kNestedClass: RegExpClassSetExpression +// * For all other types: RegExpClassRanges +template <class CharT> +RegExpTree* RegExpParserImpl<CharT>::ParseClassSetOperand( + const RegExpBuilder* builder, ClassSetOperandType* type_out) { ZoneList<CharacterRange>* ranges = - zone()->template New<ZoneList<CharacterRange>>(2, zone()); - bool add_unicode_case_equivalents = unicode() && builder->ignore_case(); + zone()->template New<ZoneList<CharacterRange>>(1, zone()); + RegExpTree* tree = + ParseClassSetOperand(builder, type_out, ranges CHECK_FAILED); + DCHECK_IMPLIES(*type_out == ClassSetOperandType::kClassSetCharacter, + ranges->length() == 1); + DCHECK_IMPLIES(*type_out == ClassSetOperandType::kClassSetCharacter, + tree == nullptr); + DCHECK_IMPLIES(*type_out == ClassSetOperandType::kCharacterClassEscape, + !ranges->is_empty()); + DCHECK_IMPLIES(*type_out == ClassSetOperandType::kCharacterClassEscape, + tree == nullptr); + DCHECK_IMPLIES(*type_out == ClassSetOperandType::kClassStringDisjunction, + ranges->is_empty()); + DCHECK_IMPLIES(*type_out == ClassSetOperandType::kClassStringDisjunction, + tree->IsAtom() || tree->IsAlternative()); + DCHECK_IMPLIES(*type_out == ClassSetOperandType::kNestedClass, + ranges->is_empty()); + DCHECK_IMPLIES(*type_out == ClassSetOperandType::kNestedClass, + tree->IsClassSetExpression()); + // ClassSetRange is only used within ClassSetUnion(). 
+ DCHECK_NE(*type_out, ClassSetOperandType::kClassSetRange); + if (tree == nullptr) { + tree = zone()->template New<RegExpClassRanges>(zone(), ranges); + } + return tree; +} + +// TODO(v8:11935): Change permalink once proposal is in stage 4. +// https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-ClassSetOperand +// Based on |type_out|, either a tree is returned or |ranges| is modified (never both). +// Tree returned based on type_out: +// * kClassStringDisjunction: RegExpAlternative | RegExpAtom +// * kNestedClass: RegExpClassSetExpression +// For all other types, ranges is modified and nullptr is returned. +template <class CharT> +RegExpTree* RegExpParserImpl<CharT>::ParseClassSetOperand( + const RegExpBuilder* builder, ClassSetOperandType* type_out, + ZoneList<CharacterRange>* ranges) { + DCHECK(unicode_sets()); + const base::uc32 c = current(); + if (c == '\\') { + base::uc32 next = Next(); + switch (next) { + case 'b': + *type_out = ClassSetOperandType::kClassSetCharacter; + ranges->Add(CharacterRange::Singleton('\b'), zone()); + Advance(2); + return nullptr; + case 'q': + *type_out = ClassSetOperandType::kClassStringDisjunction; + return ParseClassStringDisjunction(); + case kEndMarker: + return ReportError(RegExpError::kEscapeAtEndOfPattern); + } + static constexpr InClassEscapeState kInClassEscape = + InClassEscapeState::kInClass; + const bool add_unicode_case_equivalents = ignore_case(); + if (TryParseCharacterClassEscape(next, kInClassEscape, ranges, zone(), + add_unicode_case_equivalents)) { + *type_out = ClassSetOperandType::kCharacterClassEscape; + return nullptr; + } + + bool dummy = false; // Unused. + base::uc32 escaped_char = ParseCharacterEscape(kInClassEscape, &dummy); + *type_out = ClassSetOperandType::kClassSetCharacter; + ranges->Add(CharacterRange::Singleton(escaped_char), zone()); + return nullptr; + } + if (c == '[') { + *type_out = ClassSetOperandType::kNestedClass; + return ParseCharacterClass(builder); + } + if (IsClassSetSyntaxCharacter(c)) { + return ReportError(RegExpError::kInvalidCharacterInClass); + } + if (IsClassSetReservedDoublePunctuator(c)) { + return ReportError(RegExpError::kInvalidClassSetOperation); + } + *type_out = ClassSetOperandType::kClassSetCharacter; + ranges->Add(CharacterRange::Singleton(c), zone()); + Advance(); + return nullptr; +} + +// TODO(v8:11935): Change permalink once proposal is in stage 4. +// https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-ClassUnion +template <class CharT> +RegExpTree* RegExpParserImpl<CharT>::ParseClassUnion( + const RegExpBuilder* builder, bool is_negated, RegExpTree* first_operand, + ClassSetOperandType first_operand_type, ZoneList<CharacterRange>* ranges) { + DCHECK(unicode_sets()); + ZoneList<RegExpTree*>* operands = + zone()->template New<ZoneList<RegExpTree*>>(2, zone()); + // Add the lhs to operands if necessary. + // Either the lhs values were added to |ranges| (in which case |first_operand| + // is null), or the lhs was evaluated to a tree and passed as |first_operand| + // (in which case |ranges| is empty).
+ DCHECK_EQ(first_operand != nullptr, ranges->is_empty()); + if (first_operand != nullptr) { + operands->Add(first_operand, zone()); + } + ClassSetOperandType last_type = first_operand_type; + const bool needs_case_folding = ignore_case(); while (has_more() && current() != ']') { - base::uc32 char_1, char_2; - bool is_class_1, is_class_2; - ParseClassEscape(ranges, zone(), add_unicode_case_equivalents, &char_1, - &is_class_1 CHECK_FAILED); if (current() == '-') { + // Mix of ClassSetRange and ClassSubtraction is not allowed. + if (Next() == '-') { + return ReportError(RegExpError::kInvalidClassSetOperation); + } Advance(); - if (current() == kEndMarker) { + if (!has_more()) { // If we reach the end we break out of the loop and let the // following code report an error. break; - } else if (current() == ']') { - if (!is_class_1) ranges->Add(CharacterRange::Singleton(char_1), zone()); - ranges->Add(CharacterRange::Singleton('-'), zone()); - break; } - ParseClassEscape(ranges, zone(), add_unicode_case_equivalents, &char_2, - &is_class_2 CHECK_FAILED); - if (is_class_1 || is_class_2) { - // Either end is an escaped character class. Treat the '-' verbatim. - if (unicode()) { - // ES2015 21.2.2.15.1 step 1. - return ReportError(RegExpError::kInvalidCharacterClass); - } - if (!is_class_1) ranges->Add(CharacterRange::Singleton(char_1), zone()); - ranges->Add(CharacterRange::Singleton('-'), zone()); - if (!is_class_2) ranges->Add(CharacterRange::Singleton(char_2), zone()); - continue; + // If the lhs and rhs around '-' are both ClassSetCharacters, they + // represent a character range. + // If one of them is not a ClassSetCharacter, it is a syntax error, + // as '-' cannot be used unescaped within a class with /v. + // TODO(v8:11935): Change permalink once proposal is in stage 4. + // See + // https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-ClassSetRange + if (last_type != ClassSetOperandType::kClassSetCharacter) { + return ReportError(RegExpError::kInvalidCharacterClass); } - // ES2015 21.2.2.15.1 step 6. - if (char_1 > char_2) { + ParseClassSetOperand(builder, &last_type, ranges CHECK_FAILED); + if (last_type != ClassSetOperandType::kClassSetCharacter) { + return ReportError(RegExpError::kInvalidCharacterClass); + } + // Remove the last two singleton characters added to ranges, and combine + // them into a range. + auto rhs_ranges = ranges->RemoveLast(); + auto lhs_ranges = ranges->RemoveLast(); + DCHECK(lhs_ranges.IsSingleton()); + DCHECK(rhs_ranges.IsSingleton()); + base::uc32 from = lhs_ranges.from(); + base::uc32 to = rhs_ranges.from(); + if (from > to) { return ReportError(RegExpError::kOutOfOrderCharacterClass); } - ranges->Add(CharacterRange::Range(char_1, char_2), zone()); + ranges->Add(CharacterRange::Range(from, to), zone()); + last_type = ClassSetOperandType::kClassSetRange; } else { - if (!is_class_1) ranges->Add(CharacterRange::Singleton(char_1), zone()); + DCHECK_NE(current(), '-'); + RegExpTree* operand = + ParseClassSetOperand(builder, &last_type, ranges CHECK_FAILED); + if (operand != nullptr) { + // Add the range we started building as operand and reset the current + // range.
+ if (!ranges->is_empty()) { + if (needs_case_folding) { + CharacterRange::AddUnicodeCaseEquivalents(ranges, zone()); + } + operands->Add(zone()->template New<RegExpClassRanges>(zone(), ranges), + zone()); + ranges = zone()->template New<ZoneList<CharacterRange>>(2, zone()); + } + operands->Add(operand, zone()); + } + } + } + + if (!has_more()) { + return ReportError(RegExpError::kUnterminatedCharacterClass); + } + + // Add the range we started building as operand. + if (!ranges->is_empty()) { + if (needs_case_folding) { + CharacterRange::AddUnicodeCaseEquivalents(ranges, zone()); + } + operands->Add(zone()->template New<RegExpClassRanges>(zone(), ranges), + zone()); + } + DCHECK_EQ(current(), ']'); + Advance(); + return zone()->template New<RegExpClassSetExpression>( + RegExpClassSetExpression::OperationType::kUnion, is_negated, operands); +} + +// TODO(v8:11935): Change permalink once proposal is in stage 4. +// https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-ClassIntersection +template <class CharT> +RegExpTree* RegExpParserImpl<CharT>::ParseClassIntersection( + const RegExpBuilder* builder, bool is_negated, RegExpTree* first_operand) { + DCHECK(unicode_sets()); + DCHECK(current() == '&' && Next() == '&'); + ZoneList<RegExpTree*>* operands = + zone()->template New<ZoneList<RegExpTree*>>(2, zone()); + operands->Add(first_operand, zone()); + while (has_more() && current() != ']') { + if (current() != '&' || Next() != '&') { + return ReportError(RegExpError::kInvalidClassSetOperation); + } + Advance(2); + // [lookahead ≠ &] + if (current() == '&') { + return ReportError(RegExpError::kInvalidCharacterInClass); + } + + ClassSetOperandType dummy; // unused + RegExpTree* operand = ParseClassSetOperand(builder, &dummy CHECK_FAILED); + operands->Add(operand, zone()); + } + if (!has_more()) { + return ReportError(RegExpError::kUnterminatedCharacterClass); + } + DCHECK_EQ(current(), ']'); + Advance(); + return zone()->template New<RegExpClassSetExpression>( + RegExpClassSetExpression::OperationType::kIntersection, is_negated, + operands); +} + +// TODO(v8:11935): Change permalink once proposal is in stage 4. 
+// https://arai-a.github.io/ecma262-compare/snapshot.html?pr=2418#prod-ClassSubtraction +template <class CharT> +RegExpTree* RegExpParserImpl<CharT>::ParseClassSubtraction( + const RegExpBuilder* builder, bool is_negated, RegExpTree* first_operand) { + DCHECK(unicode_sets()); + DCHECK(current() == '-' && Next() == '-'); + ZoneList<RegExpTree*>* operands = + zone()->template New<ZoneList<RegExpTree*>>(2, zone()); + operands->Add(first_operand, zone()); + while (has_more() && current() != ']') { + if (current() != '-' || Next() != '-') { + return ReportError(RegExpError::kInvalidClassSetOperation); } + Advance(2); + ClassSetOperandType dummy; // unused + RegExpTree* operand = ParseClassSetOperand(builder, &dummy CHECK_FAILED); + operands->Add(operand, zone()); } if (!has_more()) { return ReportError(RegExpError::kUnterminatedCharacterClass); } + DCHECK_EQ(current(), ']'); Advance(); - RegExpCharacterClass::CharacterClassFlags character_class_flags; - if (is_negated) character_class_flags = RegExpCharacterClass::NEGATED; - return zone()->template New<RegExpCharacterClass>(zone(), ranges, - character_class_flags); + return zone()->template New<RegExpClassSetExpression>( + RegExpClassSetExpression::OperationType::kSubtraction, is_negated, + operands); +} + +// https://tc39.es/ecma262/#prod-CharacterClass +template <class CharT> +RegExpTree* RegExpParserImpl<CharT>::ParseCharacterClass( + const RegExpBuilder* builder) { + DCHECK_EQ(current(), '['); + Advance(); + bool is_negated = false; + if (current() == '^') { + is_negated = true; + Advance(); + } + ZoneList<CharacterRange>* ranges = + zone()->template New<ZoneList<CharacterRange>>(2, zone()); + if (current() == ']') { + Advance(); + RegExpClassRanges::ClassRangesFlags class_ranges_flags; + if (is_negated) class_ranges_flags = RegExpClassRanges::NEGATED; + return zone()->template New<RegExpClassRanges>(zone(), ranges, + class_ranges_flags); + } + + if (!unicode_sets()) { + bool add_unicode_case_equivalents = IsUnicodeMode() && ignore_case(); + ParseClassRanges(ranges, add_unicode_case_equivalents CHECK_FAILED); + if (!has_more()) { + return ReportError(RegExpError::kUnterminatedCharacterClass); + } + DCHECK_EQ(current(), ']'); + Advance(); + RegExpClassRanges::ClassRangesFlags character_class_flags; + if (is_negated) character_class_flags = RegExpClassRanges::NEGATED; + return zone()->template New<RegExpClassRanges>(zone(), ranges, + character_class_flags); + } else { + ClassSetOperandType operand_type; + RegExpTree* operand = + ParseClassSetOperand(builder, &operand_type, ranges CHECK_FAILED); + switch (current()) { + case '-': + if (Next() == '-') { + if (operand == nullptr) { + operand = zone()->template New<RegExpClassRanges>(zone(), ranges); + } + return ParseClassSubtraction(builder, is_negated, operand); + } + // ClassSetRange is handled in ParseClassUnion(). 
+ break; + case '&': + if (Next() == '&') { + if (operand == nullptr) { + operand = zone()->template New<RegExpClassRanges>(zone(), ranges); + } + return ParseClassIntersection(builder, is_negated, operand); + } + } + return ParseClassUnion(builder, is_negated, operand, operand_type, ranges); + } } #undef CHECK_FAILED @@ -2070,7 +2538,7 @@ void RegExpBuilder::AddTrailSurrogate(base::uc16 trail_surrogate) { base::uc32 combined = unibrow::Utf16::CombineSurrogatePair(lead_surrogate, trail_surrogate); if (NeedsDesugaringForIgnoreCase(combined)) { - AddCharacterClassForDesugaring(combined); + AddClassRangesForDesugaring(combined); } else { ZoneList<base::uc16> surrogate_pair(2, zone()); surrogate_pair.Add(lead_surrogate, zone()); @@ -2087,10 +2555,10 @@ void RegExpBuilder::AddTrailSurrogate(base::uc16 trail_surrogate) { void RegExpBuilder::FlushPendingSurrogate() { if (pending_surrogate_ != kNoPendingSurrogate) { - DCHECK(unicode()); + DCHECK(IsUnicodeMode()); base::uc32 c = pending_surrogate_; pending_surrogate_ = kNoPendingSurrogate; - AddCharacterClassForDesugaring(c); + AddClassRangesForDesugaring(c); } } @@ -2126,7 +2594,7 @@ void RegExpBuilder::AddCharacter(base::uc16 c) { FlushPendingSurrogate(); pending_empty_ = false; if (NeedsDesugaringForIgnoreCase(c)) { - AddCharacterClassForDesugaring(c); + AddClassRangesForDesugaring(c); } else { if (characters_ == nullptr) { characters_ = zone()->New<ZoneList<base::uc16>>(4, zone()); @@ -2138,12 +2606,12 @@ void RegExpBuilder::AddCharacter(base::uc16 c) { void RegExpBuilder::AddUnicodeCharacter(base::uc32 c) { if (c > static_cast<base::uc32>(unibrow::Utf16::kMaxNonSurrogateCharCode)) { - DCHECK(unicode()); + DCHECK(IsUnicodeMode()); AddLeadSurrogate(unibrow::Utf16::LeadSurrogate(c)); AddTrailSurrogate(unibrow::Utf16::TrailSurrogate(c)); - } else if (unicode() && unibrow::Utf16::IsLeadSurrogate(c)) { + } else if (IsUnicodeMode() && unibrow::Utf16::IsLeadSurrogate(c)) { AddLeadSurrogate(c); - } else if (unicode() && unibrow::Utf16::IsTrailSurrogate(c)) { + } else if (IsUnicodeMode() && unibrow::Utf16::IsTrailSurrogate(c)) { AddTrailSurrogate(c); } else { AddCharacter(static_cast<base::uc16>(c)); @@ -2160,7 +2628,7 @@ void RegExpBuilder::AddEscapedUnicodeCharacter(base::uc32 character) { void RegExpBuilder::AddEmpty() { pending_empty_ = true; } -void RegExpBuilder::AddCharacterClass(RegExpCharacterClass* cc) { +void RegExpBuilder::AddClassRanges(RegExpClassRanges* cc) { if (NeedsDesugaringForUnicode(cc)) { // With /u, character class needs to be desugared, so it // must be a standalone term instead of being part of a RegExpText. @@ -2170,8 +2638,8 @@ void RegExpBuilder::AddCharacterClass(RegExpCharacterClass* cc) { } } -void RegExpBuilder::AddCharacterClassForDesugaring(base::uc32 c) { - AddTerm(zone()->New<RegExpCharacterClass>( +void RegExpBuilder::AddClassRangesForDesugaring(base::uc32 c) { + AddTerm(zone()->New<RegExpClassRanges>( zone(), CharacterRange::List(zone(), CharacterRange::Singleton(c)))); } @@ -2222,8 +2690,8 @@ void RegExpBuilder::FlushTerms() { LAST(ADD_NONE); } -bool RegExpBuilder::NeedsDesugaringForUnicode(RegExpCharacterClass* cc) { - if (!unicode()) return false; +bool RegExpBuilder::NeedsDesugaringForUnicode(RegExpClassRanges* cc) { + if (!IsUnicodeMode()) return false; // TODO(yangguo): we could be smarter than this. Case-insensitivity does not // necessarily mean that we need to desugar. It's probably nicer to have a // separate pass to figure out unicode desugarings. 
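The switch at the end of ParseCharacterClass above is the whole /v operator dispatch: after the first operand, a '--' pair hands off to ParseClassSubtraction, a '&&' pair to ParseClassIntersection, and everything else (including a single '-', which begins a ClassSetRange) falls through to ParseClassUnion. A minimal standalone sketch of that two-character lookahead follows; the names ClassSetKind and DispatchClassSet are illustrative, not V8's.

// Sketch only: mirrors the dispatch described above, not V8's parser.
#include <cassert>
#include <string>

enum class ClassSetKind { kUnion, kIntersection, kSubtraction };

// |rest| is the pattern text immediately after the first class operand.
ClassSetKind DispatchClassSet(const std::string& rest) {
  if (rest.size() >= 2 && rest[0] == '&' && rest[1] == '&')
    return ClassSetKind::kIntersection;
  if (rest.size() >= 2 && rest[0] == '-' && rest[1] == '-')
    return ClassSetKind::kSubtraction;
  // A single '-' is a ClassSetRange and stays inside the union parser.
  return ClassSetKind::kUnion;
}

int main() {
  assert(DispatchClassSet("&&[b]]") == ClassSetKind::kIntersection);
  assert(DispatchClassSet("--[b]]") == ClassSetKind::kSubtraction);
  assert(DispatchClassSet("-z]") == ClassSetKind::kUnion);
}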
@@ -2251,7 +2719,7 @@ bool RegExpBuilder::NeedsDesugaringForUnicode(RegExpCharacterClass* cc) { bool RegExpBuilder::NeedsDesugaringForIgnoreCase(base::uc32 c) { #ifdef V8_INTL_SUPPORT - if (unicode() && ignore_case()) { + if (IsUnicodeMode() && ignore_case()) { icu::UnicodeSet set(c, c); set.closeOver(USET_CASE_INSENSITIVE); set.removeAllStrings(); @@ -2304,8 +2772,8 @@ bool RegExpBuilder::AddQuantifierToAtom( atom = terms_.back(); terms_.pop_back(); if (atom->IsLookaround()) { - // With /u, lookarounds are not quantifiable. - if (unicode()) return false; + // With /u or /v, lookarounds are not quantifiable. + if (IsUnicodeMode()) return false; // Lookbehinds are not quantifiable. if (atom->AsLookaround()->type() == RegExpLookaround::LOOKBEHIND) { return false; diff --git a/deps/v8/src/regexp/regexp.cc b/deps/v8/src/regexp/regexp.cc index 71301ab965caa9..ce323df96cdf1c 100644 --- a/deps/v8/src/regexp/regexp.cc +++ b/deps/v8/src/regexp/regexp.cc @@ -99,7 +99,7 @@ class RegExpImpl final : public AllStatic { static void SetIrregexpMaxRegisterCount(FixedArray re, int value); static int IrregexpNumberOfCaptures(FixedArray re); static ByteArray IrregexpByteCode(FixedArray re, bool is_one_byte); - static Code IrregexpNativeCode(FixedArray re, bool is_one_byte); + static CodeT IrregexpNativeCode(FixedArray re, bool is_one_byte); }; // static @@ -629,8 +629,8 @@ ByteArray RegExpImpl::IrregexpByteCode(FixedArray re, bool is_one_byte) { return ByteArray::cast(re.get(JSRegExp::bytecode_index(is_one_byte))); } -Code RegExpImpl::IrregexpNativeCode(FixedArray re, bool is_one_byte) { - return Code::cast(re.get(JSRegExp::code_index(is_one_byte))); +CodeT RegExpImpl::IrregexpNativeCode(FixedArray re, bool is_one_byte) { + return CodeT::cast(re.get(JSRegExp::code_index(is_one_byte))); } void RegExpImpl::IrregexpInitialize(Isolate* isolate, Handle<JSRegExp> re, @@ -993,7 +993,7 @@ bool RegExpImpl::Compile(Isolate* isolate, Zone* zone, RegExpCompileData* data, RegExpMacroAssembler::GlobalMode mode = RegExpMacroAssembler::GLOBAL; if (data->tree->min_match() > 0) { mode = RegExpMacroAssembler::GLOBAL_NO_ZERO_LENGTH_CHECK; - } else if (IsUnicode(flags)) { + } else if (IsEitherUnicode(flags)) { mode = RegExpMacroAssembler::GLOBAL_UNICODE; } macro_assembler->set_global_mode(mode); @@ -1129,7 +1129,7 @@ RegExpGlobalCache::~RegExpGlobalCache() { } int RegExpGlobalCache::AdvanceZeroLength(int last_index) { - if (IsUnicode(JSRegExp::AsRegExpFlags(regexp_->flags())) && + if (IsEitherUnicode(JSRegExp::AsRegExpFlags(regexp_->flags())) && last_index + 1 < subject_->length() && unibrow::Utf16::IsLeadSurrogate(subject_->Get(last_index)) && unibrow::Utf16::IsTrailSurrogate(subject_->Get(last_index + 1))) { diff --git a/deps/v8/src/regexp/riscv/regexp-macro-assembler-riscv.cc b/deps/v8/src/regexp/riscv/regexp-macro-assembler-riscv.cc index 93da768d86cc59..c8f3eb551e0580 100644 --- a/deps/v8/src/regexp/riscv/regexp-macro-assembler-riscv.cc +++ b/deps/v8/src/regexp/riscv/regexp-macro-assembler-riscv.cc @@ -499,7 +499,7 @@ void RegExpMacroAssemblerRISCV::CheckBitInTable(Handle<ByteArray> table, BranchOrBacktrack(on_bit_set, ne, a0, Operand(zero_reg)); } -bool RegExpMacroAssemblerRISCV::CheckSpecialCharacterClass( +bool RegExpMacroAssemblerRISCV::CheckSpecialClassRanges( StandardCharacterSet type, Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check. 
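The renamed CheckSpecialClassRanges hooks here and in the s390/x64 files below keep the unsigned range-check idiom their comments cite: a two-sided min <= c && c <= max test compiles to a single unsigned comparison, because c - min wraps around when c < min. A self-contained illustration (plain C++, not V8 code):

#include <cassert>
#include <cstdint>

// Equivalent to (min <= c && c <= max) for min <= max, using one compare.
bool InRange(uint32_t c, uint32_t min, uint32_t max) {
  // If c < min, the unsigned subtraction wraps to a huge value and fails.
  return (c - min) <= (max - min);
}

int main() {
  assert(InRange('5', '0', '9'));
  assert(!InRange('a', '0', '9'));
  assert(!InRange(0, '0', '9'));  // wrap-around case
}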
diff --git a/deps/v8/src/regexp/riscv/regexp-macro-assembler-riscv.h b/deps/v8/src/regexp/riscv/regexp-macro-assembler-riscv.h index 2352af8a17ca6a..1080e72a7ec73a 100644 --- a/deps/v8/src/regexp/riscv/regexp-macro-assembler-riscv.h +++ b/deps/v8/src/regexp/riscv/regexp-macro-assembler-riscv.h @@ -58,8 +58,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerRISCV // Checks whether the given offset from the current position is before // the end of the string. void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/regexp/s390/regexp-macro-assembler-s390.cc b/deps/v8/src/regexp/s390/regexp-macro-assembler-s390.cc index ab9528dbc4c1cd..a61bc379ba6c26 100644 --- a/deps/v8/src/regexp/s390/regexp-macro-assembler-s390.cc +++ b/deps/v8/src/regexp/s390/regexp-macro-assembler-s390.cc @@ -539,7 +539,7 @@ void RegExpMacroAssemblerS390::CheckBitInTable(Handle<ByteArray> table, BranchOrBacktrack(ne, on_bit_set); } -bool RegExpMacroAssemblerS390::CheckSpecialCharacterClass( +bool RegExpMacroAssemblerS390::CheckSpecialClassRanges( StandardCharacterSet type, Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check diff --git a/deps/v8/src/regexp/s390/regexp-macro-assembler-s390.h b/deps/v8/src/regexp/s390/regexp-macro-assembler-s390.h index 645b01faa56e34..f0b4833eb8ac7c 100644 --- a/deps/v8/src/regexp/s390/regexp-macro-assembler-s390.h +++ b/deps/v8/src/regexp/s390/regexp-macro-assembler-s390.h @@ -57,8 +57,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerS390 // Checks whether the given offset from the current position is before // the end of the string. 
void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/regexp/x64/regexp-macro-assembler-x64.cc b/deps/v8/src/regexp/x64/regexp-macro-assembler-x64.cc index 1b4aa566f8f4e2..89fd2e34f12961 100644 --- a/deps/v8/src/regexp/x64/regexp-macro-assembler-x64.cc +++ b/deps/v8/src/regexp/x64/regexp-macro-assembler-x64.cc @@ -578,8 +578,8 @@ void RegExpMacroAssemblerX64::CheckBitInTable( BranchOrBacktrack(not_equal, on_bit_set); } -bool RegExpMacroAssemblerX64::CheckSpecialCharacterClass( - StandardCharacterSet type, Label* on_no_match) { +bool RegExpMacroAssemblerX64::CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) { // Range checks (c in min..max) are generally implemented by an unsigned // (c - min) <= (max - min) check, using the sequence: // leal(rax, Operand(current_character(), -min)) or sub(rax, Immediate(min)) diff --git a/deps/v8/src/regexp/x64/regexp-macro-assembler-x64.h b/deps/v8/src/regexp/x64/regexp-macro-assembler-x64.h index c0a743afc639fd..683d3bc42803c4 100644 --- a/deps/v8/src/regexp/x64/regexp-macro-assembler-x64.h +++ b/deps/v8/src/regexp/x64/regexp-macro-assembler-x64.h @@ -57,8 +57,8 @@ class V8_EXPORT_PRIVATE RegExpMacroAssemblerX64 // Checks whether the given offset from the current position is before // the end of the string. void CheckPosition(int cp_offset, Label* on_outside_input) override; - bool CheckSpecialCharacterClass(StandardCharacterSet type, - Label* on_no_match) override; + bool CheckSpecialClassRanges(StandardCharacterSet type, + Label* on_no_match) override; void Fail() override; Handle<HeapObject> GetCode(Handle<String> source) override; void GoTo(Label* label) override; diff --git a/deps/v8/src/roots/roots.cc b/deps/v8/src/roots/roots.cc index 54755d899bd8b9..1f798c94b565b1 100644 --- a/deps/v8/src/roots/roots.cc +++ b/deps/v8/src/roots/roots.cc @@ -39,7 +39,8 @@ void ReadOnlyRoots::VerifyNameForProtectors() { if (root_index != RootIndex::kFirstNameForProtector) { // Make sure the objects are adjacent in memory. 
CHECK_LT(prev.address(), current.address()); - Address computed_address = prev.address() + prev.Size(); + Address computed_address = + prev.address() + ALIGN_TO_ALLOCATION_ALIGNMENT(prev.Size()); CHECK_EQ(computed_address, current.address()); } prev = current; diff --git a/deps/v8/src/roots/roots.h b/deps/v8/src/roots/roots.h index 16d022d6e5f641..a65e41e3da4195 100644 --- a/deps/v8/src/roots/roots.h +++ b/deps/v8/src/roots/roots.h @@ -268,8 +268,8 @@ class Symbol; AsyncGeneratorAwaitRejectSharedFun) \ V(SharedFunctionInfo, async_generator_await_resolve_shared_fun, \ AsyncGeneratorAwaitResolveSharedFun) \ - V(SharedFunctionInfo, async_generator_yield_resolve_shared_fun, \ - AsyncGeneratorYieldResolveSharedFun) \ + V(SharedFunctionInfo, async_generator_yield_with_await_resolve_shared_fun, \ + AsyncGeneratorYieldWithAwaitResolveSharedFun) \ V(SharedFunctionInfo, async_generator_return_resolve_shared_fun, \ AsyncGeneratorReturnResolveSharedFun) \ V(SharedFunctionInfo, async_generator_return_closed_reject_shared_fun, \ @@ -334,8 +334,11 @@ class Symbol; PendingOptimizeForTestBytecode) \ V(ArrayList, basic_block_profiling_data, BasicBlockProfilingData) \ V(WeakArrayList, shared_wasm_memories, SharedWasmMemories) \ + /* EphemeronHashTable for debug scopes (local debug evaluate) */ \ + V(HeapObject, locals_block_list_cache, DebugLocalsBlockListCache) \ IF_WASM(V, HeapObject, active_continuation, ActiveContinuation) \ IF_WASM(V, HeapObject, active_suspender, ActiveSuspender) \ + IF_WASM(V, WeakArrayList, js_to_wasm_wrappers, JSToWasmWrappers) \ IF_WASM(V, WeakArrayList, wasm_canonical_rtts, WasmCanonicalRtts) // Entries in this list are limited to Smis and are not visited during GC. diff --git a/deps/v8/src/runtime/runtime-array.cc b/deps/v8/src/runtime/runtime-array.cc index 7d8ba833e3e9d3..11cb61eeefbda7 100644 --- a/deps/v8/src/runtime/runtime-array.cc +++ b/deps/v8/src/runtime/runtime-array.cc @@ -155,13 +155,18 @@ RUNTIME_FUNCTION(Runtime_NormalizeElements) { return *array; } -// GrowArrayElements returns a sentinel Smi if the object was normalized or if -// the key is negative. +// GrowArrayElements grows fast kind elements and returns a sentinel Smi if the +// object was normalized or if the key is negative. RUNTIME_FUNCTION(Runtime_GrowArrayElements) { HandleScope scope(isolate); DCHECK_EQ(2, args.length()); Handle<JSObject> object = args.at<JSObject>(0); Handle<Object> key = args.at(1); + ElementsKind kind = object->GetElementsKind(); + CHECK(IsFastElementsKind(kind)); + const intptr_t kMaxLength = IsDoubleElementsKind(kind) + ? 
FixedDoubleArray::kMaxLength + : FixedArray::kMaxLength; uint32_t index; if (key->IsSmi()) { int value = Smi::ToInt(*key); @@ -170,7 +175,7 @@ RUNTIME_FUNCTION(Runtime_GrowArrayElements) { } else { CHECK(key->IsHeapNumber()); double value = HeapNumber::cast(*key).value(); - if (value < 0 || value > std::numeric_limits<uint32_t>::max()) { + if (value < 0 || value > kMaxLength) { return Smi::zero(); } index = static_cast<uint32_t>(value); diff --git a/deps/v8/src/runtime/runtime-classes.cc b/deps/v8/src/runtime/runtime-classes.cc index 79dd4cbe41a89f..2c3037e1353e90 100644 --- a/deps/v8/src/runtime/runtime-classes.cc +++ b/deps/v8/src/runtime/runtime-classes.cc @@ -638,7 +638,7 @@ MaybeHandle<Object> DefineClass(Isolate* isolate, DCHECK(isolate->has_pending_exception()); return MaybeHandle<Object>(); } - if (FLAG_log_maps) { + if (v8_flags.log_maps) { Handle<Map> empty_map; LOG(isolate, MapEvent("InitialMap", empty_map, handle(constructor->map(), isolate), diff --git a/deps/v8/src/runtime/runtime-collections.cc b/deps/v8/src/runtime/runtime-collections.cc index cdf822c0c0e769..a326efaa5e7505 100644 --- a/deps/v8/src/runtime/runtime-collections.cc +++ b/deps/v8/src/runtime/runtime-collections.cc @@ -79,7 +79,7 @@ RUNTIME_FUNCTION(Runtime_WeakCollectionDelete) { int hash = args.smi_value_at(2); #ifdef DEBUG - DCHECK(key->IsJSReceiver()); + DCHECK(key->CanBeHeldWeakly()); DCHECK(EphemeronHashTable::IsKey(ReadOnlyRoots(isolate), *key)); Handle<EphemeronHashTable> table( EphemeronHashTable::cast(weak_collection->table()), isolate); @@ -102,7 +102,7 @@ RUNTIME_FUNCTION(Runtime_WeakCollectionSet) { int hash = args.smi_value_at(3); #ifdef DEBUG - DCHECK(key->IsJSReceiver()); + DCHECK(key->CanBeHeldWeakly()); DCHECK(EphemeronHashTable::IsKey(ReadOnlyRoots(isolate), *key)); Handle<EphemeronHashTable> table( EphemeronHashTable::cast(weak_collection->table()), isolate); diff --git a/deps/v8/src/runtime/runtime-compiler.cc b/deps/v8/src/runtime/runtime-compiler.cc index d7288130ed3727..3676f7814d0b60 100644 --- a/deps/v8/src/runtime/runtime-compiler.cc +++ b/deps/v8/src/runtime/runtime-compiler.cc @@ -56,7 +56,7 @@ RUNTIME_FUNCTION(Runtime_CompileLazy) { DCHECK(!function->is_compiled()); #ifdef DEBUG - if (FLAG_trace_lazy && sfi->is_compiled()) { + if (v8_flags.trace_lazy && sfi->is_compiled()) { PrintF("[unoptimized: %s]\n", function->DebugNameCStr().get()); } #endif @@ -276,7 +276,7 @@ void DeoptAllOsrLoopsContainingDeoptExit(Isolate* isolate, JSFunction function, DisallowGarbageCollection no_gc; DCHECK(!deopt_exit_offset.IsNone()); - if (!FLAG_use_ic || + if (!v8_flags.use_ic || !function.feedback_vector().maybe_has_optimized_osr_code()) { return; } @@ -467,7 +467,7 @@ Object CompileOptimizedOSR(Isolate* isolate, Handle<JSFunction> function, BytecodeOffset osr_offset) { const ConcurrencyMode mode = V8_LIKELY(isolate->concurrent_recompilation_enabled() && - FLAG_concurrent_osr) + v8_flags.concurrent_osr) ? 
ConcurrencyMode::kConcurrent : ConcurrencyMode::kSynchronous; @@ -519,7 +519,7 @@ Object CompileOptimizedOSR(Isolate* isolate, Handle<JSFunction> function, RUNTIME_FUNCTION(Runtime_CompileOptimizedOSR) { HandleScope handle_scope(isolate); DCHECK_EQ(0, args.length()); - DCHECK(FLAG_use_osr); + DCHECK(v8_flags.use_osr); BytecodeOffset osr_offset = BytecodeOffset::None(); Handle<JSFunction> function; @@ -531,7 +531,7 @@ RUNTIME_FUNCTION(Runtime_CompileOptimizedOSR) { RUNTIME_FUNCTION(Runtime_CompileOptimizedOSRFromMaglev) { HandleScope handle_scope(isolate); DCHECK_EQ(1, args.length()); - DCHECK(FLAG_use_osr); + DCHECK(v8_flags.use_osr); const BytecodeOffset osr_offset(args.positive_smi_value_at(0)); @@ -546,13 +546,13 @@ RUNTIME_FUNCTION(Runtime_CompileOptimizedOSRFromMaglev) { RUNTIME_FUNCTION(Runtime_LogOrTraceOptimizedOSREntry) { HandleScope handle_scope(isolate); DCHECK_EQ(0, args.length()); - CHECK(FLAG_trace_osr || v8_flags.log_function_events); + CHECK(v8_flags.trace_osr || v8_flags.log_function_events); BytecodeOffset osr_offset = BytecodeOffset::None(); Handle<JSFunction> function; GetOsrOffsetAndFunctionForOSR(isolate, &osr_offset, &function); - if (FLAG_trace_osr) { + if (v8_flags.trace_osr) { PrintF(CodeTracer::Scope{isolate->GetCodeTracer()}.file(), "[OSR - entry. function: %s, osr offset: %d]\n", function->DebugNameCStr().get(), osr_offset.ToInt()); diff --git a/deps/v8/src/runtime/runtime-debug.cc b/deps/v8/src/runtime/runtime-debug.cc index 347329604acba8..2167a425485940 100644 --- a/deps/v8/src/runtime/runtime-debug.cc +++ b/deps/v8/src/runtime/runtime-debug.cc @@ -910,7 +910,7 @@ RUNTIME_FUNCTION(Runtime_ProfileCreateSnapshotDataBlob) { // Used only by the test/memory/Memory.json benchmark. This creates a snapshot // blob and outputs various statistics around it. 
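A mechanical change recurring through the runtime files in this diff is the move from individual FLAG_* globals to members of a single v8_flags object; keeping every flag in one struct appears to be what lets V8 write-protect all of them after startup. A toy reduction of the pattern (the struct below is illustrative; V8 generates its real FlagValues type from flag definition macros):

#include <cassert>

struct FlagValues {
  bool use_osr = true;
  bool concurrent_osr = true;
  bool trace_osr = false;
};

FlagValues v8_flags;  // one global object instead of many FLAG_* globals

// Mirrors the shape of the concurrency check in CompileOptimizedOSR above.
bool UseConcurrentOsr(bool recompilation_enabled) {
  return recompilation_enabled && v8_flags.concurrent_osr;
}

int main() {
  assert(UseConcurrentOsr(true));
  v8_flags.concurrent_osr = false;
  assert(!UseConcurrentOsr(true));
}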
- DCHECK(FLAG_profile_deserialization && FLAG_serialization_statistics); + DCHECK(v8_flags.profile_deserialization && v8_flags.serialization_statistics); DisableEmbeddedBlobRefcounting(); diff --git a/deps/v8/src/runtime/runtime-forin.cc b/deps/v8/src/runtime/runtime-forin.cc index 031ad03baf8f65..8ae6898555b094 100644 --- a/deps/v8/src/runtime/runtime-forin.cc +++ b/deps/v8/src/runtime/runtime-forin.cc @@ -77,6 +77,10 @@ MaybeHandle<Object> HasEnumerableProperty(Isolate* isolate, return it.GetName(); } } + case LookupIterator::WASM_OBJECT: + THROW_NEW_ERROR(isolate, + NewTypeError(MessageTemplate::kWasmObjectsAreOpaque), + Object); case LookupIterator::INTERCEPTOR: { result = JSObject::GetPropertyAttributesWithInterceptor(&it); if (result.IsNothing()) return MaybeHandle<Object>(); diff --git a/deps/v8/src/runtime/runtime-generator.cc b/deps/v8/src/runtime/runtime-generator.cc index 362249c55df88e..555367c47baafd 100644 --- a/deps/v8/src/runtime/runtime-generator.cc +++ b/deps/v8/src/runtime/runtime-generator.cc @@ -110,7 +110,7 @@ RUNTIME_FUNCTION(Runtime_AsyncGeneratorReject) { UNREACHABLE(); } -RUNTIME_FUNCTION(Runtime_AsyncGeneratorYield) { +RUNTIME_FUNCTION(Runtime_AsyncGeneratorYieldWithAwait) { // Runtime call is implemented in InterpreterIntrinsics and lowered in // JSIntrinsicLowering UNREACHABLE(); diff --git a/deps/v8/src/runtime/runtime-internal.cc b/deps/v8/src/runtime/runtime-internal.cc index a4178ef983bf1d..aafb9fe18f42b0 100644 --- a/deps/v8/src/runtime/runtime-internal.cc +++ b/deps/v8/src/runtime/runtime-internal.cc @@ -19,8 +19,8 @@ #include "src/utils/ostreams.h" #if V8_ENABLE_WEBASSEMBLY -// TODO(jkummerow): Drop this when the "SaveAndClearThreadInWasmFlag" -// short-term mitigation is no longer needed. +// TODO(chromium:1236668): Drop this when the "SaveAndClearThreadInWasmFlag" +// approach is no longer needed. #include "src/trap-handler/trap-handler.h" #endif // V8_ENABLE_WEBASSEMBLY @@ -104,7 +104,7 @@ RUNTIME_FUNCTION(Runtime_TerminateExecution) { THROW_NEW_ERROR_RETURN_FAILURE(isolate, call(message_id, arg0, arg1, arg2)); RUNTIME_FUNCTION(Runtime_ThrowRangeError) { - if (FLAG_correctness_fuzzer_suppressions) { + if (v8_flags.correctness_fuzzer_suppressions) { DCHECK_LE(1, args.length()); int message_id_smi = args.smi_value_at(0); @@ -418,7 +418,7 @@ RUNTIME_FUNCTION(Runtime_BytecodeBudgetInterruptWithStackCheck_Maglev) { namespace { #if V8_ENABLE_WEBASSEMBLY -class SaveAndClearThreadInWasmFlag { +class V8_NODISCARD SaveAndClearThreadInWasmFlag { public: SaveAndClearThreadInWasmFlag() { if (trap_handler::IsTrapHandlerEnabled()) { @@ -446,7 +446,8 @@ class SaveAndClearThreadInWasmFlag {}; RUNTIME_FUNCTION(Runtime_AllocateInYoungGeneration) { HandleScope scope(isolate); DCHECK_EQ(2, args.length()); - int size = args.smi_value_at(0); + // TODO(v8:13070): Align allocations in the builtins that call this. + int size = ALIGN_TO_ALLOCATION_ALIGNMENT(args.smi_value_at(0)); int flags = args.smi_value_at(1); AllocationAlignment alignment = AllocateDoubleAlignFlag::decode(flags) ? kDoubleAligned : kTaggedAligned; @@ -459,10 +460,10 @@ RUNTIME_FUNCTION(Runtime_AllocateInYoungGeneration) { } #if V8_ENABLE_WEBASSEMBLY - // Short-term mitigation for crbug.com/1236668. When this is called from - // WasmGC code, clear the "thread in wasm" flag, which is important in case - // any GC needs to happen. - // TODO(jkummerow): Find a better fix, likely by replacing the global flag. 
+ // When this is called from WasmGC code, clear the "thread in wasm" flag, + // which is important in case any GC needs to happen. + // TODO(chromium:1236668): Find a better fix, likely by replacing the global + // flag. SaveAndClearThreadInWasmFlag clear_wasm_flag; #endif // V8_ENABLE_WEBASSEMBLY @@ -478,7 +479,8 @@ RUNTIME_FUNCTION(Runtime_AllocateInYoungGeneration) { RUNTIME_FUNCTION(Runtime_AllocateInOldGeneration) { HandleScope scope(isolate); DCHECK_EQ(2, args.length()); - int size = args.smi_value_at(0); + // TODO(v8:13070): Align allocations in the builtins that call this. + int size = ALIGN_TO_ALLOCATION_ALIGNMENT(args.smi_value_at(0)); int flags = args.smi_value_at(1); AllocationAlignment alignment = AllocateDoubleAlignFlag::decode(flags) ? kDoubleAligned : kTaggedAligned; @@ -614,7 +616,7 @@ RUNTIME_FUNCTION(Runtime_GetAndResetRuntimeCallStats) { HandleScope scope(isolate); DCHECK_LE(args.length(), 2); #ifdef V8_RUNTIME_CALL_STATS - if (!FLAG_runtime_call_stats) { + if (!v8_flags.runtime_call_stats) { THROW_NEW_ERROR_RETURN_FAILURE( isolate, NewTypeError(MessageTemplate::kInvalid, isolate->factory()->NewStringFromAsciiChecked( diff --git a/deps/v8/src/runtime/runtime-literals.cc b/deps/v8/src/runtime/runtime-literals.cc index fcfb9563703507..bb698867c88f20 100644 --- a/deps/v8/src/runtime/runtime-literals.cc +++ b/deps/v8/src/runtime/runtime-literals.cc @@ -263,14 +263,14 @@ class AllocationSiteCreationContext : public AllocationSiteContext { // AllocationSite. InitializeTraversal(isolate()->factory()->NewAllocationSite(true)); scope_site = Handle<AllocationSite>(*top(), isolate()); - if (FLAG_trace_creation_allocation_sites) { + if (v8_flags.trace_creation_allocation_sites) { PrintF("*** Creating top level %s AllocationSite %p\n", "Fat", reinterpret_cast<void*>(scope_site->ptr())); } } else { DCHECK(!current().is_null()); scope_site = isolate()->factory()->NewAllocationSite(false); - if (FLAG_trace_creation_allocation_sites) { + if (v8_flags.trace_creation_allocation_sites) { PrintF( "*** Creating nested %s AllocationSite (top, current, new) (%p, " "%p, " @@ -288,7 +288,7 @@ class AllocationSiteCreationContext : public AllocationSiteContext { void ExitScope(Handle<AllocationSite> scope_site, Handle<JSObject> object) { if (object.is_null()) return; scope_site->set_boilerplate(*object, kReleaseStore); - if (FLAG_trace_creation_allocation_sites) { + if (v8_flags.trace_creation_allocation_sites) { bool top_level = !scope_site.is_null() && top().is_identical_to(scope_site); if (top_level) { diff --git a/deps/v8/src/runtime/runtime-object.cc b/deps/v8/src/runtime/runtime-object.cc index e369e7720f6f15..56e58bea3e1c7a 100644 --- a/deps/v8/src/runtime/runtime-object.cc +++ b/deps/v8/src/runtime/runtime-object.cc @@ -517,6 +517,7 @@ RUNTIME_FUNCTION(Runtime_ObjectCreate) { THROW_NEW_ERROR_RETURN_FAILURE( isolate, NewTypeError(MessageTemplate::kProtoObjectOrNull, prototype)); } + // 2. Let obj be ObjectCreate(O). 
ASSIGN_RETURN_FAILURE_ON_EXCEPTION( isolate, obj, JSObject::ObjectCreate(isolate, prototype)); @@ -1174,35 +1175,6 @@ RUNTIME_FUNCTION(Runtime_DefineKeyedOwnPropertyInLiteral) { return *value; } -RUNTIME_FUNCTION(Runtime_CollectTypeProfile) { - HandleScope scope(isolate); - DCHECK_EQ(3, args.length()); - int position = args.smi_value_at(0); - Handle<Object> value = args.at(1); - Handle<HeapObject> maybe_vector = args.at<HeapObject>(2); - - if (maybe_vector->IsUndefined()) { - return ReadOnlyRoots(isolate).undefined_value(); - } - Handle<FeedbackVector> vector = args.at<FeedbackVector>(2); - - Handle<String> type = Object::TypeOf(isolate, value); - if (value->IsJSReceiver()) { - Handle<JSReceiver> object = Handle<JSReceiver>::cast(value); - type = JSReceiver::GetConstructorName(isolate, object); - } else if (value->IsNull(isolate)) { - // typeof(null) is object. But it's more user-friendly to annotate - // null as type "null". - type = Handle<String>(ReadOnlyRoots(isolate).null_string(), isolate); - } - - DCHECK(vector->metadata().HasTypeProfileSlot()); - FeedbackNexus nexus(vector, vector->GetTypeProfileSlot()); - nexus.Collect(type, position); - - return ReadOnlyRoots(isolate).undefined_value(); -} - RUNTIME_FUNCTION(Runtime_HasFastPackedElements) { SealHandleScope shs(isolate); DCHECK_EQ(1, args.length()); diff --git a/deps/v8/src/runtime/runtime-regexp.cc b/deps/v8/src/runtime/runtime-regexp.cc index d9ec9c598a4894..2fb47682a8130b 100644 --- a/deps/v8/src/runtime/runtime-regexp.cc +++ b/deps/v8/src/runtime/runtime-regexp.cc @@ -1177,9 +1177,9 @@ static Object SearchRegExpMultiple(Isolate* isolate, Handle<String> subject, // native code expects an array to store all the matches, and the bytecode // matches one at a time, so it's easier to tier-up to native code from the // start. - if (FLAG_regexp_tier_up && regexp->type_tag() == JSRegExp::IRREGEXP) { + if (v8_flags.regexp_tier_up && regexp->type_tag() == JSRegExp::IRREGEXP) { regexp->MarkTierUpForNextExec(); - if (FLAG_trace_regexp_tier_up) { + if (v8_flags.trace_regexp_tier_up) { PrintF("Forcing tier-up of JSRegExp object %p in SearchRegExpMultiple\n", reinterpret_cast<void*>(regexp->ptr())); } @@ -1426,9 +1426,9 @@ V8_WARN_UNUSED_RESULT MaybeHandle<String> RegExpReplace( // native code expects an array to store all the matches, and the bytecode // matches one at a time, so it's easier to tier-up to native code from the // start. - if (FLAG_regexp_tier_up && regexp->type_tag() == JSRegExp::IRREGEXP) { + if (v8_flags.regexp_tier_up && regexp->type_tag() == JSRegExp::IRREGEXP) { regexp->MarkTierUpForNextExec(); - if (FLAG_trace_regexp_tier_up) { + if (v8_flags.trace_regexp_tier_up) { PrintF("Forcing tier-up of JSRegExp object %p in RegExpReplace\n", reinterpret_cast<void*>(regexp->ptr())); } diff --git a/deps/v8/src/runtime/runtime-strings.cc b/deps/v8/src/runtime/runtime-strings.cc index 621788f98c574e..1f2da1cd19d8b2 100644 --- a/deps/v8/src/runtime/runtime-strings.cc +++ b/deps/v8/src/runtime/runtime-strings.cc @@ -11,9 +11,46 @@ #include "src/objects/smi.h" #include "src/strings/string-builder-inl.h" +#if V8_ENABLE_WEBASSEMBLY +// TODO(chromium:1236668): Drop this when the "SaveAndClearThreadInWasmFlag" +// approach is no longer needed. 
+#include "src/trap-handler/trap-handler.h" +#endif // V8_ENABLE_WEBASSEMBLY + namespace v8 { namespace internal { +namespace { + +#if V8_ENABLE_WEBASSEMBLY +class V8_NODISCARD SaveAndClearThreadInWasmFlag { + public: + explicit SaveAndClearThreadInWasmFlag(Isolate* isolate) : isolate_(isolate) { + if (trap_handler::IsTrapHandlerEnabled()) { + if (trap_handler::IsThreadInWasm()) { + thread_was_in_wasm_ = true; + trap_handler::ClearThreadInWasm(); + } + } + } + ~SaveAndClearThreadInWasmFlag() { + if (thread_was_in_wasm_ && !isolate_->has_pending_exception()) { + trap_handler::SetThreadInWasm(); + } + } + + private: + bool thread_was_in_wasm_{false}; + Isolate* isolate_; +}; +#define CLEAR_THREAD_IN_WASM_SCOPE \ + SaveAndClearThreadInWasmFlag non_wasm_scope(isolate) +#else +#define CLEAR_THREAD_IN_WASM_SCOPE (void)0 +#endif // V8_ENABLE_WEBASSEMBLY + +} // namespace + RUNTIME_FUNCTION(Runtime_GetSubstitution) { HandleScope scope(isolate); DCHECK_EQ(5, args.length()); @@ -154,6 +191,8 @@ RUNTIME_FUNCTION(Runtime_StringSubstring) { } RUNTIME_FUNCTION(Runtime_StringAdd) { + // This is used by Wasm stringrefs. + CLEAR_THREAD_IN_WASM_SCOPE; HandleScope scope(isolate); DCHECK_EQ(2, args.length()); Handle<String> str1 = args.at<String>(0); diff --git a/deps/v8/src/runtime/runtime-test.cc b/deps/v8/src/runtime/runtime-test.cc index 5e39cffc4f70a4..793b0d8776299b 100644 --- a/deps/v8/src/runtime/runtime-test.cc +++ b/deps/v8/src/runtime/runtime-test.cc @@ -20,9 +20,10 @@ #include "src/execution/isolate-inl.h" #include "src/execution/protectors-inl.h" #include "src/execution/tiering-manager.h" -#include "src/heap/heap-inl.h" // For ToBoolean. TODO(jkummerow): Drop. #include "src/heap/heap-write-barrier-inl.h" +#include "src/heap/pretenuring-handler-inl.h" #include "src/ic/stub-cache.h" +#include "src/objects/js-collection-inl.h" #ifdef V8_ENABLE_MAGLEV #include "src/maglev/maglev-concurrent-dispatcher.h" #endif // V8_ENABLE_MAGLEV @@ -48,19 +49,19 @@ namespace internal { namespace { V8_WARN_UNUSED_RESULT Object CrashUnlessFuzzing(Isolate* isolate) { - CHECK(FLAG_fuzzing); + CHECK(v8_flags.fuzzing); return ReadOnlyRoots(isolate).undefined_value(); } V8_WARN_UNUSED_RESULT bool CrashUnlessFuzzingReturnFalse(Isolate* isolate) { - CHECK(FLAG_fuzzing); + CHECK(v8_flags.fuzzing); return false; } // Returns |value| unless correctness-fuzzer-supressions is enabled, // otherwise returns undefined_value. V8_WARN_UNUSED_RESULT Object ReturnFuzzSafe(Object value, Isolate* isolate) { - return FLAG_correctness_fuzzer_suppressions + return v8_flags.correctness_fuzzer_suppressions ? 
ReadOnlyRoots(isolate).undefined_value() : value; } @@ -228,7 +229,7 @@ RUNTIME_FUNCTION(Runtime_RuntimeEvaluateREPL) { RUNTIME_FUNCTION(Runtime_ICsAreEnabled) { SealHandleScope shs(isolate); DCHECK_EQ(0, args.length()); - return isolate->heap()->ToBoolean(FLAG_use_ic); + return isolate->heap()->ToBoolean(v8_flags.use_ic); } RUNTIME_FUNCTION(Runtime_IsConcurrentRecompilationSupported) { @@ -268,7 +269,7 @@ bool CanOptimizeFunction<CodeKind::TURBOFAN>( return CrashUnlessFuzzingReturnFalse(isolate); } - if (!FLAG_turbofan) return false; + if (!v8_flags.turbofan) return false; if (function->shared().optimization_disabled() && function->shared().disabled_optimization_reason() == @@ -280,7 +281,7 @@ bool CanOptimizeFunction<CodeKind::TURBOFAN>( return CrashUnlessFuzzingReturnFalse(isolate); } - if (FLAG_testing_d8_test_runner) { + if (v8_flags.testing_d8_test_runner) { PendingOptimizationTable::MarkedForOptimization(isolate, function); } @@ -289,7 +290,7 @@ bool CanOptimizeFunction<CodeKind::TURBOFAN>( function->HasAvailableCodeKind(kind)) { DCHECK(function->HasAttachedOptimizedCode() || function->ChecksTieringState()); - if (FLAG_testing_d8_test_runner) { + if (v8_flags.testing_d8_test_runner) { PendingOptimizationTable::FunctionWasOptimized(isolate, function); } return false; @@ -303,7 +304,7 @@ template <> bool CanOptimizeFunction<CodeKind::MAGLEV>(Handle<JSFunction> function, Isolate* isolate, IsCompiledScope* is_compiled_scope) { - if (!FLAG_maglev) return false; + if (!v8_flags.maglev) return false; CHECK(!IsAsmWasmFunction(isolate, *function)); @@ -372,7 +373,7 @@ bool EnsureFeedbackVector(Isolate* isolate, Handle<JSFunction> function) { // If the JSFunction isn't compiled but it has a initialized feedback cell // then no need to compile. CompileLazy builtin would handle these cases by // installing the code from SFI. Calling compile here may cause another - // optimization if FLAG_always_turbofan is set. + // optimization if v8_flags.always_turbofan is set. bool needs_compilation = !function->is_compiled() && !function->has_closure_feedback_cell_array(); if (needs_compilation && @@ -482,17 +483,17 @@ RUNTIME_FUNCTION(Runtime_ActiveTierIsTurbofan) { RUNTIME_FUNCTION(Runtime_IsSparkplugEnabled) { DCHECK_EQ(args.length(), 0); - return isolate->heap()->ToBoolean(FLAG_sparkplug); + return isolate->heap()->ToBoolean(v8_flags.sparkplug); } RUNTIME_FUNCTION(Runtime_IsMaglevEnabled) { DCHECK_EQ(args.length(), 0); - return isolate->heap()->ToBoolean(FLAG_maglev); + return isolate->heap()->ToBoolean(v8_flags.maglev); } RUNTIME_FUNCTION(Runtime_IsTurbofanEnabled) { DCHECK_EQ(args.length(), 0); - return isolate->heap()->ToBoolean(FLAG_turbofan); + return isolate->heap()->ToBoolean(v8_flags.turbofan); } RUNTIME_FUNCTION(Runtime_CurrentFrameIsTurbofan) { @@ -582,7 +583,7 @@ RUNTIME_FUNCTION(Runtime_PrepareFunctionForOptimization) { // Hold onto the bytecode array between marking and optimization to ensure // it's not flushed. 
- if (FLAG_testing_d8_test_runner) { + if (v8_flags.testing_d8_test_runner) { PendingOptimizationTable::PreparedForOptimization( isolate, function, allow_heuristic_optimization); } @@ -657,7 +658,7 @@ RUNTIME_FUNCTION(Runtime_OptimizeOsr) { if (!it.done()) function = handle(it.frame()->function(), isolate); if (function.is_null()) return CrashUnlessFuzzing(isolate); - if (V8_UNLIKELY(!FLAG_turbofan) || V8_UNLIKELY(!FLAG_use_osr)) { + if (V8_UNLIKELY(!v8_flags.turbofan) || V8_UNLIKELY(!v8_flags.use_osr)) { return ReadOnlyRoots(isolate).undefined_value(); } @@ -671,7 +672,7 @@ RUNTIME_FUNCTION(Runtime_OptimizeOsr) { return CrashUnlessFuzzing(isolate); } - if (FLAG_testing_d8_test_runner) { + if (v8_flags.testing_d8_test_runner) { PendingOptimizationTable::MarkedForOptimization(isolate, function); } @@ -680,7 +681,7 @@ RUNTIME_FUNCTION(Runtime_OptimizeOsr) { function->ChecksTieringState()); // If function is already optimized, remove the bytecode array from the // pending optimize for test table and return. - if (FLAG_testing_d8_test_runner) { + if (v8_flags.testing_d8_test_runner) { PendingOptimizationTable::FunctionWasOptimized(isolate, function); } return ReadOnlyRoots(isolate).undefined_value(); @@ -706,7 +707,7 @@ RUNTIME_FUNCTION(Runtime_OptimizeOsr) { // If not (e.g. because we enter a nested loop first), the next JumpLoop will // see the cached OSR code with a mismatched offset, and trigger // non-concurrent OSR compilation and installation. - if (isolate->concurrent_recompilation_enabled() && FLAG_concurrent_osr) { + if (isolate->concurrent_recompilation_enabled() && v8_flags.concurrent_osr) { const BytecodeOffset osr_offset = OffsetOfNextJumpLoop(isolate, UnoptimizedFrame::cast(it.frame())); if (osr_offset.IsNone()) { @@ -740,7 +741,7 @@ RUNTIME_FUNCTION(Runtime_BaselineOsr) { JavaScriptFrameIterator it(isolate); Handle<JSFunction> function = handle(it.frame()->function(), isolate); if (function.is_null()) return CrashUnlessFuzzing(isolate); - if (!FLAG_sparkplug || !FLAG_use_osr) { + if (!v8_flags.sparkplug || !v8_flags.use_osr) { return ReadOnlyRoots(isolate).undefined_value(); } if (!it.frame()->is_unoptimized()) { @@ -787,7 +788,7 @@ RUNTIME_FUNCTION(Runtime_GetOptimizationStatus) { DCHECK_EQ(args.length(), 1); int status = 0; - if (FLAG_lite_mode || FLAG_jitless) { + if (v8_flags.lite_mode || v8_flags.jitless) { // Both jitless and lite modes cannot optimize. Unit tests should handle // these the same way. In the future, the two flags may become synonyms. 
status |= static_cast<int>(OptimizationStatus::kLiteMode); @@ -795,10 +796,10 @@ RUNTIME_FUNCTION(Runtime_GetOptimizationStatus) { if (!isolate->use_optimizer()) { status |= static_cast<int>(OptimizationStatus::kNeverOptimize); } - if (FLAG_always_turbofan || FLAG_prepare_always_turbofan) { + if (v8_flags.always_turbofan || v8_flags.prepare_always_turbofan) { status |= static_cast<int>(OptimizationStatus::kAlwaysOptimize); } - if (FLAG_deopt_every_n_times) { + if (v8_flags.deopt_every_n_times) { status |= static_cast<int>(OptimizationStatus::kMaybeDeopted); } @@ -1004,7 +1005,7 @@ int GetSpaceRemainingOnCurrentPage(v8::internal::NewSpace* space) { } void FillUpOneNewSpacePage(Isolate* isolate, Heap* heap) { - DCHECK(!FLAG_single_generation); + DCHECK(!v8_flags.single_generation); PauseAllocationObserversScope pause_observers(heap); NewSpace* space = heap->new_space(); // We cannot rely on `space->limit()` to point to the end of the current page @@ -1072,7 +1073,7 @@ class FileOutputStream : public v8::OutputStream { }; RUNTIME_FUNCTION(Runtime_TakeHeapSnapshot) { - if (FLAG_fuzzing) { + if (v8_flags.fuzzing) { // We don't want to create snapshots in fuzzers. return ReadOnlyRoots(isolate).undefined_value(); } @@ -1180,7 +1181,7 @@ RUNTIME_FUNCTION(Runtime_DebugTrackRetainingPath) { HandleScope scope(isolate); DCHECK_LE(1, args.length()); DCHECK_GE(2, args.length()); - CHECK(FLAG_track_retaining_path); + CHECK(v8_flags.track_retaining_path); Handle<HeapObject> object = args.at<HeapObject>(0); RetainingPathOption option = RetainingPathOption::kDefault; if (args.length() == 2) { @@ -1248,7 +1249,7 @@ RUNTIME_FUNCTION(Runtime_AbortJS) { HandleScope scope(isolate); DCHECK_EQ(1, args.length()); Handle<String> message = args.at<String>(0); - if (FLAG_disable_abortjs) { + if (v8_flags.disable_abortjs) { base::OS::PrintError("[disabled] abort: %s\n", message->ToCString().get()); return Object(); } @@ -1378,12 +1379,15 @@ RUNTIME_FUNCTION(Runtime_PretenureAllocationSite) { return ReturnFuzzSafe(ReadOnlyRoots(isolate).false_value(), isolate); } + PretenturingHandler* pretenuring_handler = heap->pretenuring_handler(); AllocationMemento memento = - heap->FindAllocationMemento<Heap::kForRuntime>(object.map(), object); + pretenuring_handler + ->FindAllocationMemento<PretenturingHandler::kForRuntime>( + object.map(), object); if (memento.is_null()) return ReturnFuzzSafe(ReadOnlyRoots(isolate).false_value(), isolate); AllocationSite site = memento.GetAllocationSite(); - heap->PretenureAllocationSiteOnNextCollection(site); + pretenuring_handler->PretenureAllocationSiteOnNextCollection(site); return ReturnFuzzSafe(ReadOnlyRoots(isolate).true_value(), isolate); } @@ -1735,7 +1739,8 @@ RUNTIME_FUNCTION(Runtime_IsInternalizedString) { RUNTIME_FUNCTION(Runtime_SharedGC) { SealHandleScope scope(isolate); - isolate->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting); + isolate->heap()->CollectGarbageShared(isolate->main_thread_local_heap(), + GarbageCollectionReason::kTesting); return ReadOnlyRoots(isolate).undefined_value(); } @@ -1746,5 +1751,14 @@ RUNTIME_FUNCTION(Runtime_AtomicsConditionNumWaitersForTesting) { return cv->NumWaitersForTesting(isolate); } +RUNTIME_FUNCTION(Runtime_GetWeakCollectionSize) { + HandleScope scope(isolate); + DCHECK_EQ(1, args.length()); + Handle<JSWeakCollection> collection = args.at<JSWeakCollection>(0); + + return Smi::FromInt( + EphemeronHashTable::cast(collection->table()).NumberOfElements()); +} + } // namespace internal } // namespace v8 diff --git 
a/deps/v8/src/runtime/runtime-trace.cc b/deps/v8/src/runtime/runtime-trace.cc index 536d0eed11c7af..7d82f9d5386374 100644 --- a/deps/v8/src/runtime/runtime-trace.cc +++ b/deps/v8/src/runtime/runtime-trace.cc @@ -61,7 +61,7 @@ void PrintRegisters(UnoptimizedFrame* frame, std::ostream& os, bool is_input, static const char* kOutputColourCode = "\033[0;35m"; static const char* kNormalColourCode = "\033[0;m"; const char* kArrowDirection = is_input ? " -> " : " <- "; - if (FLAG_log_colour) { + if (v8_flags.log_colour) { os << (is_input ? kInputColourCode : kOutputColourCode); } @@ -97,7 +97,7 @@ void PrintRegisters(UnoptimizedFrame* frame, std::ostream& os, bool is_input, kArrowDirection, interpreter::Register::FromShortStar(bytecode), 1); } - if (FLAG_log_colour) { + if (v8_flags.log_colour) { os << kNormalColourCode; } } @@ -105,7 +105,7 @@ void PrintRegisters(UnoptimizedFrame* frame, std::ostream& os, bool is_input, } // namespace RUNTIME_FUNCTION(Runtime_TraceUnoptimizedBytecodeEntry) { - if (!FLAG_trace_ignition && !FLAG_trace_baseline_exec) { + if (!v8_flags.trace_ignition && !v8_flags.trace_baseline_exec) { return ReadOnlyRoots(isolate).undefined_value(); } @@ -113,10 +113,10 @@ RUNTIME_FUNCTION(Runtime_TraceUnoptimizedBytecodeEntry) { UnoptimizedFrame* frame = reinterpret_cast<UnoptimizedFrame*>(frame_iterator.frame()); - if (frame->is_interpreted() && !FLAG_trace_ignition) { + if (frame->is_interpreted() && !v8_flags.trace_ignition) { return ReadOnlyRoots(isolate).undefined_value(); } - if (frame->is_baseline() && !FLAG_trace_baseline_exec) { + if (frame->is_baseline() && !v8_flags.trace_baseline_exec) { return ReadOnlyRoots(isolate).undefined_value(); } @@ -155,7 +155,7 @@ RUNTIME_FUNCTION(Runtime_TraceUnoptimizedBytecodeEntry) { } RUNTIME_FUNCTION(Runtime_TraceUnoptimizedBytecodeExit) { - if (!FLAG_trace_ignition && !FLAG_trace_baseline_exec) { + if (!v8_flags.trace_ignition && !v8_flags.trace_baseline_exec) { return ReadOnlyRoots(isolate).undefined_value(); } @@ -163,10 +163,10 @@ RUNTIME_FUNCTION(Runtime_TraceUnoptimizedBytecodeExit) { UnoptimizedFrame* frame = reinterpret_cast<UnoptimizedFrame*>(frame_iterator.frame()); - if (frame->is_interpreted() && !FLAG_trace_ignition) { + if (frame->is_interpreted() && !v8_flags.trace_ignition) { return ReadOnlyRoots(isolate).undefined_value(); } - if (frame->is_baseline() && !FLAG_trace_baseline_exec) { + if (frame->is_baseline() && !v8_flags.trace_baseline_exec) { return ReadOnlyRoots(isolate).undefined_value(); } @@ -199,7 +199,7 @@ RUNTIME_FUNCTION(Runtime_TraceUnoptimizedBytecodeExit) { #ifdef V8_TRACE_FEEDBACK_UPDATES RUNTIME_FUNCTION(Runtime_TraceUpdateFeedback) { - if (!FLAG_trace_feedback_updates) { + if (!v8_flags.trace_feedback_updates) { return ReadOnlyRoots(isolate).undefined_value(); } diff --git a/deps/v8/src/runtime/runtime-typedarray.cc b/deps/v8/src/runtime/runtime-typedarray.cc index 399df74228ab66..71b31f5633a503 100644 --- a/deps/v8/src/runtime/runtime-typedarray.cc +++ b/deps/v8/src/runtime/runtime-typedarray.cc @@ -90,7 +90,7 @@ RUNTIME_FUNCTION(Runtime_TypedArraySortFast) { DCHECK(!array->IsOutOfBounds()); #if MULTI_MAPPED_ALLOCATOR_AVAILABLE - if (FLAG_multi_mapped_mock_allocator) { + if (v8_flags.multi_mapped_mock_allocator) { // Sorting is meaningless with the mock allocator, and std::sort // might crash (because aliasing elements violate its assumptions). 
return *array; diff --git a/deps/v8/src/runtime/runtime-wasm.cc b/deps/v8/src/runtime/runtime-wasm.cc index d6c9bd33c6bb80..a574e037bb14f2 100644 --- a/deps/v8/src/runtime/runtime-wasm.cc +++ b/deps/v8/src/runtime/runtime-wasm.cc @@ -243,19 +243,19 @@ RUNTIME_FUNCTION(Runtime_WasmCompileLazy) { Handle<WasmInstanceObject> instance(WasmInstanceObject::cast(args[0]), isolate); int func_index = args.smi_value_at(1); + + // Save the native_module on the stack, where the GC will use it to scan + // WasmCompileLazy stack frames. wasm::NativeModule** native_module_stack_slot = reinterpret_cast<wasm::NativeModule**>(args.address_of_arg_at(2)); - *native_module_stack_slot = nullptr; + *native_module_stack_slot = instance->module_object().native_module(); DCHECK(isolate->context().is_null()); isolate->set_context(instance->native_context()); - bool success = wasm::CompileLazy(isolate, instance, func_index, - native_module_stack_slot); + bool success = wasm::CompileLazy(isolate, instance, func_index); if (!success) { - { - wasm::ThrowLazyCompilationError( - isolate, instance->module_object().native_module(), func_index); - } + wasm::ThrowLazyCompilationError( + isolate, instance->module_object().native_module(), func_index); DCHECK(isolate->has_pending_exception()); return ReadOnlyRoots{isolate}.exception(); } @@ -293,6 +293,8 @@ RUNTIME_FUNCTION(Runtime_WasmCompileWrapper) { const int function_index = function_data->function_index(); const wasm::WasmFunction& function = module->functions[function_index]; const wasm::FunctionSig* sig = function.sig; + const uint32_t canonical_sig_index = + module->isorecursive_canonical_type_ids[function.sig_index]; // The start function is not guaranteed to be registered as // an exported function (although it is called as one). @@ -307,7 +309,7 @@ RUNTIME_FUNCTION(Runtime_WasmCompileWrapper) { Handle<CodeT> wrapper_code = wasm::JSToWasmWrapperCompilationUnit::CompileSpecificJSToWasmWrapper( - isolate, sig, module); + isolate, sig, canonical_sig_index, module); // Replace the wrapper for the function that triggered the tier-up. // This is to verify that the wrapper is replaced, even if the function @@ -868,6 +870,27 @@ RUNTIME_FUNCTION(Runtime_WasmCreateResumePromise) { return *result; } +#define RETURN_RESULT_OR_TRAP(call) \ + do { \ + Handle<Object> result; \ + if (!(call).ToHandle(&result)) { \ + DCHECK(isolate->has_pending_exception()); \ + /* Mark any exception as uncatchable by Wasm. */ \ + Handle<JSObject> exception(JSObject::cast(isolate->pending_exception()), \ + isolate); \ + Handle<Name> uncatchable = \ + isolate->factory()->wasm_uncatchable_symbol(); \ + LookupIterator it(isolate, exception, uncatchable, LookupIterator::OWN); \ + if (!JSReceiver::HasProperty(&it).FromJust()) { \ + JSObject::AddProperty(isolate, exception, uncatchable, \ + isolate->factory()->true_value(), NONE); \ + } \ + return ReadOnlyRoots(isolate).exception(); \ + } \ + DCHECK(!isolate->has_pending_exception()); \ + return *result; \ + } while (false) + // Returns the new string if the operation succeeds. Otherwise throws an // exception and returns an empty result. 
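A sketch of what the macro above does at each use site (names from this file; simplified, not the literal expansion):

//   Handle<Object> result;
//   if (!isolate->factory()->NewStringFromUtf8(...).ToHandle(&result)) {
//     // Allocation or validation failed; the pending exception is tagged
//     // with wasm_uncatchable_symbol, so Wasm catch handlers rethrow it
//     // instead of catching it. From Wasm's point of view it is a trap.
//     return ReadOnlyRoots(isolate).exception();
//   }
//   return *result;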
RUNTIME_FUNCTION(Runtime_WasmStringNewWtf8) { @@ -894,8 +917,8 @@ RUNTIME_FUNCTION(Runtime_WasmStringNewWtf8) { const base::Vector<const uint8_t> bytes{instance.memory_start() + offset, size}; - RETURN_RESULT_OR_FAILURE( - isolate, isolate->factory()->NewStringFromUtf8(bytes, utf8_variant)); + RETURN_RESULT_OR_TRAP( + isolate->factory()->NewStringFromUtf8(bytes, utf8_variant)); } RUNTIME_FUNCTION(Runtime_WasmStringNewWtf8Array) { @@ -911,8 +934,8 @@ RUNTIME_FUNCTION(Runtime_WasmStringNewWtf8Array) { static_cast<uint32_t>(unibrow::Utf8Variant::kLastUtf8Variant)); auto utf8_variant = static_cast<unibrow::Utf8Variant>(utf8_variant_value); - RETURN_RESULT_OR_FAILURE(isolate, isolate->factory()->NewStringFromUtf8( - array, start, end, utf8_variant)); + RETURN_RESULT_OR_TRAP( + isolate->factory()->NewStringFromUtf8(array, start, end, utf8_variant)); } RUNTIME_FUNCTION(Runtime_WasmStringNewWtf16) { @@ -938,10 +961,8 @@ RUNTIME_FUNCTION(Runtime_WasmStringNewWtf16) { const byte* bytes = instance.memory_start() + offset; const base::uc16* codeunits = reinterpret_cast<const base::uc16*>(bytes); - // TODO(12868): Override any exception with an uncatchable-by-wasm trap. - RETURN_RESULT_OR_FAILURE(isolate, - isolate->factory()->NewStringFromTwoByteLittleEndian( - {codeunits, size_in_codeunits})); + RETURN_RESULT_OR_TRAP(isolate->factory()->NewStringFromTwoByteLittleEndian( + {codeunits, size_in_codeunits})); } RUNTIME_FUNCTION(Runtime_WasmStringNewWtf16Array) { @@ -952,9 +973,8 @@ RUNTIME_FUNCTION(Runtime_WasmStringNewWtf16Array) { uint32_t start = NumberToUint32(args[1]); uint32_t end = NumberToUint32(args[2]); - // TODO(12868): Override any exception with an uncatchable-by-wasm trap. - RETURN_RESULT_OR_FAILURE( - isolate, isolate->factory()->NewStringFromUtf16(array, start, end)); + RETURN_RESULT_OR_TRAP( + isolate->factory()->NewStringFromUtf16(array, start, end)); } // Returns the new string if the operation succeeds. Otherwise traps. @@ -1289,9 +1309,12 @@ RUNTIME_FUNCTION(Runtime_WasmStringViewWtf8Slice) { DCHECK_LT(start, end); DCHECK(base::IsInBounds<size_t>(start, end - start, array->length())); - RETURN_RESULT_OR_FAILURE(isolate, - isolate->factory()->NewStringFromUtf8( - array, start, end, unibrow::Utf8Variant::kWtf8)); + // This can't throw because the result can't be too long if the input wasn't, + // and encoding failures are ruled out too because {start}/{end} are aligned. + return *isolate->factory() + ->NewStringFromUtf8(array, start, end, + unibrow::Utf8Variant::kWtf8) + .ToHandleChecked(); } } // namespace internal diff --git a/deps/v8/src/runtime/runtime-weak-refs.cc b/deps/v8/src/runtime/runtime-weak-refs.cc index f3c6f63ebcb6d7..ff60813b434bef 100644 --- a/deps/v8/src/runtime/runtime-weak-refs.cc +++ b/deps/v8/src/runtime/runtime-weak-refs.cc @@ -2,10 +2,9 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
-#include "src/runtime/runtime-utils.h" - #include "src/execution/arguments-inl.h" #include "src/objects/js-weak-refs-inl.h" +#include "src/runtime/runtime-utils.h" namespace v8 { namespace internal { @@ -44,7 +43,8 @@ RUNTIME_FUNCTION( RUNTIME_FUNCTION(Runtime_JSWeakRefAddToKeptObjects) { HandleScope scope(isolate); DCHECK_EQ(1, args.length()); - Handle<JSReceiver> object = args.at<JSReceiver>(0); + Handle<HeapObject> object = args.at<HeapObject>(0); + DCHECK(object->CanBeHeldWeakly()); isolate->heap()->KeepDuringJob(object); diff --git a/deps/v8/src/runtime/runtime.cc b/deps/v8/src/runtime/runtime.cc index 8f6ff8ec45f725..b878193eee3c65 100644 --- a/deps/v8/src/runtime/runtime.cc +++ b/deps/v8/src/runtime/runtime.cc @@ -193,7 +193,7 @@ bool Runtime::MayAllocate(FunctionId id) { } bool Runtime::IsAllowListedForFuzzing(FunctionId id) { - CHECK(FLAG_fuzzing); + CHECK(v8_flags.fuzzing); switch (id) { // Runtime functions allowlisted for all fuzzers. Only add functions that // help increase coverage. @@ -219,10 +219,10 @@ bool Runtime::IsAllowListedForFuzzing(FunctionId id) { case Runtime::kGetOptimizationStatus: case Runtime::kHeapObjectVerify: case Runtime::kIsBeingInterpreted: - return !FLAG_allow_natives_for_differential_fuzzing; + return !v8_flags.allow_natives_for_differential_fuzzing; case Runtime::kVerifyType: - return !FLAG_allow_natives_for_differential_fuzzing && - !FLAG_concurrent_recompilation; + return !v8_flags.allow_natives_for_differential_fuzzing && + !v8_flags.concurrent_recompilation; case Runtime::kBaselineOsr: case Runtime::kCompileBaseline: return ENABLE_SPARKPLUG; diff --git a/deps/v8/src/runtime/runtime.h b/deps/v8/src/runtime/runtime.h index 61f3d2a41d2c15..b15ed40d20bfa3 100644 --- a/deps/v8/src/runtime/runtime.h +++ b/deps/v8/src/runtime/runtime.h @@ -191,7 +191,7 @@ namespace internal { F(AsyncGeneratorHasCatchHandlerForPC, 1, 1) \ I(AsyncGeneratorReject, 2, 1) \ I(AsyncGeneratorResolve, 3, 1) \ - I(AsyncGeneratorYield, 3, 1) \ + I(AsyncGeneratorYieldWithAwait, 3, 1) \ I(CreateJSGeneratorObject, 2, 1) \ I(GeneratorClose, 1, 1) \ F(GeneratorGetFunction, 1, 1) \ @@ -299,7 +299,6 @@ namespace internal { F(AddDictionaryProperty, 3, 1) \ F(AddPrivateBrand, 4, 1) \ F(AllocateHeapNumber, 0, 1) \ - F(CollectTypeProfile, 3, 1) \ F(CompleteInobjectSlackTrackingForMap, 1, 1) \ I(CopyDataProperties, 2, 1) \ I(CopyDataPropertiesWithExcludedPropertiesOnStack, -1 /* >= 1 */, 1) \ @@ -512,6 +511,7 @@ namespace internal { F(GetInitializerFunction, 1, 1) \ F(GetOptimizationStatus, 1, 1) \ F(GetUndetectable, 0, 1) \ + F(GetWeakCollectionSize, 1, 1) \ F(GlobalPrint, 1, 1) \ F(HasDictionaryElements, 1, 1) \ F(HasDoubleElements, 1, 1) \ diff --git a/deps/v8/src/sandbox/bounded-size-inl.h b/deps/v8/src/sandbox/bounded-size-inl.h new file mode 100644 index 00000000000000..49525ed403dd4d --- /dev/null +++ b/deps/v8/src/sandbox/bounded-size-inl.h @@ -0,0 +1,36 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
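// (Illustrative sketch, not part of the upstream header: the BoundedSize
// encoding defined below stores `value << kBoundedSizeShift` and decodes
// with a logical right shift. Any raw bit pattern written into the field,
// even by an attacker inside the sandbox, therefore decodes to at most
// SIZE_MAX >> kBoundedSizeShift, which the constants in
// include/v8-internal.h line up with kMaxSafeBufferSizeForSandbox:
//
//   WriteBoundedSizeField(addr, n);         // stores n << kBoundedSizeShift
//   size_t m = ReadBoundedSizeField(addr);  // m is always a safe size
//
// No validation is needed on the read path; the bound holds by construction.)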
+ +#ifndef V8_SANDBOX_BOUNDED_SIZE_INL_H_ +#define V8_SANDBOX_BOUNDED_SIZE_INL_H_ + +#include "include/v8-internal.h" +#include "src/common/ptr-compr-inl.h" +#include "src/sandbox/sandbox.h" +#include "src/sandbox/sandboxed-pointer.h" + +namespace v8::internal { + +V8_INLINE size_t ReadBoundedSizeField(Address field_address) { +#ifdef V8_ENABLE_SANDBOX + size_t raw_value = base::ReadUnalignedValue<size_t>(field_address); + return raw_value >> kBoundedSizeShift; +#else + return ReadMaybeUnalignedValue<size_t>(field_address); +#endif +} + +V8_INLINE void WriteBoundedSizeField(Address field_address, size_t value) { +#ifdef V8_ENABLE_SANDBOX + DCHECK_LE(value, kMaxSafeBufferSizeForSandbox); + size_t raw_value = value << kBoundedSizeShift; + base::WriteUnalignedValue<size_t>(field_address, raw_value); +#else + WriteMaybeUnalignedValue<size_t>(field_address, value); +#endif +} + +} // namespace v8::internal + +#endif // V8_SANDBOX_BOUNDED_SIZE_INL_H_ diff --git a/deps/v8/src/sandbox/bounded-size.h b/deps/v8/src/sandbox/bounded-size.h new file mode 100644 index 00000000000000..06e98b0489fb58 --- /dev/null +++ b/deps/v8/src/sandbox/bounded-size.h @@ -0,0 +1,28 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_SANDBOX_BOUNDED_SIZE_H_ +#define V8_SANDBOX_BOUNDED_SIZE_H_ + +#include "src/common/globals.h" + +namespace v8::internal { + +// +// BoundedSize accessors. +// +// A BoundedSize is just a regular size_t when the sandbox is disabled. +// However, when the sandbox is enabled, a BoundedSize is guaranteed to be in +// the range [0, kMaxSafeBufferSizeForSandbox]. This property is required to +// ensure safe access to variable-sized buffers, in particular ArrayBuffers and +// their views, located inside the sandbox. +// + +V8_INLINE size_t ReadBoundedSizeField(Address field_address); + +V8_INLINE void WriteBoundedSizeField(Address field_address, size_t value); + +} // namespace v8::internal + +#endif // V8_SANDBOX_BOUNDED_SIZE_H_ diff --git a/deps/v8/src/sandbox/sandbox.cc b/deps/v8/src/sandbox/sandbox.cc index 33d7046b066714..8738690f9c87bb 100644 --- a/deps/v8/src/sandbox/sandbox.cc +++ b/deps/v8/src/sandbox/sandbox.cc @@ -215,8 +215,8 @@ bool Sandbox::InitializeAsPartiallyReservedSandbox(v8::VirtualAddressSpace* vas, // ourselves, and so are potentially better positioned to determine a good // base address for the sandbox than the embedder. base::RandomNumberGenerator rng; - if (FLAG_random_seed != 0) { - rng.SetSeed(FLAG_random_seed); + if (v8_flags.random_seed != 0) { + rng.SetSeed(v8_flags.random_seed); } // We try to ensure that base + size is still (mostly) within the process' diff --git a/deps/v8/src/snapshot/deserializer.cc b/deps/v8/src/snapshot/deserializer.cc index b01a197b605728..d6592a5eeb0503 100644 --- a/deps/v8/src/snapshot/deserializer.cc +++ b/deps/v8/src/snapshot/deserializer.cc @@ -403,8 +403,9 @@ void Deserializer<IsolateT>::PostProcessNewJSReceiver( auto bs = backing_store(store_index); SharedFlag shared = bs && bs->is_shared() ? SharedFlag::kShared : SharedFlag::kNotShared; - DCHECK_IMPLIES(bs, buffer.is_resizable() == bs->is_resizable()); - ResizableFlag resizable = bs && bs->is_resizable() + DCHECK_IMPLIES(bs, + buffer.is_resizable_by_js() == bs->is_resizable_by_js()); + ResizableFlag resizable = bs && bs->is_resizable_by_js() ? 
ResizableFlag::kResizable : ResizableFlag::kNotResizable; buffer.Setup(shared, resizable, bs); @@ -452,7 +453,11 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map, String result = *isolate()->string_table()->LookupKey(isolate(), &key); if (result != raw_obj) { - String::cast(raw_obj).MakeThin(isolate(), result); + // Updating invalidated object size from a background thread would + // race. We are allowed to skip this here since this string hasn't + // transitioned so far. + String::cast(raw_obj).MakeThin(isolate(), result, + UpdateInvalidatedObjectSize::kNo); // Mutate the given object handle so that the backreference entry is // also updated. obj.PatchValue(result); @@ -732,7 +737,6 @@ class DeserializerRelocInfoVisitor { void VisitCodeTarget(Code host, RelocInfo* rinfo); void VisitEmbeddedPointer(Code host, RelocInfo* rinfo); - void VisitRuntimeEntry(Code host, RelocInfo* rinfo); void VisitExternalReference(Code host, RelocInfo* rinfo); void VisitInternalReference(Code host, RelocInfo* rinfo); void VisitOffHeapTarget(Code host, RelocInfo* rinfo); @@ -759,12 +763,6 @@ void DeserializerRelocInfoVisitor::VisitEmbeddedPointer(Code host, rinfo->set_target_object(isolate()->heap(), object); } -void DeserializerRelocInfoVisitor::VisitRuntimeEntry(Code host, - RelocInfo* rinfo) { - // We no longer serialize code that contains runtime entries. - UNREACHABLE(); -} - void DeserializerRelocInfoVisitor::VisitExternalReference(Code host, RelocInfo* rinfo) { byte data = source().Get(); diff --git a/deps/v8/src/snapshot/deserializer.h b/deps/v8/src/snapshot/deserializer.h index 130125a2319b9d..b19f88130802df 100644 --- a/deps/v8/src/snapshot/deserializer.h +++ b/deps/v8/src/snapshot/deserializer.h @@ -32,7 +32,7 @@ class Object; #if defined(V8_TARGET_ARCH_MIPS64) || defined(V8_TARGET_ARCH_PPC) || \ defined(V8_TARGET_ARCH_S390) || defined(V8_TARGET_ARCH_PPC64) || \ defined(V8_TARGET_ARCH_RISCV32) || defined(V8_TARGET_ARCH_RISCV64) || \ - V8_EMBEDDED_CONSTANT_POOL + V8_EMBEDDED_CONSTANT_POOL_BOOL #define V8_CODE_EMBEDS_OBJECT_POINTER 1 #else #define V8_CODE_EMBEDS_OBJECT_POINTER 0 @@ -245,7 +245,7 @@ class Deserializer : public SerializerDeserializer { // be in an invalid state class V8_NODISCARD DisableGCStats { public: - explicit DisableGCStats() { + DisableGCStats() { original_gc_stats_ = TracingFlags::gc_stats; TracingFlags::gc_stats = 0; } diff --git a/deps/v8/src/snapshot/embedded/embedded-data-inl.h b/deps/v8/src/snapshot/embedded/embedded-data-inl.h index e2ebb85263e36b..028bdd0713476f 100644 --- a/deps/v8/src/snapshot/embedded/embedded-data-inl.h +++ b/deps/v8/src/snapshot/embedded/embedded-data-inl.h @@ -69,7 +69,7 @@ Address EmbeddedData::SafepointTableStartOf(Builtin builtin) const { uint32_t EmbeddedData::SafepointTableSizeOf(Builtin builtin) const { DCHECK(Builtins::IsBuiltinId(builtin)); const struct LayoutDescription& desc = LayoutDescription(builtin); -#if V8_EMBEDDED_CONSTANT_POOL +#if V8_EMBEDDED_CONSTANT_POOL_BOOL DCHECK_LE(desc.handler_table_offset, desc.constant_pool_offset); #else DCHECK_LE(desc.handler_table_offset, desc.code_comments_offset_offset); @@ -88,7 +88,7 @@ Address EmbeddedData::HandlerTableStartOf(Builtin builtin) const { uint32_t EmbeddedData::HandlerTableSizeOf(Builtin builtin) const { DCHECK(Builtins::IsBuiltinId(builtin)); const struct LayoutDescription& desc = LayoutDescription(builtin); -#if V8_EMBEDDED_CONSTANT_POOL +#if V8_EMBEDDED_CONSTANT_POOL_BOOL DCHECK_LE(desc.handler_table_offset, desc.constant_pool_offset); return 
desc.constant_pool_offset - desc.handler_table_offset; #else @@ -99,7 +99,7 @@ uint32_t EmbeddedData::HandlerTableSizeOf(Builtin builtin) const { Address EmbeddedData::ConstantPoolStartOf(Builtin builtin) const { DCHECK(Builtins::IsBuiltinId(builtin)); -#if V8_EMBEDDED_CONSTANT_POOL +#if V8_EMBEDDED_CONSTANT_POOL_BOOL const struct LayoutDescription& desc = LayoutDescription(builtin); const uint8_t* result = RawMetadata() + desc.constant_pool_offset; DCHECK_LE(desc.constant_pool_offset, data_size_); @@ -111,7 +111,7 @@ Address EmbeddedData::ConstantPoolStartOf(Builtin builtin) const { uint32_t EmbeddedData::ConstantPoolSizeOf(Builtin builtin) const { DCHECK(Builtins::IsBuiltinId(builtin)); -#if V8_EMBEDDED_CONSTANT_POOL +#if V8_EMBEDDED_CONSTANT_POOL_BOOL const struct LayoutDescription& desc = LayoutDescription(builtin); DCHECK_LE(desc.constant_pool_offset, desc.code_comments_offset_offset); return desc.code_comments_offset_offset - desc.constant_pool_offset; diff --git a/deps/v8/src/snapshot/embedded/embedded-data.cc b/deps/v8/src/snapshot/embedded/embedded-data.cc index 118eb8b581ae89..1260f1a6427858 100644 --- a/deps/v8/src/snapshot/embedded/embedded-data.cc +++ b/deps/v8/src/snapshot/embedded/embedded-data.cc @@ -317,7 +317,7 @@ EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) { layout_desc.handler_table_offset = raw_data_size + static_cast<uint32_t>(code.handler_table_offset()); -#if V8_EMBEDDED_CONSTANT_POOL +#if V8_EMBEDDED_CONSTANT_POOL_BOOL layout_desc.constant_pool_offset = raw_data_size + static_cast<uint32_t>(code.constant_pool_offset()); #endif diff --git a/deps/v8/src/snapshot/embedded/embedded-data.h b/deps/v8/src/snapshot/embedded/embedded-data.h index a78c030ba88233..4c5a1f998ac744 100644 --- a/deps/v8/src/snapshot/embedded/embedded-data.h +++ b/deps/v8/src/snapshot/embedded/embedded-data.h @@ -197,7 +197,7 @@ class EmbeddedData final { // The offsets describing inline metadata tables, relative to the start // of the embedded data section. 
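// (Inferred from the accessors in embedded-data-inl.h above: a builtin's
// inline metadata is laid out as safepoint table, then handler table, then,
// only when V8_EMBEDDED_CONSTANT_POOL_BOOL, constant pool, then code
// comments. Each table's size is the difference between adjacent offsets,
// which is why the DCHECK_LEs require the offsets to be non-decreasing.)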
uint32_t handler_table_offset; -#if V8_EMBEDDED_CONSTANT_POOL +#if V8_EMBEDDED_CONSTANT_POOL_BOOL uint32_t constant_pool_offset; #endif uint32_t code_comments_offset_offset; @@ -215,7 +215,7 @@ class EmbeddedData final { 3 * kUInt32Size); static_assert(offsetof(LayoutDescription, handler_table_offset) == 4 * kUInt32Size); -#if V8_EMBEDDED_CONSTANT_POOL +#if V8_EMBEDDED_CONSTANT_POOL_BOOL static_assert(offsetof(LayoutDescription, constant_pool_offset) == 5 * kUInt32Size); static_assert(offsetof(LayoutDescription, code_comments_offset_offset) == diff --git a/deps/v8/src/snapshot/serializer.cc b/deps/v8/src/snapshot/serializer.cc index 4410790f1925a8..13dcfdcc82ea18 100644 --- a/deps/v8/src/snapshot/serializer.cc +++ b/deps/v8/src/snapshot/serializer.cc @@ -522,7 +522,7 @@ void Serializer::ObjectSerializer::SerializeJSTypedArray() { CHECK_LE(byte_length_size, size_t{std::numeric_limits<int32_t>::max()}); int32_t byte_length = static_cast<int32_t>(byte_length_size); Maybe<int32_t> max_byte_length = Nothing<int32_t>(); - if (buffer.is_resizable()) { + if (buffer.is_resizable_by_js()) { CHECK_LE(buffer.max_byte_length(), std::numeric_limits<int32_t>::max()); max_byte_length = @@ -558,7 +558,7 @@ void Serializer::ObjectSerializer::SerializeJSArrayBuffer() { CHECK_LE(buffer.byte_length(), std::numeric_limits<int32_t>::max()); int32_t byte_length = static_cast<int32_t>(buffer.byte_length()); Maybe<int32_t> max_byte_length = Nothing<int32_t>(); - if (buffer.is_resizable()) { + if (buffer.is_resizable_by_js()) { CHECK_LE(buffer.max_byte_length(), std::numeric_limits<int32_t>::max()); max_byte_length = Just(static_cast<int32_t>(buffer.max_byte_length())); } @@ -788,6 +788,8 @@ SnapshotSpace GetSnapshotSpace(HeapObject object) { return SnapshotSpace::kCode; case MAP_SPACE: return SnapshotSpace::kMap; + case SHARED_SPACE: + case SHARED_LO_SPACE: case CODE_LO_SPACE: case RO_SPACE: UNREACHABLE(); @@ -1037,7 +1039,6 @@ class Serializer::ObjectSerializer::RelocInfoObjectPreSerializer { void VisitExternalReference(Code host, RelocInfo* rinfo) {} void VisitInternalReference(Code host, RelocInfo* rinfo) {} - void VisitRuntimeEntry(Code host, RelocInfo* reloc) { UNREACHABLE(); } void VisitOffHeapTarget(Code host, RelocInfo* target) {} int num_serialized_objects() const { return num_serialized_objects_; } @@ -1124,12 +1125,6 @@ void Serializer::ObjectSerializer::VisitExternalPointer( } } -void Serializer::ObjectSerializer::VisitRuntimeEntry(Code host, - RelocInfo* rinfo) { - // We no longer serialize code that contains runtime entries. 
- UNREACHABLE(); -} - void Serializer::ObjectSerializer::VisitOffHeapTarget(Code host, RelocInfo* rinfo) { static_assert(EmbeddedData::kTableSize == Builtins::kBuiltinCount); @@ -1268,8 +1263,7 @@ void Serializer::ObjectSerializer::SerializeCode(Map map, int size) { RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) | RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) | - RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) | - RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY); + RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET); DCHECK_EQ(HeapObject::kHeaderSize, bytes_processed_so_far_); Handle<Code> on_heap_code = Handle<Code>::cast(object_); diff --git a/deps/v8/src/snapshot/serializer.h b/deps/v8/src/snapshot/serializer.h index 9fdd059e479e58..edd3dfea3103f9 100644 --- a/deps/v8/src/snapshot/serializer.h +++ b/deps/v8/src/snapshot/serializer.h @@ -447,7 +447,6 @@ class Serializer::ObjectSerializer : public ObjectVisitor { void VisitExternalReference(Code host, RelocInfo* rinfo) override; void VisitInternalReference(Code host, RelocInfo* rinfo) override; void VisitCodeTarget(Code host, RelocInfo* target) override; - void VisitRuntimeEntry(Code host, RelocInfo* reloc) override; void VisitOffHeapTarget(Code host, RelocInfo* target) override; void VisitExternalPointer(HeapObject host, ExternalPointerSlot slot, diff --git a/deps/v8/src/snapshot/shared-heap-deserializer.cc b/deps/v8/src/snapshot/shared-heap-deserializer.cc index 3514aa09733faf..d71220be73a614 100644 --- a/deps/v8/src/snapshot/shared-heap-deserializer.cc +++ b/deps/v8/src/snapshot/shared-heap-deserializer.cc @@ -12,9 +12,10 @@ namespace internal { void SharedHeapDeserializer::DeserializeIntoIsolate() { // Don't deserialize into client Isolates. If there are client Isolates, the // shared heap object cache should already be populated. - DCHECK_IMPLIES(isolate()->shared_isolate() != nullptr, - !isolate()->shared_heap_object_cache()->empty()); - if (isolate()->shared_isolate() != nullptr) return; + if (isolate()->has_shared_heap() && !isolate()->is_shared_space_isolate()) { + DCHECK(!isolate()->shared_heap_object_cache()->empty()); + return; + } DCHECK(isolate()->shared_heap_object_cache()->empty()); HandleScope scope(isolate()); diff --git a/deps/v8/src/snapshot/shared-heap-serializer.cc b/deps/v8/src/snapshot/shared-heap-serializer.cc index 90b2ae07cb2c77..e8768f4ce17d55 100644 --- a/deps/v8/src/snapshot/shared-heap-serializer.cc +++ b/deps/v8/src/snapshot/shared-heap-serializer.cc @@ -95,7 +95,7 @@ bool SharedHeapSerializer::SerializeUsingSharedHeapObjectCache( // not present in the startup snapshot to be serialized. if (ShouldReconstructSharedHeapObjectCacheForTesting()) { std::vector<Object>* existing_cache = - isolate()->shared_isolate()->shared_heap_object_cache(); + isolate()->shared_heap_isolate()->shared_heap_object_cache(); const size_t existing_cache_size = existing_cache->size(); // This is strictly < because the existing cache contains the terminating // undefined value, which the reconstructed cache does not. @@ -201,12 +201,12 @@ bool SharedHeapSerializer::ShouldReconstructSharedHeapObjectCacheForTesting() // need to reconstruct the shared heap object cache because it is not actually // shared. 
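// (Context note, a best-effort reading of this patch: the queries migrate
// from "is there a dedicated shared Isolate?" (shared_isolate() != nullptr)
// to "is there any shared heap, and is this the isolate that owns the
// shared space?" (has_shared_heap() / is_shared_space_isolate()), matching
// the move from a separate shared Isolate to a shared space hosted inside a
// regular Isolate.)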
return reconstruct_read_only_and_shared_object_caches_for_testing() && - isolate()->shared_isolate() != nullptr; + isolate()->has_shared_heap(); } void SharedHeapSerializer::ReconstructSharedHeapObjectCacheForTesting() { std::vector<Object>* cache = - isolate()->shared_isolate()->shared_heap_object_cache(); + isolate()->shared_heap_isolate()->shared_heap_object_cache(); // Don't reconstruct the final element, which is always undefined and marks // the end of the cache, since serializing the live Isolate may extend the // shared object cache. diff --git a/deps/v8/src/snapshot/snapshot.cc b/deps/v8/src/snapshot/snapshot.cc index 8cdaa55e62032a..aa6e34a7e51184 100644 --- a/deps/v8/src/snapshot/snapshot.cc +++ b/deps/v8/src/snapshot/snapshot.cc @@ -321,7 +321,7 @@ void Snapshot::SerializeDeserializeAndVerifyForTesting( Snapshot::SerializerFlags flags( Snapshot::kAllowUnknownExternalReferencesForTesting | Snapshot::kAllowActiveIsolateForTesting | - ((isolate->shared_isolate() || ReadOnlyHeap::IsReadOnlySpaceShared()) + ((isolate->has_shared_heap() || ReadOnlyHeap::IsReadOnlySpaceShared()) ? Snapshot::kReconstructReadOnlyAndSharedObjectCachesForTesting : 0)); serialized_data = Snapshot::Create(isolate, *default_context, diff --git a/deps/v8/src/temporal/temporal-parser.cc b/deps/v8/src/temporal/temporal-parser.cc index 450502aa06969d..229144e3d58784 100644 --- a/deps/v8/src/temporal/temporal-parser.cc +++ b/deps/v8/src/temporal/temporal-parser.cc @@ -707,6 +707,10 @@ int32_t ScanTimeZoneBracketedAnnotation(base::Vector<Char> str, int32_t s, int32_t len = ScanTimeZoneIdentifier(str, cur, r); cur += len; if (len == 0 || str.length() < (cur + 1) || (str[cur] != ']')) { + // Only ScanTimeZoneBracketedAnnotation knows that the postcondition of + // TimeZoneIdentifier was not met (e.g. no closing ']'), so reset here. + r->tzi_name_start = 0; + r->tzi_name_length = 0; return 0; } cur++; @@ -878,6 +882,10 @@ int32_t ScanCalendar(base::Vector<Char> str, int32_t s, int32_t len = ScanCalendarName(str, cur, r); if (len == 0) return 0; if ((str.length() < (cur + len + 1)) || (str[cur + len] != ']')) { + // Only ScanCalendar knows that the postcondition of CalendarName was not + // met, so the recorded calendar fields need to be reset here. + r->calendar_name_start = 0; + r->calendar_name_length = 0; return 0; } return 6 + len + 1; @@ -1047,31 +1055,6 @@ int32_t ScanTemporalZonedDateTimeString(base::Vector<Char> str, int32_t s, SCAN_FORWARD(TemporalDateTimeString, CalendarDateTime, ParsedISO8601Result) -// Date [TimeSpecSeparator] TimeZone [Calendar] -template <typename Char> -int32_t ScanDate_TimeSpecSeparator_TimeZone_Calendar(base::Vector<Char> str, - int32_t s, - ParsedISO8601Result* r) { - int32_t cur = s; - int32_t len = ScanDate(str, cur, r); - if (len == 0) return 0; - cur = len; - cur += ScanTimeSpecSeparator(str, cur, r); - len = ScanTimeZone(str, cur, r); - if (len == 0) return 0; - cur += len; - cur += ScanCalendar(str, cur, r); - return cur - s; -} - -// TemporalTimeZoneString: -// TimeZoneIdentifier -// Date [TimeSpecSeparator] TimeZone [Calendar] -// The lookahead is at most 8 chars. 
-SCAN_EITHER_FORWARD(TemporalTimeZoneString, TimeZoneIdentifier, - Date_TimeSpecSeparator_TimeZone_Calendar, - ParsedISO8601Result) - // TemporalMonthDayString // DateSpecMonthDay // CalendarDateTime @@ -1132,7 +1115,6 @@ SATISIFY(TemporalDateTimeString, ParsedISO8601Result) SATISIFY(DateTime, ParsedISO8601Result) SATISIFY(DateSpecYearMonth, ParsedISO8601Result) SATISIFY(DateSpecMonthDay, ParsedISO8601Result) -SATISIFY(Date_TimeSpecSeparator_TimeZone_Calendar, ParsedISO8601Result) SATISIFY(CalendarDateTime, ParsedISO8601Result) SATISIFY(CalendarTime_L1, ParsedISO8601Result) SATISIFY(CalendarTime_L2, ParsedISO8601Result) @@ -1152,32 +1134,11 @@ SATISIFY_EITHER(TemporalMonthDayString, DateSpecMonthDay, CalendarDateTime, ParsedISO8601Result) SATISIFY(TimeZoneNumericUTCOffset, ParsedISO8601Result) SATISIFY(TimeZoneIdentifier, ParsedISO8601Result) -SATISIFY_EITHER(TemporalTimeZoneString, TimeZoneIdentifier, - Date_TimeSpecSeparator_TimeZone_Calendar, ParsedISO8601Result) SATISIFY(TemporalInstantString, ParsedISO8601Result) SATISIFY(TemporalZonedDateTimeString, ParsedISO8601Result) SATISIFY(CalendarName, ParsedISO8601Result) -// TemporalCalendarString : -// CalendarName -// TemporalInstantString -// CalendarDateTime -// CalendarTime -// DateSpecYearMonth -// DateSpecMonthDay -template <typename Char> -bool SatisfyTemporalCalendarString(base::Vector<Char> str, - ParsedISO8601Result* r) { - IF_SATISFY_RETURN(CalendarName) - IF_SATISFY_RETURN(TemporalInstantString) - IF_SATISFY_RETURN(CalendarDateTime) - IF_SATISFY_RETURN(CalendarTime) - IF_SATISFY_RETURN(DateSpecYearMonth) - IF_SATISFY_RETURN(DateSpecMonthDay) - return false; -} - // Duration // Digits : Digit [Digits] @@ -1437,8 +1398,8 @@ IMPL_PARSE_METHOD(ParsedISO8601Result, TemporalMonthDayString) IMPL_PARSE_METHOD(ParsedISO8601Result, TemporalTimeString) IMPL_PARSE_METHOD(ParsedISO8601Result, TemporalInstantString) IMPL_PARSE_METHOD(ParsedISO8601Result, TemporalZonedDateTimeString) -IMPL_PARSE_METHOD(ParsedISO8601Result, TemporalTimeZoneString) -IMPL_PARSE_METHOD(ParsedISO8601Result, TemporalCalendarString) +IMPL_PARSE_METHOD(ParsedISO8601Result, TimeZoneIdentifier) +IMPL_PARSE_METHOD(ParsedISO8601Result, CalendarName) IMPL_PARSE_METHOD(ParsedISO8601Result, TimeZoneNumericUTCOffset) IMPL_PARSE_METHOD(ParsedISO8601Duration, TemporalDurationString) diff --git a/deps/v8/src/temporal/temporal-parser.h b/deps/v8/src/temporal/temporal-parser.h index 593741bfcfcd11..bfcd90aef0e644 100644 --- a/deps/v8/src/temporal/temporal-parser.h +++ b/deps/v8/src/temporal/temporal-parser.h @@ -15,9 +15,9 @@ namespace internal { * ParsedISO8601Result contains the parsed result of ISO 8601 grammar * documented in #sec-temporal-iso8601grammar * for TemporalInstantString, TemporalZonedDateTimeString, - * TemporalCalendarString, TemporalDateString, TemporalDateTimeString, + * CalendarName, TemporalDateString, TemporalDateTimeString, * TemporalMonthDayString, TemporalRelativeToString, TemporalTimeString, - * TemporalTimeZoneString, and TemporalYearMonthString. For all the fields + * TimeZoneIdentifier, and TemporalYearMonthString. For all the fields * represented by int32_t, a special value kMinInt31 is used to represent the * field is "undefined" after parsing. 
*/ @@ -143,9 +143,9 @@ class V8_EXPORT_PRIVATE TemporalParser { DEFINE_PARSE_METHOD(ParsedISO8601Result, TemporalMonthDayString); DEFINE_PARSE_METHOD(ParsedISO8601Result, TemporalInstantString); DEFINE_PARSE_METHOD(ParsedISO8601Result, TemporalZonedDateTimeString); - DEFINE_PARSE_METHOD(ParsedISO8601Result, TemporalTimeZoneString); + DEFINE_PARSE_METHOD(ParsedISO8601Result, TimeZoneIdentifier); DEFINE_PARSE_METHOD(ParsedISO8601Result, TemporalRelativeToString); - DEFINE_PARSE_METHOD(ParsedISO8601Result, TemporalCalendarString); + DEFINE_PARSE_METHOD(ParsedISO8601Result, CalendarName); DEFINE_PARSE_METHOD(ParsedISO8601Duration, TemporalDurationString); DEFINE_PARSE_METHOD(ParsedISO8601Result, TimeZoneNumericUTCOffset); }; diff --git a/deps/v8/src/torque/implementation-visitor.cc b/deps/v8/src/torque/implementation-visitor.cc index fd964253a82ee5..1bfa898a1e3d24 100644 --- a/deps/v8/src/torque/implementation-visitor.cc +++ b/deps/v8/src/torque/implementation-visitor.cc @@ -1536,7 +1536,7 @@ VisitResult ImplementationVisitor::GenerateArrayLength(VisitResult object, {f.name_and_type.name, f.const_qualified ? (before_current - ? LocalValue{[=]() { + ? LocalValue{[this, object, f, class_type]() { return GenerateFieldReference(object, f, class_type); }} : LocalValue("Array lengths may only refer to fields " diff --git a/deps/v8/src/wasm/baseline/liftoff-assembler.cc b/deps/v8/src/wasm/baseline/liftoff-assembler.cc index 5d88d60039da31..efcd583f4f2692 100644 --- a/deps/v8/src/wasm/baseline/liftoff-assembler.cc +++ b/deps/v8/src/wasm/baseline/liftoff-assembler.cc @@ -14,7 +14,6 @@ #include "src/compiler/wasm-compiler.h" #include "src/utils/ostreams.h" #include "src/wasm/baseline/liftoff-register.h" -#include "src/wasm/function-body-decoder-impl.h" #include "src/wasm/object-access.h" #include "src/wasm/wasm-linkage.h" #include "src/wasm/wasm-opcodes.h" diff --git a/deps/v8/src/wasm/baseline/liftoff-compiler.cc b/deps/v8/src/wasm/baseline/liftoff-compiler.cc index 09ae03ba21830b..88e2da86cdb6e0 100644 --- a/deps/v8/src/wasm/baseline/liftoff-compiler.cc +++ b/deps/v8/src/wasm/baseline/liftoff-compiler.cc @@ -556,12 +556,6 @@ class LiftoffCompiler { return __ GetTotalFrameSlotCountForGC(); } - int GetFeedbackVectorSlots() const { - // The number of call instructions is capped by max function size. - static_assert(kV8MaxWasmFunctionSize < std::numeric_limits<int>::max() / 2); - return static_cast<int>(encountered_call_instructions_.size()) * 2; - } - void unsupported(FullDecoder* decoder, LiftoffBailoutReason reason, const char* detail) { DCHECK_NE(kSuccess, reason); @@ -5919,6 +5913,20 @@ class LiftoffCompiler { } Register tmp1 = scratch_null; // Done with null checks. + // Add Smi check if the source type may store a Smi (i31ref or JS Smi). + ValueType i31ref = ValueType::Ref(HeapType::kI31); + // Ref.extern can also contain Smis, however there isn't any type that + // could downcast to ref.extern. + DCHECK(!rtt_type.is_reference_to(HeapType::kExtern)); + // Ref.i31 check has its own implementation. + DCHECK(!rtt_type.is_reference_to(HeapType::kI31)); + if (IsSubtypeOf(i31ref, obj_type, module)) { + Label* i31_target = + IsSubtypeOf(i31ref, rtt_type, module) ? &match : no_match; + __ emit_smi_check(obj_reg, i31_target, LiftoffAssembler::kJumpOnSmi, + frozen); + } + __ LoadMap(tmp1, obj_reg); // {tmp1} now holds the object's map. 
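The added Smi handling gives SubtypeCheck a three-stage structure; a sketch of the order (names from the hunk above, simplified):

// 1. null check     -> match or no_match, per the null semantics in use
// 2. Smi-tag check  -> match if the target type also admits i31ref,
//                      otherwise no_match
// 3. heap object    -> LoadMap(obj) and run the supertype-table walk
//
// Stage 2 must come before the map load because a Smi has no map: e.g.
// testing an i31ref value against a struct RTT fails at the tag check
// instead of dereferencing a nonexistent map.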
@@ -5955,7 +5963,7 @@ class LiftoffCompiler { } void RefTest(FullDecoder* decoder, const Value& obj, const Value& rtt, - Value* /* result_val */) { + Value* /* result_val */, bool null_succeeds) { Label return_false, done; LiftoffRegList pinned; LiftoffRegister rtt_reg = pinned.set(__ PopToRegister(pinned)); @@ -5969,7 +5977,7 @@ class LiftoffCompiler { FREEZE_STATE(frozen); SubtypeCheck(decoder->module_, obj_reg.gp(), obj.type, rtt_reg.gp(), rtt.type, scratch_null, result.gp(), &return_false, - kNullFails, frozen); + null_succeeds ? kNullSucceeds : kNullFails, frozen); __ LoadConstant(result, WasmValue(1)); // TODO(jkummerow): Emit near jumps on platforms that have them. @@ -5982,6 +5990,25 @@ class LiftoffCompiler { __ PushRegister(kI32, result); } + void RefTestAbstract(FullDecoder* decoder, const Value& obj, HeapType type, + Value* result_val, bool null_succeeds) { + switch (type.representation()) { + case HeapType::kEq: + return RefIsEq(decoder, obj, result_val, null_succeeds); + case HeapType::kI31: + return RefIsI31(decoder, obj, result_val, null_succeeds); + case HeapType::kData: + return RefIsData(decoder, obj, result_val, null_succeeds); + case HeapType::kArray: + return RefIsArray(decoder, obj, result_val, null_succeeds); + case HeapType::kAny: + // Any may never need a cast as it is either implicitly convertible or + // never convertible for any given type. + default: + UNREACHABLE(); + } + } + void RefCast(FullDecoder* decoder, const Value& obj, const Value& rtt, Value* result) { if (v8_flags.experimental_wasm_assume_ref_cast_succeeds) { @@ -6068,9 +6095,12 @@ class LiftoffCompiler { Register tmp1 = no_reg; Register tmp2 = no_reg; Label* no_match; + bool null_succeeds; - TypeCheck(ValueType obj_type, Label* no_match) - : obj_type(obj_type), no_match(no_match) {} + TypeCheck(ValueType obj_type, Label* no_match, bool null_succeeds) + : obj_type(obj_type), + no_match(no_match), + null_succeeds(null_succeeds) {} Register null_reg() { return tmp1; } // After {Initialize}. Register instance_type() { return tmp1; } // After {LoadInstanceType}. @@ -6091,13 +6121,17 @@ class LiftoffCompiler { LoadNullValue(check.null_reg(), pinned); } } - void LoadInstanceType(TypeCheck& check, const FreezeCacheState& frozen) { - if (check.obj_type.is_nullable()) { + void LoadInstanceType(TypeCheck& check, const FreezeCacheState& frozen, + Label* on_smi) { + // The check for null_succeeds == true has to be handled by the caller! + // TODO(mliedtke): Reiterate the null_succeeds case once all generic cast + // instructions are implemented. + if (!check.null_succeeds && check.obj_type.is_nullable()) { __ emit_cond_jump(kEqual, check.no_match, kRefNull, check.obj_reg, check.null_reg(), frozen); } - __ emit_smi_check(check.obj_reg, check.no_match, - LiftoffAssembler::kJumpOnSmi, frozen); + __ emit_smi_check(check.obj_reg, on_smi, LiftoffAssembler::kJumpOnSmi, + frozen); __ LoadMap(check.instance_type(), check.obj_reg); __ Load(LiftoffRegister(check.instance_type()), check.instance_type(), no_reg, wasm::ObjectAccess::ToTagged(Map::kInstanceTypeOffset), @@ -6106,7 +6140,7 @@ class LiftoffCompiler { // Abstract type checkers. They all fall through on match. void DataCheck(TypeCheck& check, const FreezeCacheState& frozen) { - LoadInstanceType(check, frozen); + LoadInstanceType(check, frozen, check.no_match); // We're going to test a range of WasmObject instance types with a single // unsigned comparison. 
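// (Expanded note on the "single unsigned comparison" trick used here and
// again in EqCheck below: after `tmp -= FIRST_WASM_OBJECT_TYPE`, any
// instance type below FIRST wraps around to a large unsigned value, so
//   tmp <= LAST_WASM_OBJECT_TYPE - FIRST_WASM_OBJECT_TYPE
// holds exactly when FIRST <= type <= LAST, i.e. both range bounds are
// checked with one branch. E.g. FIRST=10, LAST=12: type 9 becomes
// 0xFFFFFFFF, which is > 2, so it correctly fails the check.)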
Register tmp = check.instance_type(); @@ -6117,7 +6151,7 @@ class LiftoffCompiler { } void ArrayCheck(TypeCheck& check, const FreezeCacheState& frozen) { - LoadInstanceType(check, frozen); + LoadInstanceType(check, frozen, check.no_match); LiftoffRegister instance_type(check.instance_type()); __ emit_i32_cond_jumpi(kUnequal, check.no_match, check.instance_type(), WASM_ARRAY_TYPE, frozen); @@ -6128,18 +6162,36 @@ class LiftoffCompiler { LiftoffAssembler::kJumpOnNotSmi, frozen); } + void EqCheck(TypeCheck& check, const FreezeCacheState& frozen) { + Label match; + LoadInstanceType(check, frozen, &match); + // We're going to test a range of WasmObject instance types with a single + // unsigned comparison. + Register tmp = check.instance_type(); + __ emit_i32_subi(tmp, tmp, FIRST_WASM_OBJECT_TYPE); + __ emit_i32_cond_jumpi(kUnsignedGreaterThan, check.no_match, tmp, + LAST_WASM_OBJECT_TYPE - FIRST_WASM_OBJECT_TYPE, + frozen); + __ bind(&match); + } + using TypeChecker = void (LiftoffCompiler::*)(TypeCheck& check, const FreezeCacheState& frozen); template <TypeChecker type_checker> - void AbstractTypeCheck(const Value& object) { + void AbstractTypeCheck(const Value& object, bool null_succeeds) { Label match, no_match, done; - TypeCheck check(object.type, &no_match); + TypeCheck check(object.type, &no_match, null_succeeds); Initialize(check, kPop); LiftoffRegister result(check.tmp1); { FREEZE_STATE(frozen); + if (null_succeeds && check.obj_type.is_nullable()) { + __ emit_cond_jump(kEqual, &match, kRefNull, check.obj_reg, + check.null_reg(), frozen); + } + (this->*type_checker)(check, frozen); __ bind(&match); @@ -6155,26 +6207,32 @@ class LiftoffCompiler { } void RefIsData(FullDecoder* /* decoder */, const Value& object, - Value* /* result_val */) { - AbstractTypeCheck<&LiftoffCompiler::DataCheck>(object); + Value* /* result_val */, bool null_succeeds = false) { + AbstractTypeCheck<&LiftoffCompiler::DataCheck>(object, null_succeeds); + } + + void RefIsEq(FullDecoder* /* decoder */, const Value& object, + Value* /* result_val */, bool null_succeeds) { + AbstractTypeCheck<&LiftoffCompiler::EqCheck>(object, null_succeeds); } void RefIsArray(FullDecoder* /* decoder */, const Value& object, - Value* /* result_val */) { - AbstractTypeCheck<&LiftoffCompiler::ArrayCheck>(object); + Value* /* result_val */, bool null_succeeds = false) { + AbstractTypeCheck<&LiftoffCompiler::ArrayCheck>(object, null_succeeds); } - void RefIsI31(FullDecoder* decoder, const Value& object, - Value* /* result */) { - AbstractTypeCheck<&LiftoffCompiler::I31Check>(object); + void RefIsI31(FullDecoder* decoder, const Value& object, Value* /* result */, + bool null_succeeds = false) { + AbstractTypeCheck<&LiftoffCompiler::I31Check>(object, null_succeeds); } template <TypeChecker type_checker> void AbstractTypeCast(const Value& object, FullDecoder* decoder, ValueKind result_kind) { + bool null_succeeds = false; // TODO(mliedtke): Use parameter. Label* trap_label = AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapIllegalCast); - TypeCheck check(object.type, trap_label); + TypeCheck check(object.type, trap_label, null_succeeds); Initialize(check, kPeek); FREEZE_STATE(frozen); (this->*type_checker)(check, frozen); @@ -6196,13 +6254,14 @@ class LiftoffCompiler { template <TypeChecker type_checker> void BrOnAbstractType(const Value& object, FullDecoder* decoder, uint32_t br_depth) { + bool null_succeeds = false; // TODO(mliedtke): Use parameter. // Avoid having sequences of branches do duplicate work. 
if (br_depth != decoder->control_depth() - 1) { __ PrepareForBranch(decoder->control_at(br_depth)->br_merge()->arity, {}); } Label no_match; - TypeCheck check(object.type, &no_match); + TypeCheck check(object.type, &no_match, null_succeeds); Initialize(check, kPeek); FREEZE_STATE(frozen); @@ -6215,13 +6274,14 @@ class LiftoffCompiler { template <TypeChecker type_checker> void BrOnNonAbstractType(const Value& object, FullDecoder* decoder, uint32_t br_depth) { + bool null_succeeds = false; // TODO(mliedtke): Use parameter. // Avoid having sequences of branches do duplicate work. if (br_depth != decoder->control_depth() - 1) { __ PrepareForBranch(decoder->control_at(br_depth)->br_merge()->arity, {}); } Label no_match, end; - TypeCheck check(object.type, &no_match); + TypeCheck check(object.type, &no_match, null_succeeds); Initialize(check, kPeek); FREEZE_STATE(frozen); @@ -7188,18 +7248,10 @@ class LiftoffCompiler { nullptr, false, false, true); // Compare against expected signature. - if (v8_flags.wasm_type_canonicalization) { - LOAD_INSTANCE_FIELD(tmp_const, IsorecursiveCanonicalTypes, - kSystemPointerSize, pinned); - __ Load(LiftoffRegister(tmp_const), tmp_const, no_reg, - imm.sig_imm.index * kInt32Size, LoadType::kI32Load); - } else { - uint32_t canonical_sig_num = - env_->module->per_module_canonical_type_ids[imm.sig_imm.index]; - DCHECK_GE(canonical_sig_num, 0); - DCHECK_GE(kMaxInt, canonical_sig_num); - __ LoadConstant(LiftoffRegister(tmp_const), WasmValue(canonical_sig_num)); - } + LOAD_INSTANCE_FIELD(tmp_const, IsorecursiveCanonicalTypes, + kSystemPointerSize, pinned); + __ Load(LiftoffRegister(tmp_const), tmp_const, no_reg, + imm.sig_imm.index * kInt32Size, LoadType::kI32Load); Label* sig_mismatch_label = AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapFuncSigMismatch); @@ -7279,10 +7331,11 @@ class LiftoffCompiler { ValueKind kIntPtrKind = kPointerKind; LiftoffRegList pinned; + LiftoffRegister func_ref = pinned.set(__ PopToRegister(pinned)); LiftoffRegister vector = pinned.set(__ GetUnusedRegister(kGpReg, pinned)); - LiftoffAssembler::VarState funcref = - __ cache_state()->stack_state.end()[-1]; - if (funcref.is_reg()) pinned.set(funcref.reg()); + MaybeEmitNullCheck(decoder, func_ref.gp(), pinned, func_ref_type); + LiftoffAssembler::VarState func_ref_var(kRef, func_ref, 0); + __ Fill(vector, liftoff::kFeedbackVectorOffset, kPointerKind); LiftoffAssembler::VarState vector_var(kPointerKind, vector, 0); LiftoffRegister index = pinned.set(__ GetUnusedRegister(kGpReg, pinned)); @@ -7297,9 +7350,9 @@ class LiftoffCompiler { CallRuntimeStub(WasmCode::kCallRefIC, MakeSig::Returns(kPointerKind, kPointerKind) .Params(kPointerKind, kIntPtrKind, kPointerKind), - {vector_var, index_var, funcref}, decoder->position()); + {vector_var, index_var, func_ref_var}, + decoder->position()); - __ cache_state()->stack_state.pop_back(1); // Drop funcref. 
target_reg = LiftoffRegister(kReturnRegister0).gp(); instance_reg = LiftoffRegister(kReturnRegister1).gp(); @@ -7721,7 +7774,6 @@ WasmCompilationResult ExecuteLiftoffCompilation( if (auto* debug_sidetable = compiler_options.debug_sidetable) { *debug_sidetable = debug_sidetable_builder->GenerateDebugSideTable(); } - result.feedback_vector_slots = compiler->GetFeedbackVectorSlots(); if (V8_UNLIKELY(v8_flags.trace_wasm_compilation_times)) { base::TimeDelta time = base::TimeTicks::Now() - start_time; diff --git a/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h b/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h index feec970bf58e0d..a24575ee3d3c08 100644 --- a/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h +++ b/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h @@ -46,11 +46,9 @@ namespace liftoff { // constexpr int32_t kInstanceOffset = - (v8_flags.enable_embedded_constant_pool.value() ? 3 : 2) * - kSystemPointerSize; + (V8_EMBEDDED_CONSTANT_POOL_BOOL ? 3 : 2) * kSystemPointerSize; constexpr int kFeedbackVectorOffset = - (v8_flags.enable_embedded_constant_pool.value() ? 4 : 3) * - kSystemPointerSize; + (V8_EMBEDDED_CONSTANT_POOL_BOOL ? 4 : 3) * kSystemPointerSize; inline MemOperand GetHalfStackSlot(int offset, RegPairHalf half) { int32_t half_offset = @@ -143,7 +141,7 @@ void LiftoffAssembler::PatchPrepareStackFrame( int offset, SafepointTableBuilder* safepoint_table_builder) { int frame_size = GetTotalFrameSize() - - (v8_flags.enable_embedded_constant_pool ? 3 : 2) * kSystemPointerSize; + (V8_EMBEDDED_CONSTANT_POOL_BOOL ? 3 : 2) * kSystemPointerSize; Assembler patching_assembler( AssemblerOptions{}, @@ -1769,56 +1767,64 @@ bool LiftoffAssembler::emit_select(LiftoffRegister dst, Register condition, return false; } -#define SIMD_BINOP_LIST(V) \ - V(f64x2_add, F64x2Add) \ - V(f64x2_sub, F64x2Sub) \ - V(f64x2_mul, F64x2Mul) \ - V(f64x2_div, F64x2Div) \ - V(f64x2_eq, F64x2Eq) \ - V(f64x2_lt, F64x2Lt) \ - V(f64x2_le, F64x2Le) \ - V(f32x4_add, F32x4Add) \ - V(f32x4_sub, F32x4Sub) \ - V(f32x4_mul, F32x4Mul) \ - V(f32x4_div, F32x4Div) \ - V(f32x4_min, F32x4Min) \ - V(f32x4_max, F32x4Max) \ - V(f32x4_eq, F32x4Eq) \ - V(f32x4_lt, F32x4Lt) \ - V(f32x4_le, F32x4Le) \ - V(i64x2_add, I64x2Add) \ - V(i64x2_sub, I64x2Sub) \ - V(i64x2_eq, I64x2Eq) \ - V(i64x2_gt_s, I64x2GtS) \ - V(i32x4_add, I32x4Add) \ - V(i32x4_sub, I32x4Sub) \ - V(i32x4_mul, I32x4Mul) \ - V(i32x4_min_s, I32x4MinS) \ - V(i32x4_min_u, I32x4MinU) \ - V(i32x4_max_s, I32x4MaxS) \ - V(i32x4_max_u, I32x4MaxU) \ - V(i32x4_eq, I32x4Eq) \ - V(i32x4_gt_s, I32x4GtS) \ - V(i32x4_gt_u, I32x4GtU) \ - V(i16x8_add, I16x8Add) \ - V(i16x8_sub, I16x8Sub) \ - V(i16x8_mul, I16x8Mul) \ - V(i16x8_min_s, I16x8MinS) \ - V(i16x8_min_u, I16x8MinU) \ - V(i16x8_max_s, I16x8MaxS) \ - V(i16x8_max_u, I16x8MaxU) \ - V(i16x8_eq, I16x8Eq) \ - V(i16x8_gt_s, I16x8GtS) \ - V(i16x8_gt_u, I16x8GtU) \ - V(i8x16_add, I8x16Add) \ - V(i8x16_sub, I8x16Sub) \ - V(i8x16_min_s, I8x16MinS) \ - V(i8x16_min_u, I8x16MinU) \ - V(i8x16_max_s, I8x16MaxS) \ - V(i8x16_max_u, I8x16MaxU) \ - V(i8x16_eq, I8x16Eq) \ - V(i8x16_gt_s, I8x16GtS) \ - V(i8x16_gt_u, I8x16GtU) +#define SIMD_BINOP_LIST(V) \ + V(f64x2_add, F64x2Add) \ + V(f64x2_sub, F64x2Sub) \ + V(f64x2_mul, F64x2Mul) \ + V(f64x2_div, F64x2Div) \ + V(f64x2_eq, F64x2Eq) \ + V(f64x2_lt, F64x2Lt) \ + V(f64x2_le, F64x2Le) \ + V(f32x4_add, F32x4Add) \ + V(f32x4_sub, F32x4Sub) \ + V(f32x4_mul, F32x4Mul) \ + V(f32x4_div, F32x4Div) \ + V(f32x4_min, F32x4Min) \ + V(f32x4_max, F32x4Max) \ + V(f32x4_eq, F32x4Eq) \ + V(f32x4_lt, 
F32x4Lt) \ + V(f32x4_le, F32x4Le) \ + V(i64x2_add, I64x2Add) \ + V(i64x2_sub, I64x2Sub) \ + V(i64x2_eq, I64x2Eq) \ + V(i64x2_gt_s, I64x2GtS) \ + V(i32x4_add, I32x4Add) \ + V(i32x4_sub, I32x4Sub) \ + V(i32x4_mul, I32x4Mul) \ + V(i32x4_min_s, I32x4MinS) \ + V(i32x4_min_u, I32x4MinU) \ + V(i32x4_max_s, I32x4MaxS) \ + V(i32x4_max_u, I32x4MaxU) \ + V(i32x4_eq, I32x4Eq) \ + V(i32x4_gt_s, I32x4GtS) \ + V(i32x4_gt_u, I32x4GtU) \ + V(i16x8_add, I16x8Add) \ + V(i16x8_sub, I16x8Sub) \ + V(i16x8_mul, I16x8Mul) \ + V(i16x8_min_s, I16x8MinS) \ + V(i16x8_min_u, I16x8MinU) \ + V(i16x8_max_s, I16x8MaxS) \ + V(i16x8_max_u, I16x8MaxU) \ + V(i16x8_eq, I16x8Eq) \ + V(i16x8_gt_s, I16x8GtS) \ + V(i16x8_gt_u, I16x8GtU) \ + V(i16x8_add_sat_s, I16x8AddSatS) \ + V(i16x8_sub_sat_s, I16x8SubSatS) \ + V(i16x8_add_sat_u, I16x8AddSatU) \ + V(i16x8_sub_sat_u, I16x8SubSatU) \ + V(i8x16_add, I8x16Add) \ + V(i8x16_sub, I8x16Sub) \ + V(i8x16_min_s, I8x16MinS) \ + V(i8x16_min_u, I8x16MinU) \ + V(i8x16_max_s, I8x16MaxS) \ + V(i8x16_max_u, I8x16MaxU) \ + V(i8x16_eq, I8x16Eq) \ + V(i8x16_gt_s, I8x16GtS) \ + V(i8x16_gt_u, I8x16GtU) \ + V(i8x16_add_sat_s, I8x16AddSatS) \ + V(i8x16_sub_sat_s, I8x16SubSatS) \ + V(i8x16_add_sat_u, I8x16AddSatU) \ + V(i8x16_sub_sat_u, I8x16SubSatU) #define EMIT_SIMD_BINOP(name, op) \ void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \ @@ -1829,6 +1835,105 @@ SIMD_BINOP_LIST(EMIT_SIMD_BINOP) #undef EMIT_SIMD_BINOP #undef SIMD_BINOP_LIST +#define SIMD_BINOP_WITH_SCRATCH_LIST(V) \ + V(f64x2_ne, F64x2Ne) \ + V(f32x4_ne, F32x4Ne) \ + V(i64x2_ne, I64x2Ne) \ + V(i64x2_ge_s, I64x2GeS) \ + V(i32x4_ne, I32x4Ne) \ + V(i32x4_ge_s, I32x4GeS) \ + V(i32x4_ge_u, I32x4GeU) \ + V(i16x8_ne, I16x8Ne) \ + V(i16x8_ge_s, I16x8GeS) \ + V(i16x8_ge_u, I16x8GeU) \ + V(i8x16_ne, I8x16Ne) \ + V(i8x16_ge_s, I8x16GeS) \ + V(i8x16_ge_u, I8x16GeU) + +#define EMIT_SIMD_BINOP_WITH_SCRATCH(name, op) \ + void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \ + LiftoffRegister rhs) { \ + op(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), \ + kScratchSimd128Reg); \ + } +SIMD_BINOP_WITH_SCRATCH_LIST(EMIT_SIMD_BINOP_WITH_SCRATCH) +#undef EMIT_SIMD_BINOP_WITH_SCRATCH +#undef SIMD_BINOP_WITH_SCRATCH_LIST + +#define SIMD_SHIFT_RR_LIST(V) \ + V(i64x2_shl, I64x2Shl) \ + V(i64x2_shr_s, I64x2ShrS) \ + V(i64x2_shr_u, I64x2ShrU) \ + V(i32x4_shl, I32x4Shl) \ + V(i32x4_shr_s, I32x4ShrS) \ + V(i32x4_shr_u, I32x4ShrU) \ + V(i16x8_shl, I16x8Shl) \ + V(i16x8_shr_s, I16x8ShrS) \ + V(i16x8_shr_u, I16x8ShrU) \ + V(i8x16_shl, I8x16Shl) \ + V(i8x16_shr_s, I8x16ShrS) \ + V(i8x16_shr_u, I8x16ShrU) + +#define EMIT_SIMD_SHIFT_RR(name, op) \ + void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \ + LiftoffRegister rhs) { \ + op(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.gp(), kScratchSimd128Reg); \ + } +SIMD_SHIFT_RR_LIST(EMIT_SIMD_SHIFT_RR) +#undef EMIT_SIMD_SHIFT_RR +#undef SIMD_SHIFT_RR_LIST + +#define SIMD_SHIFT_RI_LIST(V) \ + V(i64x2_shli, I64x2Shl) \ + V(i64x2_shri_s, I64x2ShrS) \ + V(i64x2_shri_u, I64x2ShrU) \ + V(i32x4_shli, I32x4Shl) \ + V(i32x4_shri_s, I32x4ShrS) \ + V(i32x4_shri_u, I32x4ShrU) \ + V(i16x8_shli, I16x8Shl) \ + V(i16x8_shri_s, I16x8ShrS) \ + V(i16x8_shri_u, I16x8ShrU) \ + V(i8x16_shli, I8x16Shl) \ + V(i8x16_shri_s, I8x16ShrS) \ + V(i8x16_shri_u, I8x16ShrU) + +#define EMIT_SIMD_SHIFT_RI(name, op) \ + void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \ + int32_t rhs) { \ + op(dst.fp().toSimd(), lhs.fp().toSimd(), Operand(rhs), r0, \ + 
kScratchSimd128Reg); \ + } +SIMD_SHIFT_RI_LIST(EMIT_SIMD_SHIFT_RI) +#undef EMIT_SIMD_SHIFT_RI +#undef SIMD_SHIFT_RI_LIST + +#define SIMD_UNOP_LIST(V) \ + V(f64x2_abs, F64x2Abs, , void) \ + V(f64x2_neg, F64x2Neg, , void) \ + V(f64x2_sqrt, F64x2Sqrt, , void) \ + V(f64x2_ceil, F64x2Ceil, true, bool) \ + V(f64x2_floor, F64x2Floor, true, bool) \ + V(f64x2_trunc, F64x2Trunc, true, bool) \ + V(f32x4_abs, F32x4Abs, , void) \ + V(f32x4_neg, F32x4Neg, , void) \ + V(i64x2_neg, I64x2Neg, , void) \ + V(i32x4_neg, I32x4Neg, , void) \ + V(f32x4_sqrt, F32x4Sqrt, , void) \ + V(f32x4_ceil, F32x4Ceil, true, bool) \ + V(f32x4_floor, F32x4Floor, true, bool) \ + V(f32x4_trunc, F32x4Trunc, true, bool) \ + V(i8x16_popcnt, I8x16Popcnt, , void) + +#define EMIT_SIMD_UNOP(name, op, return_val, return_type) \ + return_type LiftoffAssembler::emit_##name(LiftoffRegister dst, \ + LiftoffRegister src) { \ + op(dst.fp().toSimd(), src.fp().toSimd()); \ + return return_val; \ + } +SIMD_UNOP_LIST(EMIT_SIMD_UNOP) +#undef EMIT_SIMD_UNOP +#undef SIMD_UNOP_LIST + void LiftoffAssembler::emit_f64x2_splat(LiftoffRegister dst, LiftoffRegister src) { F64x2Splat(dst.fp().toSimd(), src.fp(), r0); @@ -1963,6 +2068,36 @@ void LiftoffAssembler::emit_i8x16_replace_lane(LiftoffRegister dst, imm_lane_idx, kScratchSimd128Reg); } +void LiftoffAssembler::emit_i64x2_abs(LiftoffRegister dst, + LiftoffRegister src) { + I64x2Abs(dst.fp().toSimd(), src.fp().toSimd(), kScratchSimd128Reg); +} + +void LiftoffAssembler::emit_i32x4_abs(LiftoffRegister dst, + LiftoffRegister src) { + I32x4Abs(dst.fp().toSimd(), src.fp().toSimd(), kScratchSimd128Reg); +} + +void LiftoffAssembler::emit_i16x8_abs(LiftoffRegister dst, + LiftoffRegister src) { + I16x8Abs(dst.fp().toSimd(), src.fp().toSimd(), kScratchSimd128Reg); +} + +void LiftoffAssembler::emit_i16x8_neg(LiftoffRegister dst, + LiftoffRegister src) { + I16x8Neg(dst.fp().toSimd(), src.fp().toSimd(), kScratchSimd128Reg); +} + +void LiftoffAssembler::emit_i8x16_abs(LiftoffRegister dst, + LiftoffRegister src) { + I8x16Abs(dst.fp().toSimd(), src.fp().toSimd(), kScratchSimd128Reg); +} + +void LiftoffAssembler::emit_i8x16_neg(LiftoffRegister dst, + LiftoffRegister src) { + I8x16Neg(dst.fp().toSimd(), src.fp().toSimd(), kScratchSimd128Reg); +} + void LiftoffAssembler::emit_i64x2_mul(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { // TODO(miladfarca): Make use of UseScratchRegisterScope. 
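The SIMD_*_LIST / EMIT_* pairs above use the classic X-macro pattern: one list macro enumerates (name, implementation) pairs, and a one-line emitter macro is applied to the whole list, stamping out one emit_* function per op. A minimal self-contained illustration of the pattern (generic names, not V8 API):

#include <cstdio>

void Add() { std::puts("Add"); }
void Sub() { std::puts("Sub"); }

#define OP_LIST(V) \
  V(add, Add)      \
  V(sub, Sub)

// Expands to: void emit_add() { Add(); }  void emit_sub() { Sub(); }
#define EMIT_OP(name, impl) \
  void emit_##name() { impl(); }
OP_LIST(EMIT_OP)
#undef EMIT_OP
#undef OP_LIST

int main() {
  emit_add();
  emit_sub();
  return 0;
}

Adding an op then means adding one line to the list rather than writing another near-identical function body by hand, which is exactly what the hunks above do for the saturating add/sub entries.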
@@ -1985,82 +2120,14 @@ void LiftoffAssembler::emit_f64x2_max(LiftoffRegister dst, LiftoffRegister lhs, kScratchSimd128Reg, kScratchSimd128Reg2); } -void LiftoffAssembler::emit_f64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - F64x2Ne(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_f32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - F32x4Ne(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i64x2_ne(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I64x2Ne(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i64x2_ge_s(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I64x2GeS(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i32x4_ne(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I32x4Ne(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i32x4_ge_s(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I32x4GeS(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i32x4_ge_u(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I32x4GeU(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i16x8_ne(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I16x8Ne(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i16x8_ge_s(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I16x8GeS(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i16x8_ge_u(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I16x8GeU(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i8x16_ne(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I8x16Ne(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); -} - -void LiftoffAssembler::emit_i8x16_ge_s(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I8x16GeS(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); +bool LiftoffAssembler::emit_f64x2_nearest_int(LiftoffRegister dst, + LiftoffRegister src) { + return false; } -void LiftoffAssembler::emit_i8x16_ge_u(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - I8x16GeU(dst.fp().toSimd(), lhs.fp().toSimd(), rhs.fp().toSimd(), - kScratchSimd128Reg); +bool LiftoffAssembler::emit_f32x4_nearest_int(LiftoffRegister dst, + LiftoffRegister src) { + return false; } void LiftoffAssembler::LoadTransform(LiftoffRegister dst, Register src_addr, @@ -2132,45 +2199,6 @@ void LiftoffAssembler::emit_s128_relaxed_laneselect(LiftoffRegister dst, bailout(kRelaxedSimd, "emit_s128_relaxed_laneselect"); } -void LiftoffAssembler::emit_f64x2_abs(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_f64x2_abs"); -} - -void LiftoffAssembler::emit_f64x2_neg(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_f64x2neg"); -} - -void 
LiftoffAssembler::emit_f64x2_sqrt(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_f64x2sqrt"); -} - -bool LiftoffAssembler::emit_f64x2_ceil(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "f64x2.ceil"); - return true; -} - -bool LiftoffAssembler::emit_f64x2_floor(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "f64x2.floor"); - return true; -} - -bool LiftoffAssembler::emit_f64x2_trunc(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "f64x2.trunc"); - return true; -} - -bool LiftoffAssembler::emit_f64x2_nearest_int(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "f64x2.nearest_int"); - return true; -} - void LiftoffAssembler::emit_f64x2_pmin(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { bailout(kSimd, "pmin unimplemented"); @@ -2208,45 +2236,6 @@ void LiftoffAssembler::emit_f64x2_promote_low_f32x4(LiftoffRegister dst, bailout(kSimd, "f64x2.promote_low_f32x4"); } -void LiftoffAssembler::emit_f32x4_abs(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_f32x4_abs"); -} - -void LiftoffAssembler::emit_f32x4_neg(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_f32x4neg"); -} - -void LiftoffAssembler::emit_f32x4_sqrt(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_f32x4sqrt"); -} - -bool LiftoffAssembler::emit_f32x4_ceil(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "f32x4.ceil"); - return true; -} - -bool LiftoffAssembler::emit_f32x4_floor(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "f32x4.floor"); - return true; -} - -bool LiftoffAssembler::emit_f32x4_trunc(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "f32x4.trunc"); - return true; -} - -bool LiftoffAssembler::emit_f32x4_nearest_int(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "f32x4.nearest_int"); - return true; -} - void LiftoffAssembler::emit_f32x4_relaxed_min(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { @@ -2269,48 +2258,11 @@ void LiftoffAssembler::emit_f32x4_pmax(LiftoffRegister dst, LiftoffRegister lhs, bailout(kSimd, "pmax unimplemented"); } -void LiftoffAssembler::emit_i64x2_neg(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_i64x2neg"); -} - void LiftoffAssembler::emit_i64x2_alltrue(LiftoffRegister dst, LiftoffRegister src) { bailout(kSimd, "i64x2_alltrue"); } -void LiftoffAssembler::emit_i64x2_shl(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i64x2_shl"); -} - -void LiftoffAssembler::emit_i64x2_shli(LiftoffRegister dst, LiftoffRegister lhs, - int32_t rhs) { - bailout(kSimd, "i64x2_shli"); -} - -void LiftoffAssembler::emit_i64x2_shr_s(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i64x2_shr_s"); -} - -void LiftoffAssembler::emit_i64x2_shri_s(LiftoffRegister dst, - LiftoffRegister lhs, int32_t rhs) { - bailout(kSimd, "i64x2_shri_s"); -} - -void LiftoffAssembler::emit_i64x2_shr_u(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i64x2_shr_u"); -} - -void LiftoffAssembler::emit_i64x2_shri_u(LiftoffRegister dst, - LiftoffRegister lhs, int32_t rhs) { - bailout(kSimd, "i64x2_shri_u"); -} - void LiftoffAssembler::emit_i64x2_extmul_low_i32x4_s(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2) { @@ -2360,11 +2312,6 @@ void 
LiftoffAssembler::emit_i64x2_extmul_high_i32x4_u(LiftoffRegister dst, bailout(kSimd, "i64x2_extmul_high_i32x4_u unsupported"); } -void LiftoffAssembler::emit_i32x4_neg(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_i32x4neg"); -} - void LiftoffAssembler::emit_i32x4_alltrue(LiftoffRegister dst, LiftoffRegister src) { bailout(kSimd, "i32x4_alltrue"); @@ -2375,38 +2322,6 @@ void LiftoffAssembler::emit_i32x4_bitmask(LiftoffRegister dst, bailout(kSimd, "i32x4_bitmask"); } -void LiftoffAssembler::emit_i32x4_shl(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i32x4_shl"); -} - -void LiftoffAssembler::emit_i32x4_shli(LiftoffRegister dst, LiftoffRegister lhs, - int32_t rhs) { - bailout(kSimd, "i32x4_shli"); -} - -void LiftoffAssembler::emit_i32x4_shr_s(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i32x4_shr_s"); -} - -void LiftoffAssembler::emit_i32x4_shri_s(LiftoffRegister dst, - LiftoffRegister lhs, int32_t rhs) { - bailout(kSimd, "i32x4_shri_s"); -} - -void LiftoffAssembler::emit_i32x4_shr_u(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i32x4_shr_u"); -} - -void LiftoffAssembler::emit_i32x4_shri_u(LiftoffRegister dst, - LiftoffRegister lhs, int32_t rhs) { - bailout(kSimd, "i32x4_shri_u"); -} - void LiftoffAssembler::emit_i32x4_dot_i16x8_s(LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { @@ -2447,11 +2362,6 @@ void LiftoffAssembler::emit_i32x4_extmul_high_i16x8_u(LiftoffRegister dst, bailout(kSimd, "i32x4_extmul_high_i16x8_u unsupported"); } -void LiftoffAssembler::emit_i16x8_neg(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_i16x8neg"); -} - void LiftoffAssembler::emit_i16x8_alltrue(LiftoffRegister dst, LiftoffRegister src) { bailout(kSimd, "i16x8_alltrue"); @@ -2462,62 +2372,6 @@ void LiftoffAssembler::emit_i16x8_bitmask(LiftoffRegister dst, bailout(kSimd, "i16x8_bitmask"); } -void LiftoffAssembler::emit_i16x8_shl(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i16x8_shl"); -} - -void LiftoffAssembler::emit_i16x8_shli(LiftoffRegister dst, LiftoffRegister lhs, - int32_t rhs) { - bailout(kSimd, "i16x8_shli"); -} - -void LiftoffAssembler::emit_i16x8_shr_s(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i16x8_shr_s"); -} - -void LiftoffAssembler::emit_i16x8_shri_s(LiftoffRegister dst, - LiftoffRegister lhs, int32_t rhs) { - bailout(kSimd, "i16x8_shri_s"); -} - -void LiftoffAssembler::emit_i16x8_shr_u(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i16x8_shr_u"); -} - -void LiftoffAssembler::emit_i16x8_shri_u(LiftoffRegister dst, - LiftoffRegister lhs, int32_t rhs) { - bailout(kSimd, "i16x8_shri_u"); -} - -void LiftoffAssembler::emit_i16x8_add_sat_s(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kUnsupportedArchitecture, "emit_i16x8addsaturate_s"); -} - -void LiftoffAssembler::emit_i16x8_sub_sat_s(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kUnsupportedArchitecture, "emit_i16x8subsaturate_s"); -} - -void LiftoffAssembler::emit_i16x8_sub_sat_u(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kUnsupportedArchitecture, "emit_i16x8subsaturate_u"); -} - -void LiftoffAssembler::emit_i16x8_add_sat_u(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - 
bailout(kUnsupportedArchitecture, "emit_i16x8addsaturate_u"); -} - void LiftoffAssembler::emit_i16x8_extadd_pairwise_i8x16_s(LiftoffRegister dst, LiftoffRegister src) { bailout(kSimd, "i16x8.extadd_pairwise_i8x16_s"); @@ -2585,16 +2439,6 @@ void LiftoffAssembler::emit_i8x16_shuffle(LiftoffRegister dst, bailout(kSimd, "i8x16_shuffle"); } -void LiftoffAssembler::emit_i8x16_popcnt(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "i8x16.popcnt"); -} - -void LiftoffAssembler::emit_i8x16_neg(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_i8x16neg"); -} - void LiftoffAssembler::emit_v128_anytrue(LiftoffRegister dst, LiftoffRegister src) { bailout(kSimd, "v8x16_anytrue"); @@ -2610,44 +2454,6 @@ void LiftoffAssembler::emit_i8x16_bitmask(LiftoffRegister dst, bailout(kSimd, "i8x16_bitmask"); } -void LiftoffAssembler::emit_i8x16_shl(LiftoffRegister dst, LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i8x16_shl"); -} - -void LiftoffAssembler::emit_i8x16_shli(LiftoffRegister dst, LiftoffRegister lhs, - int32_t rhs) { - bailout(kSimd, "i8x16_shli"); -} - -void LiftoffAssembler::emit_i8x16_shr_s(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i8x16_shr_s"); -} - -void LiftoffAssembler::emit_i8x16_shri_s(LiftoffRegister dst, - LiftoffRegister lhs, int32_t rhs) { - bailout(kSimd, "i8x16_shri_s"); -} - -void LiftoffAssembler::emit_i8x16_shr_u(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kSimd, "i8x16_shr_u"); -} - -void LiftoffAssembler::emit_i8x16_shri_u(LiftoffRegister dst, - LiftoffRegister lhs, int32_t rhs) { - bailout(kSimd, "i8x16_shri_u"); -} - -void LiftoffAssembler::emit_i8x16_add_sat_s(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kUnsupportedArchitecture, "emit_i8x16addsaturate_s"); -} - void LiftoffAssembler::emit_s128_const(LiftoffRegister dst, const uint8_t imms[16]) { bailout(kUnsupportedArchitecture, "emit_s128_const"); @@ -2796,44 +2602,6 @@ void LiftoffAssembler::emit_i16x8_rounding_average_u(LiftoffRegister dst, bailout(kUnsupportedArchitecture, "emit_i16x8_rounding_average_u"); } -void LiftoffAssembler::emit_i8x16_abs(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_i8x16_abs"); -} - -void LiftoffAssembler::emit_i16x8_abs(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_i16x8_abs"); -} - -void LiftoffAssembler::emit_i32x4_abs(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kUnsupportedArchitecture, "emit_i32x4_abs"); -} - -void LiftoffAssembler::emit_i64x2_abs(LiftoffRegister dst, - LiftoffRegister src) { - bailout(kSimd, "i64x2.abs"); -} - -void LiftoffAssembler::emit_i8x16_sub_sat_s(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kUnsupportedArchitecture, "emit_i8x16subsaturate_s"); -} - -void LiftoffAssembler::emit_i8x16_sub_sat_u(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kUnsupportedArchitecture, "emit_i8x16subsaturate_u"); -} - -void LiftoffAssembler::emit_i8x16_add_sat_u(LiftoffRegister dst, - LiftoffRegister lhs, - LiftoffRegister rhs) { - bailout(kUnsupportedArchitecture, "emit_i8x16addsaturate_u"); -} - void LiftoffAssembler::emit_f32x4_qfma(LiftoffRegister dst, LiftoffRegister src1, LiftoffRegister src2, @@ -2946,6 +2714,9 @@ void LiftoffAssembler::CallC(const ValueKindSig* sig, case kF64: StoreF64(args->fp(), MemOperand(sp, arg_bytes), r0); break; + 
case kS128: + StoreSimd128(args->fp().toSimd(), MemOperand(sp, arg_bytes), r0); + break; default: UNREACHABLE(); } @@ -2992,6 +2763,9 @@ void LiftoffAssembler::CallC(const ValueKindSig* sig, case kF64: LoadF64(result_reg->fp(), MemOperand(sp)); break; + case kS128: + LoadSimd128(result_reg->fp().toSimd(), MemOperand(sp), r0); + break; default: UNREACHABLE(); } diff --git a/deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h b/deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h index 8a45f09bf6fd54..f3e7f90b998929 100644 --- a/deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h +++ b/deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h @@ -3110,6 +3110,9 @@ void LiftoffAssembler::CallC(const ValueKindSig* sig, case kF64: StoreF64(args->fp(), MemOperand(sp, arg_bytes)); break; + case kS128: + StoreV128(args->fp(), MemOperand(sp, arg_bytes), r0); + break; default: UNREACHABLE(); } @@ -3156,6 +3159,9 @@ void LiftoffAssembler::CallC(const ValueKindSig* sig, case kF64: LoadF64(result_reg->fp(), MemOperand(sp)); break; + case kS128: + LoadV128(result_reg->fp(), MemOperand(sp), r0); + break; default: UNREACHABLE(); } diff --git a/deps/v8/src/wasm/baseline/x64/liftoff-assembler-x64.h b/deps/v8/src/wasm/baseline/x64/liftoff-assembler-x64.h index 3d3c16b1872e88..ace71919e5abf4 100644 --- a/deps/v8/src/wasm/baseline/x64/liftoff-assembler-x64.h +++ b/deps/v8/src/wasm/baseline/x64/liftoff-assembler-x64.h @@ -95,8 +95,8 @@ inline Operand GetMemOp(LiftoffAssembler* assm, Register addr, return Operand(addr, scratch, scale_factor, 0); } -inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Operand src, - ValueKind kind) { +inline void LoadFromStack(LiftoffAssembler* assm, LiftoffRegister dst, + Operand src, ValueKind kind) { switch (kind) { case kI32: assm->movl(dst.gp(), src); @@ -105,6 +105,7 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Operand src, case kRefNull: case kRef: case kRtt: + // Stack slots are uncompressed even when heap pointers are compressed. assm->movq(dst.gp(), src); break; case kF32: @@ -121,19 +122,18 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Operand src, } } -inline void Store(LiftoffAssembler* assm, Operand dst, LiftoffRegister src, - ValueKind kind) { +inline void StoreToStack(LiftoffAssembler* assm, Operand dst, + LiftoffRegister src, ValueKind kind) { switch (kind) { case kI32: assm->movl(dst, src.gp()); break; case kI64: - assm->movq(dst, src.gp()); - break; case kRefNull: case kRef: case kRtt: - assm->StoreTaggedField(dst, src.gp()); + // Stack slots are uncompressed even when heap pointers are compressed. 
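+      // (With pointer compression enabled, {StoreTaggedField} would store
+      // only the 32-bit compressed value; Liftoff spill slots always hold the
+      // full 64-bit value, so references are stored with the same full-width
+      // {movq} as {kI64}.)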
+ assm->movq(dst, src.gp()); break; case kF32: assm->Movss(dst, src.fp()); @@ -860,20 +860,20 @@ void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst, uint32_t caller_slot_idx, ValueKind kind) { Operand src(rbp, kSystemPointerSize * (caller_slot_idx + 1)); - liftoff::Load(this, dst, src, kind); + liftoff::LoadFromStack(this, dst, src, kind); } void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src, uint32_t caller_slot_idx, ValueKind kind) { Operand dst(rbp, kSystemPointerSize * (caller_slot_idx + 1)); - liftoff::Store(this, dst, src, kind); + liftoff::StoreToStack(this, dst, src, kind); } void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister reg, int offset, ValueKind kind) { Operand src(rsp, offset); - liftoff::Load(this, reg, src, kind); + liftoff::LoadFromStack(this, reg, src, kind); } void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset, @@ -977,7 +977,7 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) { } void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) { - liftoff::Load(this, reg, liftoff::GetStackSlot(offset), kind); + liftoff::LoadFromStack(this, reg, liftoff::GetStackSlot(offset), kind); } void LiftoffAssembler::FillI64Half(Register, int offset, RegPairHalf) { @@ -4224,7 +4224,7 @@ void LiftoffAssembler::CallC(const ValueKindSig* sig, int arg_bytes = 0; for (ValueKind param_kind : sig->parameters()) { - liftoff::Store(this, Operand(rsp, arg_bytes), *args++, param_kind); + liftoff::StoreToStack(this, Operand(rsp, arg_bytes), *args++, param_kind); arg_bytes += value_kind_size(param_kind); } DCHECK_LE(arg_bytes, stack_bytes); @@ -4251,7 +4251,8 @@ void LiftoffAssembler::CallC(const ValueKindSig* sig, // Load potential output value from the buffer on the stack. if (out_argument_kind != kVoid) { - liftoff::Load(this, *next_result_reg, Operand(rsp, 0), out_argument_kind); + liftoff::LoadFromStack(this, *next_result_reg, Operand(rsp, 0), + out_argument_kind); } addq(rsp, Immediate(stack_bytes)); diff --git a/deps/v8/src/wasm/c-api.cc b/deps/v8/src/wasm/c-api.cc index 4473e205c032ed..df97940aa48649 100644 --- a/deps/v8/src/wasm/c-api.cc +++ b/deps/v8/src/wasm/c-api.cc @@ -1562,10 +1562,10 @@ void PushArgs(const i::wasm::FunctionSig* sig, const Val args[], // TODO(7748): Make sure this works for all heap types. packer->Push(WasmRefToV8(store->i_isolate(), args[i].ref())->ptr()); break; - case i::wasm::kRtt: case i::wasm::kS128: // TODO(7748): Implement. UNIMPLEMENTED(); + case i::wasm::kRtt: case i::wasm::kI8: case i::wasm::kI16: case i::wasm::kVoid: @@ -1601,10 +1601,10 @@ void PopArgs(const i::wasm::FunctionSig* sig, Val results[], results[i] = Val(V8RefValueToWasm(store, obj)); break; } - case i::wasm::kRtt: case i::wasm::kS128: // TODO(7748): Implement. UNIMPLEMENTED(); + case i::wasm::kRtt: case i::wasm::kI8: case i::wasm::kI16: case i::wasm::kVoid: @@ -1869,10 +1869,10 @@ auto Global::get() const -> Val { } return Val(V8RefValueToWasm(store, result)); } - case i::wasm::kRtt: case i::wasm::kS128: // TODO(7748): Implement these. 
       UNIMPLEMENTED();
+    case i::wasm::kRtt:
     case i::wasm::kI8:
     case i::wasm::kI16:
     case i::wasm::kVoid:
diff --git a/deps/v8/src/wasm/canonical-types.cc b/deps/v8/src/wasm/canonical-types.cc
index 43cac1758973de..c5cb34b54c77ca 100644
--- a/deps/v8/src/wasm/canonical-types.cc
+++ b/deps/v8/src/wasm/canonical-types.cc
@@ -10,7 +10,7 @@ namespace v8 {
 namespace internal {
 namespace wasm {
 
-V8_EXPORT_PRIVATE TypeCanonicalizer* GetTypeCanonicalizer() {
+TypeCanonicalizer* GetTypeCanonicalizer() {
   return GetWasmEngine()->type_canonicalizer();
 }
 
@@ -55,8 +55,34 @@ void TypeCanonicalizer::AddRecursiveGroup(WasmModule* module, uint32_t size) {
   }
 }
 
-// An index in a type gets mapped to a relative index if it is inside the new
-// canonical group, or the canonical representative if it is not.
+uint32_t TypeCanonicalizer::AddRecursiveGroup(const FunctionSig* sig) {
+  base::MutexGuard mutex_guard(&mutex_);
+// Types in the signature must be module-independent.
+#if DEBUG
+  for (ValueType type : sig->all()) DCHECK(!type.has_index());
+#endif
+  CanonicalGroup group;
+  group.types.resize(1);
+  group.types[0].type_def = TypeDefinition(sig, kNoSuperType);
+  group.types[0].is_relative_supertype = false;
+  int canonical_index = FindCanonicalGroup(group);
+  if (canonical_index < 0) {
+    canonical_index = static_cast<int>(canonical_supertypes_.size());
+    // We need to copy the signature into the local zone, or else we risk
+    // storing a dangling pointer in the future.
+    auto builder = FunctionSig::Builder(&zone_, sig->return_count(),
+                                        sig->parameter_count());
+    for (auto type : sig->returns()) builder.AddReturn(type);
+    for (auto type : sig->parameters()) builder.AddParam(type);
+    const FunctionSig* allocated_sig = builder.Build();
+    group.types[0].type_def = TypeDefinition(allocated_sig, kNoSuperType);
+    group.types[0].is_relative_supertype = false;
+    canonical_groups_.emplace(group, canonical_index);
+    canonical_supertypes_.emplace_back(kNoSuperType);
+  }
+  return canonical_index;
+}
+
 ValueType TypeCanonicalizer::CanonicalizeValueType(
     const WasmModule* module, ValueType type,
     uint32_t recursive_group_start) const {
@@ -88,8 +114,6 @@ bool TypeCanonicalizer::IsCanonicalSubtype(uint32_t sub_index,
   return false;
 }
 
-// Map all type indices (including supertype) inside {type} to indices relative
-// to {recursive_group_start}.
 TypeCanonicalizer::CanonicalType TypeCanonicalizer::CanonicalizeTypeDef(
     const WasmModule* module, TypeDefinition type,
     uint32_t recursive_group_start) {
diff --git a/deps/v8/src/wasm/canonical-types.h b/deps/v8/src/wasm/canonical-types.h
index 47ad69108383e6..91f0576cedcdf6 100644
--- a/deps/v8/src/wasm/canonical-types.h
+++ b/deps/v8/src/wasm/canonical-types.h
@@ -21,8 +21,8 @@ namespace wasm {
 // types.
 // A recursive group is a subsequence of types explicitly marked in the type
 // section of a wasm module. Identical recursive groups have to be canonicalized
-// to a single canonical group and are considered identical. Respective
-// types in two identical groups are considered identical for all purposes.
+// to a single canonical group. Respective types in two identical groups are
+// considered identical for all purposes.
 // Two groups are considered identical if they have the same shape, and all
 // type indices referenced in the same position in both groups reference:
 // - identical types, if those do not belong to the rec. group,
@@ -43,6 +43,11 @@ class TypeCanonicalizer {
   // Modifies {module->isorecursive_canonical_type_ids}.
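+  // (The group consists of the last {size} types added to {module}; their
+  // entries in {isorecursive_canonical_type_ids} become canonical ids.)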
   V8_EXPORT_PRIVATE void AddRecursiveGroup(WasmModule* module, uint32_t size);
+
+  // Adds a module-independent signature as a recursive group, and canonicalizes
+  // it if an identical one is found. Returns the canonical index of the added
+  // signature.
+  V8_EXPORT_PRIVATE uint32_t AddRecursiveGroup(const FunctionSig* sig);
+
   // Returns whether the type at {sub_index} in {sub_module} is a subtype of the
   // type at {super_index} in {super_module} after canonicalization.
   V8_EXPORT_PRIVATE bool IsCanonicalSubtype(uint32_t sub_index,
@@ -100,9 +105,15 @@ class TypeCanonicalizer {
 
   int FindCanonicalGroup(CanonicalGroup&) const;
 
+  // Canonicalize all types present in {type} (including supertype) according
+  // to {CanonicalizeValueType}.
   CanonicalType CanonicalizeTypeDef(const WasmModule* module,
                                     TypeDefinition type,
                                     uint32_t recursive_group_start);
+
+  // An indexed type gets mapped to a {ValueType::CanonicalWithRelativeIndex}
+  // if its index points inside the new canonical group; otherwise, the index
+  // gets mapped to its canonical representative.
   ValueType CanonicalizeValueType(const WasmModule* module, ValueType type,
                                   uint32_t recursive_group_start) const;
 
diff --git a/deps/v8/src/wasm/constant-expression-interface.cc b/deps/v8/src/wasm/constant-expression-interface.cc
index f32783a0194c43..aa7a2809f38822 100644
--- a/deps/v8/src/wasm/constant-expression-interface.cc
+++ b/deps/v8/src/wasm/constant-expression-interface.cc
@@ -9,7 +9,6 @@
 #include "src/objects/fixed-array-inl.h"
 #include "src/objects/oddball.h"
 #include "src/wasm/decoder.h"
-#include "src/wasm/function-body-decoder-impl.h"
 #include "src/wasm/wasm-objects.h"
 
 namespace v8 {
diff --git a/deps/v8/src/wasm/constant-expression-interface.h b/deps/v8/src/wasm/constant-expression-interface.h
index 94501b71a1cdab..6dc225e7b7e201 100644
--- a/deps/v8/src/wasm/constant-expression-interface.h
+++ b/deps/v8/src/wasm/constant-expression-interface.h
@@ -21,12 +21,13 @@ class JSArrayBuffer;
 
 namespace wasm {
 
-// An interface for WasmFullDecoder used to decode constant expressions. This
-// interface has two modes: only validation (when {isolate_ == nullptr}), which
-// is used in module-decoder, and code-generation (when {isolate_ != nullptr}),
-// which is used in module-instantiate. We merge two distinct functionalities
-// in one class to reduce the number of WasmFullDecoder instantiations, and thus
-// V8 binary code size.
+// An interface for WasmFullDecoder used to decode constant expressions.
+// This interface has two modes: only validation (when {isolate_ == nullptr}),
+// and code-generation (when {isolate_ != nullptr}). We merge two distinct
+// functionalities in one class to reduce the number of WasmFullDecoder
+// instantiations, and thus V8 binary code size.
+// In code-generation mode, the result can be retrieved with {computed_value()}
+// if {!has_error()}, or with {error()} otherwise.
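+// (Validation-only mode is used by module decoding; code-generation mode by
+// module instantiation.)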
class V8_EXPORT_PRIVATE ConstantExpressionInterface { public: static constexpr Decoder::ValidateFlag validate = Decoder::kFullValidation; diff --git a/deps/v8/src/wasm/function-body-decoder-impl.h b/deps/v8/src/wasm/function-body-decoder-impl.h index 5ef0eed5aeda16..a8a173e0a53745 100644 --- a/deps/v8/src/wasm/function-body-decoder-impl.h +++ b/deps/v8/src/wasm/function-body-decoder-impl.h @@ -14,6 +14,8 @@ #include <inttypes.h> +#include <optional> + #include "src/base/small-vector.h" #include "src/base/strings.h" #include "src/base/v8-fallthrough.h" @@ -924,8 +926,7 @@ struct ControlBase : public PcForErrors<validate> { F(StartFunctionBody, Control* block) \ F(FinishFunction) \ F(OnFirstError) \ - F(NextInstruction, WasmOpcode) \ - F(Forward, const Value& from, Value* to) + F(NextInstruction, WasmOpcode) #define INTERFACE_CONSTANT_FUNCTIONS(F) /* force 80 columns */ \ F(I32Const, Value* result, int32_t value) \ @@ -979,6 +980,7 @@ struct ControlBase : public PcForErrors<validate> { const IndexImmediate<validate>& imm) \ F(Trap, TrapReason reason) \ F(NopForTestingUnsupportedInLiftoff) \ + F(Forward, const Value& from, Value* to) \ F(Select, const Value& cond, const Value& fval, const Value& tval, \ Value* result) \ F(BrOrRet, uint32_t depth, uint32_t drop_values) \ @@ -1064,7 +1066,10 @@ struct ControlBase : public PcForErrors<validate> { const Value& dst_index, const Value& length) \ F(I31GetS, const Value& input, Value* result) \ F(I31GetU, const Value& input, Value* result) \ - F(RefTest, const Value& obj, const Value& rtt, Value* result) \ + F(RefTest, const Value& obj, const Value& rtt, Value* result, \ + bool null_succeeds) \ + F(RefTestAbstract, const Value& obj, HeapType type, Value* result, \ + bool null_succeeds) \ F(RefCast, const Value& obj, const Value& rtt, Value* result) \ F(AssertNull, const Value& obj, Value* result) \ F(BrOnCast, const Value& obj, const Value& rtt, Value* result_on_branch, \ @@ -1072,6 +1077,7 @@ struct ControlBase : public PcForErrors<validate> { F(BrOnCastFail, const Value& obj, const Value& rtt, \ Value* result_on_fallthrough, uint32_t depth) \ F(RefIsData, const Value& object, Value* result) \ + F(RefIsEq, const Value& object, Value* result) \ F(RefIsI31, const Value& object, Value* result) \ F(RefIsArray, const Value& object, Value* result) \ F(RefAsData, const Value& object, Value* result) \ @@ -1149,7 +1155,7 @@ class WasmDecoder : public Decoder { WasmFeatures* detected, const FunctionSig* sig, const byte* start, const byte* end, uint32_t buffer_offset = 0) : Decoder(start, end, buffer_offset), - local_types_(zone), + compilation_zone_(zone), module_(module), enabled_(enabled), detected_(detected), @@ -1170,20 +1176,13 @@ class WasmDecoder : public Decoder { } } - Zone* zone() const { return local_types_.get_allocator().zone(); } - - uint32_t num_locals() const { - DCHECK_EQ(num_locals_, local_types_.size()); - return num_locals_; - } + Zone* zone() const { return compilation_zone_; } - ValueType local_type(uint32_t index) const { return local_types_[index]; } + uint32_t num_locals() const { return num_locals_; } - void InitializeLocalsFromSig() { - DCHECK_NOT_NULL(sig_); - DCHECK_EQ(0, this->local_types_.size()); - local_types_.assign(sig_->parameters().begin(), sig_->parameters().end()); - num_locals_ = static_cast<uint32_t>(sig_->parameters().size()); + ValueType local_type(uint32_t index) const { + DCHECK_GE(num_locals_, index); + return local_types_[index]; } // Decodes local definitions in the current decoder. 
@@ -1191,6 +1190,12 @@ class WasmDecoder : public Decoder { // The decoded locals will be appended to {this->local_types_}. // The decoder's pc is not advanced. void DecodeLocals(const byte* pc, uint32_t* total_length) { + DCHECK_NULL(local_types_); + DCHECK_EQ(0, num_locals_); + + // In a first step, count the number of locals and store the decoded + // entries. + num_locals_ = static_cast<uint32_t>(this->sig_->parameter_count()); uint32_t length; *total_length = 0; @@ -1202,7 +1207,12 @@ class WasmDecoder : public Decoder { *total_length += length; TRACE("local decls count: %u\n", entries); - while (entries-- > 0) { + struct DecodedLocalEntry { + uint32_t count; + ValueType type; + }; + base::SmallVector<DecodedLocalEntry, 8> decoded_locals(entries); + for (uint32_t entry = 0; entry < entries; ++entry) { if (!VALIDATE(more())) { return DecodeError( end(), "expected more local decls but reached end of input"); @@ -1213,21 +1223,39 @@ class WasmDecoder : public Decoder { if (!VALIDATE(ok())) { return DecodeError(pc + *total_length, "invalid local count"); } - DCHECK_LE(local_types_.size(), kV8MaxWasmFunctionLocals); - if (!VALIDATE(count <= kV8MaxWasmFunctionLocals - local_types_.size())) { + DCHECK_LE(num_locals_, kV8MaxWasmFunctionLocals); + if (!VALIDATE(count <= kV8MaxWasmFunctionLocals - num_locals_)) { return DecodeError(pc + *total_length, "local count too large"); } *total_length += length; ValueType type = value_type_reader::read_value_type<validate>( this, pc + *total_length, &length, this->module_, enabled_); - if (!VALIDATE(type != kWasmBottom)) return; + if (!VALIDATE(ok())) return; *total_length += length; - local_types_.insert(local_types_.end(), count, type); num_locals_ += count; + decoded_locals[entry] = DecodedLocalEntry{count, type}; } DCHECK(ok()); + + if (num_locals_ == 0) return; + + // Now build the array of local types from the parsed entries. + local_types_ = compilation_zone_->NewArray<ValueType>(num_locals_); + ValueType* locals_ptr = local_types_; + + if (sig_->parameter_count() > 0) { + std::copy(sig_->parameters().begin(), sig_->parameters().end(), + locals_ptr); + locals_ptr += sig_->parameter_count(); + } + + for (auto& entry : decoded_locals) { + std::fill_n(locals_ptr, entry.count, entry.type); + locals_ptr += entry.count; + } + DCHECK_EQ(locals_ptr, local_types_ + num_locals_); } // Shorthand that forwards to the {DecodeError} functions above, passing our @@ -2027,6 +2055,13 @@ class WasmDecoder : public Decoder { return length + imm.length; } case kExprRefTest: + case kExprRefTestNull: { + HeapTypeImmediate<validate> imm(WasmFeatures::All(), decoder, + pc + length, nullptr); + if (io) io->HeapType(imm); + return length + imm.length; + } + case kExprRefTestDeprecated: case kExprRefCast: case kExprRefCastNop: { IndexImmediate<validate> imm(decoder, pc + length, "type index"); @@ -2255,6 +2290,8 @@ class WasmDecoder : public Decoder { case kExprArrayLenDeprecated: case kExprArrayLen: case kExprRefTest: + case kExprRefTestNull: + case kExprRefTestDeprecated: case kExprRefCast: case kExprRefCastNop: case kExprBrOnCast: @@ -2344,13 +2381,9 @@ class WasmDecoder : public Decoder { // clang-format on } - // The {Zone} is implicitly stored in the {ZoneAllocator} which is part of - // this {ZoneVector}. Hence save one field and just get it from there if - // needed (see {zone()} accessor below). 
- ZoneVector<ValueType> local_types_; + Zone* const compilation_zone_; - // Cached value, for speed (yes, it's measurably faster to load this value - // than to load the start and end pointer from a vector, subtract and shift). + ValueType* local_types_ = nullptr; uint32_t num_locals_ = 0; const WasmModule* module_; @@ -2422,13 +2455,13 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> { DCHECK_EQ(this->num_locals(), 0); locals_offset_ = this->pc_offset(); - this->InitializeLocalsFromSig(); - uint32_t params_count = this->num_locals(); uint32_t locals_length; this->DecodeLocals(this->pc(), &locals_length); if (this->failed()) return TraceFailed(); this->consume_bytes(locals_length); int non_defaultable = 0; + uint32_t params_count = + static_cast<uint32_t>(this->sig_->parameter_count()); for (uint32_t index = params_count; index < this->num_locals(); index++) { if (!this->local_type(index).is_defaultable()) non_defaultable++; } @@ -4241,20 +4274,37 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> { } } - // Checks if types are unrelated, thus type checking will always fail. Does - // not account for nullability. + // Returns true if type checking will always fail, either because the types + // are unrelated or because the target_type is one of the null sentinels and + // conversion to null does not succeed. + bool TypeCheckAlwaysFails(Value obj, HeapType expected_type, + bool null_succeeds) { + bool types_unrelated = + !IsSubtypeOf(ValueType::Ref(expected_type), obj.type, this->module_) && + !IsSubtypeOf(obj.type, ValueType::RefNull(expected_type), + this->module_); + // For "unrelated" types the check can still succeed for the null value on + // instructions treating null as a successful check. + return (types_unrelated && (!null_succeeds || !obj.type.is_nullable())) || + (!null_succeeds && + (expected_type.representation() == HeapType::kNone || + expected_type.representation() == HeapType::kNoFunc || + expected_type.representation() == HeapType::kNoExtern)); + } bool TypeCheckAlwaysFails(Value obj, Value rtt) { - return !IsSubtypeOf(ValueType::Ref(rtt.type.ref_index()), obj.type, - this->module_) && - !IsSubtypeOf(obj.type, ValueType::RefNull(rtt.type.ref_index()), - this->module_); + // All old casts / checks treat null as failure. + const bool kNullSucceeds = false; + return TypeCheckAlwaysFails(obj, HeapType(rtt.type.ref_index()), + kNullSucceeds); } - // Checks it {obj} is a subtype of {rtt}'s type, thus checking will always - // succeed. Does not account for nullability. + // Checks if {obj} is a subtype of type, thus checking will always + // succeed. 
+ bool TypeCheckAlwaysSucceeds(Value obj, HeapType type) { + return IsSubtypeOf(obj.type, ValueType::RefNull(type), this->module_); + } bool TypeCheckAlwaysSucceeds(Value obj, Value rtt) { - return IsSubtypeOf(obj.type, ValueType::RefNull(rtt.type.ref_index()), - this->module_); + return TypeCheckAlwaysSucceeds(obj, HeapType(rtt.type.ref_index())); } #define NON_CONST_ONLY \ @@ -4681,7 +4731,80 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> { Push(value); return opcode_length; } + case kExprRefTestNull: case kExprRefTest: { + NON_CONST_ONLY + HeapTypeImmediate<validate> imm( + this->enabled_, this, this->pc_ + opcode_length, this->module_); + if (!VALIDATE(this->ok())) return 0; + opcode_length += imm.length; + + std::optional<Value> rtt; + HeapType target_type = imm.type; + if (imm.type.is_index()) { + rtt = CreateValue(ValueType::Rtt(imm.type.ref_index())); + CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.type.ref_index(), + &rtt.value()); + Push(rtt.value()); + } + + Value obj = Peek(rtt.has_value() ? 1 : 0); + Value value = CreateValue(kWasmI32); + + if (!VALIDATE((obj.type.is_object_reference() && + IsSameTypeHierarchy(obj.type.heap_type(), target_type, + this->module_)) || + obj.type.is_bottom())) { + this->DecodeError( + obj.pc(), + "Invalid types for ref.test: %s of type %s has to " + "be in the same reference type hierarchy as (ref %s)", + SafeOpcodeNameAt(obj.pc()), obj.type.name().c_str(), + target_type.name().c_str()); + return 0; + } + bool null_succeeds = opcode == kExprRefTestNull; + if (V8_LIKELY(current_code_reachable_and_ok_)) { + // This logic ensures that code generation can assume that functions + // can only be cast to function types, and data objects to data types. + if (V8_UNLIKELY(TypeCheckAlwaysSucceeds(obj, target_type))) { + if (rtt.has_value()) { + // Drop rtt. + CALL_INTERFACE(Drop); + } + // Type checking can still fail for null. + if (obj.type.is_nullable() && !null_succeeds) { + // We abuse ref.as_non_null, which isn't otherwise used as a unary + // operator, as a sentinel for the negation of ref.is_null. + CALL_INTERFACE(UnOp, kExprRefAsNonNull, obj, &value); + } else { + CALL_INTERFACE(Drop); + CALL_INTERFACE(I32Const, &value, 1); + } + } else if (V8_UNLIKELY(TypeCheckAlwaysFails(obj, target_type, + null_succeeds))) { + if (rtt.has_value()) { + // Drop rtt. + CALL_INTERFACE(Drop); + } + CALL_INTERFACE(Drop); + CALL_INTERFACE(I32Const, &value, 0); + } else { + if (rtt.has_value()) { + // RTT => Cast to concrete (index) type. + CALL_INTERFACE(RefTest, obj, rtt.value(), &value, null_succeeds); + } else { + // No RTT => Cast to abstract (non-index) types. 
+ CALL_INTERFACE(RefTestAbstract, obj, target_type, &value, + null_succeeds); + } + } + } + Drop(1 + rtt.has_value()); + Push(value); + return opcode_length; + } + case kExprRefTestDeprecated: { NON_CONST_ONLY IndexImmediate<validate> imm(this, this->pc_ + opcode_length, "type index"); @@ -4718,7 +4841,7 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> { CALL_INTERFACE(Drop); CALL_INTERFACE(I32Const, &value, 0); } else { - CALL_INTERFACE(RefTest, obj, rtt, &value); + CALL_INTERFACE(RefTest, obj, rtt, &value, /*null_succeeds*/ false); } } Drop(2); diff --git a/deps/v8/src/wasm/function-body-decoder.cc b/deps/v8/src/wasm/function-body-decoder.cc index 86e8b793c0a274..2d25af7e275e66 100644 --- a/deps/v8/src/wasm/function-body-decoder.cc +++ b/deps/v8/src/wasm/function-body-decoder.cc @@ -18,11 +18,11 @@ namespace wasm { bool DecodeLocalDecls(const WasmFeatures& enabled, BodyLocalDecls* decls, const WasmModule* module, const byte* start, - const byte* end) { + const byte* end, Zone* zone) { WasmFeatures no_features = WasmFeatures::None(); - Zone* zone = decls->type_list.get_allocator().zone(); + constexpr FixedSizeSignature<ValueType, 0, 0> kNoSig; WasmDecoder<Decoder::kFullValidation> decoder( - zone, module, enabled, &no_features, nullptr, start, end, 0); + zone, module, enabled, &no_features, &kNoSig, start, end, 0); uint32_t length; decoder.DecodeLocals(decoder.pc(), &length); if (decoder.failed()) { @@ -31,27 +31,32 @@ bool DecodeLocalDecls(const WasmFeatures& enabled, BodyLocalDecls* decls, } DCHECK(decoder.ok()); decls->encoded_size = length; - // Copy the decoded locals types into {decls->type_list}. - DCHECK(decls->type_list.empty()); - decls->type_list = std::move(decoder.local_types_); + // Copy the decoded locals types into {decls->local_types}. + DCHECK_NULL(decls->local_types); + decls->num_locals = decoder.num_locals_; + decls->local_types = decoder.local_types_; return true; } +BytecodeIterator::BytecodeIterator(const byte* start, const byte* end) + : Decoder(start, end) {} + BytecodeIterator::BytecodeIterator(const byte* start, const byte* end, - BodyLocalDecls* decls) + BodyLocalDecls* decls, Zone* zone) : Decoder(start, end) { - if (decls != nullptr) { - if (DecodeLocalDecls(WasmFeatures::All(), decls, nullptr, start, end)) { - pc_ += decls->encoded_size; - if (pc_ > end_) pc_ = end_; - } + DCHECK_NOT_NULL(decls); + DCHECK_NOT_NULL(zone); + if (DecodeLocalDecls(WasmFeatures::All(), decls, nullptr, start, end, zone)) { + pc_ += decls->encoded_size; + if (pc_ > end_) pc_ = end_; } } -DecodeResult VerifyWasmCode(AccountingAllocator* allocator, - const WasmFeatures& enabled, - const WasmModule* module, WasmFeatures* detected, - const FunctionBody& body) { +DecodeResult ValidateFunctionBody(AccountingAllocator* allocator, + const WasmFeatures& enabled, + const WasmModule* module, + WasmFeatures* detected, + const FunctionBody& body) { Zone zone(allocator, ZONE_NAME); WasmFullDecoder<Decoder::kFullValidation, EmptyInterface> decoder( &zone, module, enabled, detected, body); @@ -140,19 +145,19 @@ bool PrintRawWasmCode(AccountingAllocator* allocator, const FunctionBody& body, } // Print the local declarations. 
- BodyLocalDecls decls(&zone); - BytecodeIterator i(body.start, body.end, &decls); + BodyLocalDecls decls; + BytecodeIterator i(body.start, body.end, &decls, &zone); if (body.start != i.pc() && print_locals == kPrintLocals) { os << "// locals:"; - if (!decls.type_list.empty()) { - ValueType type = decls.type_list[0]; + if (decls.num_locals > 0) { + ValueType type = decls.local_types[0]; uint32_t count = 0; - for (size_t pos = 0; pos < decls.type_list.size(); ++pos) { - if (decls.type_list[pos] == type) { + for (size_t pos = 0; pos < decls.num_locals; ++pos) { + if (decls.local_types[pos] == type) { ++count; } else { os << " " << count << " " << type.name(); - type = decls.type_list[pos]; + type = decls.local_types[pos]; count = 1; } } diff --git a/deps/v8/src/wasm/function-body-decoder.h b/deps/v8/src/wasm/function-body-decoder.h index 0c36f1ddce0541..d6fb2cfd99a757 100644 --- a/deps/v8/src/wasm/function-body-decoder.h +++ b/deps/v8/src/wasm/function-body-decoder.h @@ -42,11 +42,9 @@ struct FunctionBody { enum class LoadTransformationKind : uint8_t { kSplat, kExtend, kZeroExtend }; -V8_EXPORT_PRIVATE DecodeResult VerifyWasmCode(AccountingAllocator* allocator, - const WasmFeatures& enabled, - const WasmModule* module, - WasmFeatures* detected, - const FunctionBody& body); +V8_EXPORT_PRIVATE DecodeResult ValidateFunctionBody( + AccountingAllocator* allocator, const WasmFeatures& enabled, + const WasmModule* module, WasmFeatures* detected, const FunctionBody& body); enum PrintLocals { kPrintLocals, kOmitLocals }; V8_EXPORT_PRIVATE @@ -66,15 +64,15 @@ struct BodyLocalDecls { // The size of the encoded declarations. uint32_t encoded_size = 0; // size of encoded declarations - ZoneVector<ValueType> type_list; - - explicit BodyLocalDecls(Zone* zone) : type_list(zone) {} + uint32_t num_locals = 0; + ValueType* local_types = nullptr; }; V8_EXPORT_PRIVATE bool DecodeLocalDecls(const WasmFeatures& enabled, BodyLocalDecls* decls, const WasmModule* module, - const byte* start, const byte* end); + const byte* start, const byte* end, + Zone* zone); V8_EXPORT_PRIVATE BitVector* AnalyzeLoopAssignmentForTesting( Zone* zone, uint32_t num_locals, const byte* start, const byte* end); @@ -152,11 +150,12 @@ class V8_EXPORT_PRIVATE BytecodeIterator : public NON_EXPORTED_BASE(Decoder) { : iterator_base(ptr, end), start_(start) {} }; - // Create a new {BytecodeIterator}. If the {decls} pointer is non-null, - // assume the bytecode starts with local declarations and decode them. - // Otherwise, do not decode local decls. - BytecodeIterator(const byte* start, const byte* end, - BodyLocalDecls* decls = nullptr); + // Create a new {BytecodeIterator}, starting after the locals declarations. + BytecodeIterator(const byte* start, const byte* end); + + // Create a new {BytecodeIterator}, starting with locals declarations. 
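+  // (Decodes the locals into {decls}, allocating the type array in {zone},
+  // and leaves the iterator positioned at the first opcode after the locals.)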
+ BytecodeIterator(const byte* start, const byte* end, BodyLocalDecls* decls, + Zone* zone); base::iterator_range<opcode_iterator> opcodes() { return base::iterator_range<opcode_iterator>(opcode_iterator(pc_, end_), diff --git a/deps/v8/src/wasm/function-compiler.cc b/deps/v8/src/wasm/function-compiler.cc index d4230e3b46047a..46a4f545b37dfb 100644 --- a/deps/v8/src/wasm/function-compiler.cc +++ b/deps/v8/src/wasm/function-compiler.cc @@ -66,7 +66,7 @@ WasmCompilationResult WasmCompilationUnit::ExecuteFunctionCompilation( base::Optional<TimedHistogramScope> wasm_compile_function_time_scope; base::Optional<TimedHistogramScope> wasm_compile_huge_function_time_scope; - if (counters) { + if (counters && base::TimeTicks::IsHighResolution()) { if (func_body.end - func_body.start >= 100 * KB) { auto huge_size_histogram = SELECT_WASM_COUNTER( counters, env->module->origin, wasm, huge_function_size_bytes); @@ -204,12 +204,13 @@ bool UseGenericWrapper(const FunctionSig* sig) { } // namespace JSToWasmWrapperCompilationUnit::JSToWasmWrapperCompilationUnit( - Isolate* isolate, const FunctionSig* sig, const WasmModule* module, - bool is_import, const WasmFeatures& enabled_features, - AllowGeneric allow_generic) + Isolate* isolate, const FunctionSig* sig, uint32_t canonical_sig_index, + const WasmModule* module, bool is_import, + const WasmFeatures& enabled_features, AllowGeneric allow_generic) : isolate_(isolate), is_import_(is_import), sig_(sig), + canonical_sig_index_(canonical_sig_index), use_generic_wrapper_(allow_generic && UseGenericWrapper(sig) && !is_import), job_(use_generic_wrapper_ @@ -248,24 +249,27 @@ Handle<CodeT> JSToWasmWrapperCompilationUnit::Finalize() { // static Handle<CodeT> JSToWasmWrapperCompilationUnit::CompileJSToWasmWrapper( - Isolate* isolate, const FunctionSig* sig, const WasmModule* module, - bool is_import) { + Isolate* isolate, const FunctionSig* sig, uint32_t canonical_sig_index, + const WasmModule* module, bool is_import) { // Run the compilation unit synchronously. WasmFeatures enabled_features = WasmFeatures::FromIsolate(isolate); - JSToWasmWrapperCompilationUnit unit(isolate, sig, module, is_import, - enabled_features, kAllowGeneric); + JSToWasmWrapperCompilationUnit unit(isolate, sig, canonical_sig_index, module, + is_import, enabled_features, + kAllowGeneric); unit.Execute(); return unit.Finalize(); } // static Handle<CodeT> JSToWasmWrapperCompilationUnit::CompileSpecificJSToWasmWrapper( - Isolate* isolate, const FunctionSig* sig, const WasmModule* module) { + Isolate* isolate, const FunctionSig* sig, uint32_t canonical_sig_index, + const WasmModule* module) { // Run the compilation unit synchronously. 
const bool is_import = false; WasmFeatures enabled_features = WasmFeatures::FromIsolate(isolate); - JSToWasmWrapperCompilationUnit unit(isolate, sig, module, is_import, - enabled_features, kDontAllowGeneric); + JSToWasmWrapperCompilationUnit unit(isolate, sig, canonical_sig_index, module, + is_import, enabled_features, + kDontAllowGeneric); unit.Execute(); return unit.Finalize(); } diff --git a/deps/v8/src/wasm/function-compiler.h b/deps/v8/src/wasm/function-compiler.h index 71744db8ee854c..fa70073291b630 100644 --- a/deps/v8/src/wasm/function-compiler.h +++ b/deps/v8/src/wasm/function-compiler.h @@ -44,7 +44,7 @@ struct WasmCompilationResult { bool succeeded() const { return code_desc.buffer != nullptr; } bool failed() const { return !succeeded(); } - operator bool() const { return succeeded(); } + explicit operator bool() const { return succeeded(); } CodeDesc code_desc; std::unique_ptr<AssemblerBuffer> instr_buffer; @@ -57,7 +57,6 @@ struct WasmCompilationResult { ExecutionTier result_tier; Kind kind = kFunction; ForDebugging for_debugging = kNoDebugging; - int feedback_vector_slots = 0; }; class V8_EXPORT_PRIVATE WasmCompilationUnit final { @@ -104,6 +103,7 @@ class V8_EXPORT_PRIVATE JSToWasmWrapperCompilationUnit final { enum AllowGeneric : bool { kAllowGeneric = true, kDontAllowGeneric = false }; JSToWasmWrapperCompilationUnit(Isolate* isolate, const FunctionSig* sig, + uint32_t canonical_sig_index, const wasm::WasmModule* module, bool is_import, const WasmFeatures& enabled_features, AllowGeneric allow_generic); @@ -116,18 +116,20 @@ class V8_EXPORT_PRIVATE JSToWasmWrapperCompilationUnit final { bool is_import() const { return is_import_; } const FunctionSig* sig() const { return sig_; } + uint32_t canonical_sig_index() const { return canonical_sig_index_; } // Run a compilation unit synchronously. static Handle<CodeT> CompileJSToWasmWrapper(Isolate* isolate, const FunctionSig* sig, + uint32_t canonical_sig_index, const WasmModule* module, bool is_import); // Run a compilation unit synchronously, but ask for the specific // wrapper. - static Handle<CodeT> CompileSpecificJSToWasmWrapper(Isolate* isolate, - const FunctionSig* sig, - const WasmModule* module); + static Handle<CodeT> CompileSpecificJSToWasmWrapper( + Isolate* isolate, const FunctionSig* sig, uint32_t canonical_sig_index, + const WasmModule* module); private: // Wrapper compilation is bound to an isolate. Concurrent accesses to the @@ -137,6 +139,7 @@ class V8_EXPORT_PRIVATE JSToWasmWrapperCompilationUnit final { Isolate* isolate_; bool is_import_; const FunctionSig* sig_; + uint32_t canonical_sig_index_; bool use_generic_wrapper_; std::unique_ptr<TurbofanCompilationJob> job_; }; diff --git a/deps/v8/src/wasm/graph-builder-interface.cc b/deps/v8/src/wasm/graph-builder-interface.cc index 1279b426b15002..fb1f19c2b13604 100644 --- a/deps/v8/src/wasm/graph-builder-interface.cc +++ b/deps/v8/src/wasm/graph-builder-interface.cc @@ -1234,10 +1234,12 @@ class WasmGraphBuildingInterface { WasmTypeCheckConfig ComputeWasmTypeCheckConfig(ValueType object_type, ValueType rtt_type, - const WasmModule* module) { + const WasmModule* module, + bool null_succeeds) { WasmTypeCheckConfig result; result.object_can_be_null = object_type.is_nullable(); DCHECK(object_type.is_object_reference()); // Checked by validation. + result.null_succeeds = null_succeeds; // In the bottom case, the result is irrelevant. result.rtt_depth = rtt_type.is_bottom() ? 
0 /* unused */ @@ -1247,16 +1249,24 @@ class WasmGraphBuildingInterface { } void RefTest(FullDecoder* decoder, const Value& object, const Value& rtt, - Value* result) { - WasmTypeCheckConfig config = - ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_); + Value* result, bool null_succeeds) { + WasmTypeCheckConfig config = ComputeWasmTypeCheckConfig( + object.type, rtt.type, decoder->module_, null_succeeds); SetAndTypeNode(result, builder_->RefTest(object.node, rtt.node, config)); } + void RefTestAbstract(FullDecoder* decoder, const Value& object, + wasm::HeapType type, Value* result, bool null_succeeds) { + SetAndTypeNode(result, + builder_->RefTestAbstract(object.node, type, null_succeeds)); + } + void RefCast(FullDecoder* decoder, const Value& object, const Value& rtt, Value* result) { - WasmTypeCheckConfig config = - ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_); + // TODO(mliedtke): Should be a parameter for generic ref.cast instructions. + const bool null_succeeds = false; + WasmTypeCheckConfig config = ComputeWasmTypeCheckConfig( + object.type, rtt.type, decoder->module_, null_succeeds); TFNode* cast_node = v8_flags.experimental_wasm_assume_ref_cast_succeeds ? builder_->TypeGuard(object.node, result->type) : builder_->RefCast(object.node, rtt.node, config, @@ -1270,8 +1280,11 @@ class WasmGraphBuildingInterface { void BrOnCastAbs(FullDecoder* decoder, const Value& object, const Value& rtt, Value* forwarding_value, uint32_t br_depth, bool branch_on_match) { - WasmTypeCheckConfig config = - ComputeWasmTypeCheckConfig(object.type, rtt.type, decoder->module_); + // TODO(mliedtke): Should be a parameter for generic br_on_cast + // instructions. + const bool null_succeeds = false; + WasmTypeCheckConfig config = ComputeWasmTypeCheckConfig( + object.type, rtt.type, decoder->module_, null_succeeds); SsaEnv* branch_env = Split(decoder->zone(), ssa_env_); SsaEnv* no_branch_env = Steal(decoder->zone(), ssa_env_); no_branch_env->SetNotMerged(); @@ -1302,9 +1315,18 @@ class WasmGraphBuildingInterface { decoder, object, rtt, value_on_fallthrough, br_depth, false); } + void RefIsEq(FullDecoder* decoder, const Value& object, Value* result) { + bool null_succeeds = false; + SetAndTypeNode(result, + builder_->RefIsEq(object.node, object.type.is_nullable(), + null_succeeds)); + } + void RefIsData(FullDecoder* decoder, const Value& object, Value* result) { + bool null_succeeds = false; SetAndTypeNode(result, - builder_->RefIsData(object.node, object.type.is_nullable())); + builder_->RefIsData(object.node, object.type.is_nullable(), + null_succeeds)); } void RefAsData(FullDecoder* decoder, const Value& object, Value* result) { @@ -1329,8 +1351,10 @@ class WasmGraphBuildingInterface { } void RefIsArray(FullDecoder* decoder, const Value& object, Value* result) { - SetAndTypeNode( - result, builder_->RefIsArray(object.node, object.type.is_nullable())); + bool null_succeeds = false; + SetAndTypeNode(result, + builder_->RefIsArray(object.node, object.type.is_nullable(), + null_succeeds)); } void RefAsArray(FullDecoder* decoder, const Value& object, Value* result) { @@ -1355,7 +1379,8 @@ class WasmGraphBuildingInterface { } void RefIsI31(FullDecoder* decoder, const Value& object, Value* result) { - SetAndTypeNode(result, builder_->RefIsI31(object.node)); + bool null_succeeds = false; + SetAndTypeNode(result, builder_->RefIsI31(object.node, null_succeeds)); } void RefAsI31(FullDecoder* decoder, const Value& object, Value* result) { @@ -1693,7 +1718,6 @@ class 
WasmGraphBuildingInterface { TFNode* if_success = nullptr; TFNode* if_exception = nullptr; - // TODO(manoskouk): Can we assign a wasm type to the exception value? if (!builder_->ThrowsException(node, &if_success, &if_exception)) { return node; } @@ -2081,7 +2105,6 @@ class WasmGraphBuildingInterface { } } if (exception_value != nullptr) { - // TODO(manoskouk): Can we assign a wasm type to the exception value? *exception_value = builder_->LoopExitValue( *exception_value, MachineRepresentation::kWord32); } diff --git a/deps/v8/src/wasm/module-compiler.cc b/deps/v8/src/wasm/module-compiler.cc index f81e6ebdc68735..10575a7e3160ce 100644 --- a/deps/v8/src/wasm/module-compiler.cc +++ b/deps/v8/src/wasm/module-compiler.cc @@ -616,8 +616,7 @@ class CompilationStateImpl { std::shared_ptr<JSToWasmWrapperCompilationUnit> GetNextJSToWasmWrapperCompilationUnit(); - void FinalizeJSToWasmWrappers(Isolate* isolate, const WasmModule* module, - Handle<FixedArray>* export_wrappers_out); + void FinalizeJSToWasmWrappers(Isolate* isolate, const WasmModule* module); void OnFinishedUnits(base::Vector<WasmCode*>); void OnFinishedJSToWasmWrapperUnits(int num); @@ -1098,10 +1097,9 @@ DecodeResult ValidateSingleFunction(const WasmModule* module, int func_index, WasmFeatures enabled_features) { const WasmFunction* func = &module->functions[func_index]; FunctionBody body{func->sig, func->code.offset(), code.begin(), code.end()}; - DecodeResult result; - - WasmFeatures detected; - return VerifyWasmCode(allocator, enabled_features, module, &detected, body); + WasmFeatures detected_features; + return ValidateFunctionBody(allocator, enabled_features, module, + &detected_features, body); } enum OnlyLazyFunctions : bool { @@ -1166,7 +1164,7 @@ class CompileLazyTimingScope { } // namespace bool CompileLazy(Isolate* isolate, Handle<WasmInstanceObject> instance, - int func_index, NativeModule** out_native_module) { + int func_index) { Handle<WasmModuleObject> module_object(instance->module_object(), isolate); NativeModule* native_module = module_object->native_module(); Counters* counters = isolate->counters(); @@ -1247,13 +1245,11 @@ bool CompileLazy(Isolate* isolate, Handle<WasmInstanceObject> instance, } // Allocate feedback vector if needed. - if (result.feedback_vector_slots > 0) { + int feedback_vector_slots = NumFeedbackSlots(module, func_index); + if (feedback_vector_slots > 0) { DCHECK(v8_flags.wasm_speculative_inlining); - // We have to save the native_module on the stack, in case the allocation - // triggers a GC and we need the module to scan WasmCompileLazy stack frame. - *out_native_module = native_module; - Handle<FixedArray> vector = isolate->factory()->NewFixedArrayWithZeroes( - result.feedback_vector_slots); + Handle<FixedArray> vector = + isolate->factory()->NewFixedArrayWithZeroes(feedback_vector_slots); instance->feedback_vectors().set( declared_function_index(module, func_index), *vector); } @@ -1714,7 +1710,8 @@ CompilationExecutionResult ExecuteCompilationUnits( UNREACHABLE(); } -using JSToWasmWrapperKey = std::pair<bool, FunctionSig>; +// (function is imported, canonical type index) +using JSToWasmWrapperKey = std::pair<bool, uint32_t>; // Returns the number of units added. 
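 // (At most one unit is added per distinct {JSToWasmWrapperKey}; exports whose
 // signatures canonicalize to the same index share a single wrapper.)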
int AddExportWrapperUnits(Isolate* isolate, NativeModule* native_module, @@ -1723,11 +1720,14 @@ int AddExportWrapperUnits(Isolate* isolate, NativeModule* native_module, for (auto exp : native_module->module()->export_table) { if (exp.kind != kExternalFunction) continue; auto& function = native_module->module()->functions[exp.index]; - JSToWasmWrapperKey key(function.imported, *function.sig); + uint32_t canonical_type_index = + native_module->module() + ->isorecursive_canonical_type_ids[function.sig_index]; + JSToWasmWrapperKey key(function.imported, canonical_type_index); if (keys.insert(key).second) { auto unit = std::make_shared<JSToWasmWrapperCompilationUnit>( - isolate, function.sig, native_module->module(), function.imported, - native_module->enabled_features(), + isolate, function.sig, canonical_type_index, native_module->module(), + function.imported, native_module->enabled_features(), JSToWasmWrapperCompilationUnit::kAllowGeneric); builder->AddJSToWasmWrapperUnit(std::move(unit)); } @@ -1744,14 +1744,18 @@ int AddImportWrapperUnits(NativeModule* native_module, keys; int num_imported_functions = native_module->num_imported_functions(); for (int func_index = 0; func_index < num_imported_functions; func_index++) { - const FunctionSig* sig = native_module->module()->functions[func_index].sig; - if (!IsJSCompatibleSignature(sig, native_module->module(), + const WasmFunction& function = + native_module->module()->functions[func_index]; + if (!IsJSCompatibleSignature(function.sig, native_module->module(), native_module->enabled_features())) { continue; } + uint32_t canonical_type_index = + native_module->module() + ->isorecursive_canonical_type_ids[function.sig_index]; WasmImportWrapperCache::CacheKey key( - compiler::kDefaultImportCallKind, sig, - static_cast<int>(sig->parameter_count()), kNoSuspend); + compiler::kDefaultImportCallKind, canonical_type_index, + static_cast<int>(function.sig->parameter_count()), kNoSuspend); auto it = keys.insert(key); if (it.second) { // Ensure that all keys exist in the cache, so that we can populate the @@ -1889,8 +1893,7 @@ class CompilationTimeCallback : public CompilationEventCallback { void CompileNativeModule(Isolate* isolate, v8::metrics::Recorder::ContextId context_id, ErrorThrower* thrower, const WasmModule* wasm_module, - std::shared_ptr<NativeModule> native_module, - Handle<FixedArray>* export_wrappers_out) { + std::shared_ptr<NativeModule> native_module) { CHECK(!v8_flags.jitless); ModuleWireBytes wire_bytes(native_module->wire_bytes()); const bool lazy_module = IsLazyModule(wasm_module); @@ -1933,8 +1936,7 @@ void CompileNativeModule(Isolate* isolate, return; } - compilation_state->FinalizeJSToWasmWrappers(isolate, native_module->module(), - export_wrappers_out); + compilation_state->FinalizeJSToWasmWrappers(isolate, native_module->module()); compilation_state->WaitForCompilationEvent( CompilationEvent::kFinishedBaselineCompilation); @@ -1986,8 +1988,7 @@ class BackgroundCompileJob final : public JobTask { std::shared_ptr<NativeModule> CompileToNativeModule( Isolate* isolate, const WasmFeatures& enabled, ErrorThrower* thrower, std::shared_ptr<const WasmModule> module, const ModuleWireBytes& wire_bytes, - Handle<FixedArray>* export_wrappers_out, int compilation_id, - v8::metrics::Recorder::ContextId context_id) { + int compilation_id, v8::metrics::Recorder::ContextId context_id) { const WasmModule* wasm_module = module.get(); WasmEngine* engine = GetWasmEngine(); base::OwnedVector<uint8_t> wire_bytes_copy = @@ -1999,13 +2000,15 @@ 
std::shared_ptr<NativeModule> CompileToNativeModule( std::shared_ptr<NativeModule> native_module = engine->MaybeGetNativeModule( wasm_module->origin, wire_bytes_copy.as_vector(), isolate); if (native_module) { - // TODO(thibaudm): Look into sharing export wrappers. - CompileJsToWasmWrappers(isolate, wasm_module, export_wrappers_out); + CompileJsToWasmWrappers(isolate, wasm_module); return native_module; } - TimedHistogramScope wasm_compile_module_time_scope(SELECT_WASM_COUNTER( - isolate->counters(), wasm_module->origin, wasm_compile, module_time)); + base::Optional<TimedHistogramScope> wasm_compile_module_time_scope; + if (base::TimeTicks::IsHighResolution()) { + wasm_compile_module_time_scope.emplace(SELECT_WASM_COUNTER( + isolate->counters(), wasm_module->origin, wasm_compile, module_time)); + } // Embedder usage count for declared shared memories. if (wasm_module->has_shared_memory) { @@ -2026,14 +2029,13 @@ std::shared_ptr<NativeModule> CompileToNativeModule( // Sync compilation is user blocking, so we increase the priority. native_module->compilation_state()->SetHighPriority(); - CompileNativeModule(isolate, context_id, thrower, wasm_module, native_module, - export_wrappers_out); + CompileNativeModule(isolate, context_id, thrower, wasm_module, native_module); bool cache_hit = !engine->UpdateNativeModuleCache(thrower->error(), &native_module, isolate); if (thrower->error()) return {}; if (cache_hit) { - CompileJsToWasmWrappers(isolate, wasm_module, export_wrappers_out); + CompileJsToWasmWrappers(isolate, wasm_module); return native_module; } @@ -2243,7 +2245,8 @@ void AsyncCompileJob::PrepareRuntimeObjects() { // Create heap objects for script and module bytes to be stored in the // module object. Asm.js is not compiled asynchronously. DCHECK(module_object_.is_null()); - auto source_url = stream_ ? stream_->url() : base::Vector<const char>(); + auto source_url = + stream_ ? base::VectorOf(stream_->url()) : base::Vector<const char>(); auto script = GetWasmEngine()->GetOrCreateScript(isolate_, native_module_, source_url); Handle<WasmModuleObject> module_object = @@ -2312,16 +2315,14 @@ void AsyncCompileJob::FinishCompile(bool is_after_cache_hit) { // TODO(bbudge) Allow deserialization without wrapper compilation, so we can // just compile wrappers here. if (!is_after_deserialization) { - Handle<FixedArray> export_wrappers; if (is_after_cache_hit) { // TODO(thibaudm): Look into sharing wrappers. - CompileJsToWasmWrappers(isolate_, module, &export_wrappers); + CompileJsToWasmWrappers(isolate_, module); } else { - compilation_state->FinalizeJSToWasmWrappers(isolate_, module, - &export_wrappers); + compilation_state->FinalizeJSToWasmWrappers(isolate_, module); } - module_object_->set_export_wrappers(*export_wrappers); } + // We can only update the feature counts once the entire compile is done. 
compilation_state->PublishDetectedFeatures(isolate_); @@ -2823,7 +2824,7 @@ bool AsyncStreamingProcessor::ProcessModuleHeader( job_->context_id(), GetWasmEngine()->allocator()); decoder_.DecodeModuleHeader(bytes, offset); if (!decoder_.ok()) { - FinishAsyncCompileJobWithError(decoder_.FinishDecoding(false).error()); + FinishAsyncCompileJobWithError(decoder_.FinishDecoding().error()); return false; } prefix_hash_ = GetWireBytesHash(bytes); @@ -2849,7 +2850,7 @@ bool AsyncStreamingProcessor::ProcessSection(SectionCode section_code, size_t bytes_consumed = ModuleDecoder::IdentifyUnknownSection( &decoder_, bytes, offset, §ion_code); if (!decoder_.ok()) { - FinishAsyncCompileJobWithError(decoder_.FinishDecoding(false).error()); + FinishAsyncCompileJobWithError(decoder_.FinishDecoding().error()); return false; } if (section_code == SectionCode::kUnknownSectionCode) { @@ -2860,10 +2861,9 @@ bool AsyncStreamingProcessor::ProcessSection(SectionCode section_code, offset += bytes_consumed; bytes = bytes.SubVector(bytes_consumed, bytes.size()); } - constexpr bool verify_functions = false; - decoder_.DecodeSection(section_code, bytes, offset, verify_functions); + decoder_.DecodeSection(section_code, bytes, offset); if (!decoder_.ok()) { - FinishAsyncCompileJobWithError(decoder_.FinishDecoding(false).error()); + FinishAsyncCompileJobWithError(decoder_.FinishDecoding().error()); return false; } return true; @@ -2882,7 +2882,7 @@ bool AsyncStreamingProcessor::ProcessCodeSectionHeader( static_cast<uint32_t>(code_section_length)); if (!decoder_.CheckFunctionsCount(static_cast<uint32_t>(num_functions), functions_mismatch_error_offset)) { - FinishAsyncCompileJobWithError(decoder_.FinishDecoding(false).error()); + FinishAsyncCompileJobWithError(decoder_.FinishDecoding().error()); return false; } @@ -2987,7 +2987,7 @@ void AsyncStreamingProcessor::OnFinishedStream( base::OwnedVector<uint8_t> bytes) { TRACE_STREAMING("Finish stream...\n"); DCHECK_EQ(NativeModuleCache::PrefixHash(bytes.as_vector()), prefix_hash_); - ModuleResult result = decoder_.FinishDecoding(false); + ModuleResult result = decoder_.FinishDecoding(); if (result.failed()) { FinishAsyncCompileJobWithError(result.error()); return; @@ -3074,16 +3074,19 @@ bool AsyncStreamingProcessor::Deserialize( base::Vector<const uint8_t> module_bytes, base::Vector<const uint8_t> wire_bytes) { TRACE_EVENT0("v8.wasm", "wasm.Deserialize"); - TimedHistogramScope time_scope( - job_->isolate()->counters()->wasm_deserialization_time(), - job_->isolate()); + base::Optional<TimedHistogramScope> time_scope; + if (base::TimeTicks::IsHighResolution()) { + time_scope.emplace(job_->isolate()->counters()->wasm_deserialization_time(), + job_->isolate()); + } // DeserializeNativeModule and FinishCompile assume that they are executed in // a HandleScope, and that a context is set on the isolate. 
HandleScope scope(job_->isolate_); SaveAndSwitchContext saved_context(job_->isolate_, *job_->native_context_); - MaybeHandle<WasmModuleObject> result = DeserializeNativeModule( - job_->isolate_, module_bytes, wire_bytes, job_->stream_->url()); + MaybeHandle<WasmModuleObject> result = + DeserializeNativeModule(job_->isolate_, module_bytes, wire_bytes, + base::VectorOf(job_->stream_->url())); if (result.is_null()) return false; @@ -3319,8 +3322,11 @@ void CompilationStateImpl::InitializeCompilationProgressAfterDeserialization( TRACE_EVENT2("v8.wasm", "wasm.CompilationAfterDeserialization", "num_lazy_functions", lazy_functions.size(), "num_eager_functions", eager_functions.size()); - TimedHistogramScope lazy_compile_time_scope( - counters()->wasm_compile_after_deserialize()); + base::Optional<TimedHistogramScope> lazy_compile_time_scope; + if (base::TimeTicks::IsHighResolution()) { + lazy_compile_time_scope.emplace( + counters()->wasm_compile_after_deserialize()); + } auto* module = native_module_->module(); base::Optional<CodeSpaceWriteScope> lazy_code_space_write_scope; @@ -3538,11 +3544,8 @@ CompilationStateImpl::GetNextJSToWasmWrapperCompilationUnit() { return js_to_wasm_wrapper_units_[outstanding_units - 1]; } -void CompilationStateImpl::FinalizeJSToWasmWrappers( - Isolate* isolate, const WasmModule* module, - Handle<FixedArray>* export_wrappers_out) { - *export_wrappers_out = isolate->factory()->NewFixedArray( - MaxNumExportWrappers(module), AllocationType::kOld); +void CompilationStateImpl::FinalizeJSToWasmWrappers(Isolate* isolate, + const WasmModule* module) { // TODO(6792): Wrappers below are allocated with {Factory::NewCode}. As an // optimization we create a code memory modification scope that avoids // changing the page permissions back-and-forth between RWX and RX, because @@ -3550,13 +3553,17 @@ void CompilationStateImpl::FinalizeJSToWasmWrappers( TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("v8.wasm.detailed"), "wasm.FinalizeJSToWasmWrappers", "wrappers", js_to_wasm_wrapper_units_.size()); + + isolate->heap()->EnsureWasmCanonicalRttsSize(module->MaxCanonicalTypeIndex() + + 1); CodePageCollectionMemoryModificationScope modification_scope(isolate->heap()); for (auto& unit : js_to_wasm_wrapper_units_) { DCHECK_EQ(isolate, unit->isolate()); Handle<CodeT> code = unit->Finalize(); - int wrapper_index = - GetExportWrapperIndex(module, unit->sig(), unit->is_import()); - (*export_wrappers_out)->set(wrapper_index, *code); + uint32_t index = + GetExportWrapperIndex(unit->canonical_sig_index(), unit->is_import()); + isolate->heap()->js_to_wasm_wrappers().Set(index, + MaybeObject::FromObject(*code)); RecordStats(*code, isolate->counters()); } } @@ -3753,11 +3760,14 @@ void CompilationStateImpl::PublishCompilationResults( DCHECK_LE(0, func_index); DCHECK_LT(func_index, native_module_->num_functions()); if (func_index < num_imported_functions) { - const FunctionSig* sig = - native_module_->module()->functions[func_index].sig; + const WasmFunction& function = + native_module_->module()->functions[func_index]; + uint32_t canonical_type_index = + native_module_->module() + ->isorecursive_canonical_type_ids[function.sig_index]; WasmImportWrapperCache::CacheKey key( - compiler::kDefaultImportCallKind, sig, - static_cast<int>(sig->parameter_count()), kNoSuspend); + compiler::kDefaultImportCallKind, canonical_type_index, + static_cast<int>(function.sig->parameter_count()), kNoSuspend); // If two imported functions have the same key, only one of them should // have been added as a compilation unit. 
So it is always the first time // we compile a wrapper for this key here. @@ -3890,8 +3900,8 @@ void CompilationStateImpl::WaitForCompilationEvent( } namespace { -using JSToWasmWrapperQueue = - WrapperQueue<JSToWasmWrapperKey, base::hash<JSToWasmWrapperKey>>; +using JSToWasmWrapperQueue = WrapperQueue<JSToWasmWrapperKey, std::nullptr_t, + base::hash<JSToWasmWrapperKey>>; using JSToWasmWrapperUnitMap = std::unordered_map<JSToWasmWrapperKey, std::unique_ptr<JSToWasmWrapperCompilationUnit>, @@ -3906,8 +3916,10 @@ class CompileJSToWasmWrapperJob final : public JobTask { outstanding_units_(queue->size()) {} void Run(JobDelegate* delegate) override { - while (base::Optional<JSToWasmWrapperKey> key = queue_->pop()) { - JSToWasmWrapperCompilationUnit* unit = (*compilation_units_)[*key].get(); + while (base::Optional<std::pair<JSToWasmWrapperKey, std::nullptr_t>> key = + queue_->pop()) { + JSToWasmWrapperCompilationUnit* unit = + (*compilation_units_)[key->first].get(); unit->Execute(); outstanding_units_.fetch_sub(1, std::memory_order_relaxed); if (delegate && delegate->ShouldYield()) return; @@ -3930,11 +3942,11 @@ class CompileJSToWasmWrapperJob final : public JobTask { }; } // namespace -void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module, - Handle<FixedArray>* export_wrappers_out) { +void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module) { TRACE_EVENT0("v8.wasm", "wasm.CompileJsToWasmWrappers"); - *export_wrappers_out = isolate->factory()->NewFixedArray( - MaxNumExportWrappers(module), AllocationType::kOld); + + isolate->heap()->EnsureWasmCanonicalRttsSize(module->MaxCanonicalTypeIndex() + + 1); JSToWasmWrapperQueue queue; JSToWasmWrapperUnitMap compilation_units; @@ -3943,11 +3955,24 @@ void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module, // Prepare compilation units in the main thread. 
for (auto exp : module->export_table) { if (exp.kind != kExternalFunction) continue; + auto& function = module->functions[exp.index]; - JSToWasmWrapperKey key(function.imported, *function.sig); - if (queue.insert(key)) { + uint32_t canonical_type_index = + module->isorecursive_canonical_type_ids[function.sig_index]; + int wrapper_index = + GetExportWrapperIndex(canonical_type_index, function.imported); + auto existing_wrapper = + isolate->heap()->js_to_wasm_wrappers().Get(wrapper_index); + if (existing_wrapper.IsStrongOrWeak() && + !existing_wrapper.GetHeapObject().IsUndefined()) { + continue; + } + + JSToWasmWrapperKey key(function.imported, canonical_type_index); + if (queue.insert(key, nullptr)) { auto unit = std::make_unique<JSToWasmWrapperCompilationUnit>( - isolate, function.sig, module, function.imported, enabled_features, + isolate, function.sig, canonical_type_index, module, + function.imported, enabled_features, JSToWasmWrapperCompilationUnit::kAllowGeneric); compilation_units.emplace(key, std::move(unit)); } @@ -3982,8 +4007,9 @@ void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module, JSToWasmWrapperCompilationUnit* unit = pair.second.get(); DCHECK_EQ(isolate, unit->isolate()); Handle<CodeT> code = unit->Finalize(); - int wrapper_index = GetExportWrapperIndex(module, &key.second, key.first); - (*export_wrappers_out)->set(wrapper_index, *code); + int wrapper_index = GetExportWrapperIndex(key.second, key.first); + isolate->heap()->js_to_wasm_wrappers().Set( + wrapper_index, HeapObjectReference::Strong(*code)); RecordStats(*code, isolate->counters()); } } @@ -3991,12 +4017,13 @@ void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module, WasmCode* CompileImportWrapper( NativeModule* native_module, Counters* counters, compiler::WasmImportCallKind kind, const FunctionSig* sig, - int expected_arity, Suspend suspend, + uint32_t canonical_type_index, int expected_arity, Suspend suspend, WasmImportWrapperCache::ModificationScope* cache_scope) { // Entry should exist, so that we don't insert a new one and invalidate // other threads' iterators/references, but it should not have been compiled // yet. - WasmImportWrapperCache::CacheKey key(kind, sig, expected_arity, suspend); + WasmImportWrapperCache::CacheKey key(kind, canonical_type_index, + expected_arity, suspend); DCHECK_NULL((*cache_scope)[key]); bool source_positions = is_asmjs_module(native_module->module()); // Keep the {WasmCode} alive until we explicitly call {IncRef}. diff --git a/deps/v8/src/wasm/module-compiler.h b/deps/v8/src/wasm/module-compiler.h index 20320899eb5073..4c3b3500460a82 100644 --- a/deps/v8/src/wasm/module-compiler.h +++ b/deps/v8/src/wasm/module-compiler.h @@ -57,15 +57,13 @@ V8_EXPORT_PRIVATE std::shared_ptr<NativeModule> CompileToNativeModule( Isolate* isolate, const WasmFeatures& enabled, ErrorThrower* thrower, std::shared_ptr<const WasmModule> module, const ModuleWireBytes& wire_bytes, - Handle<FixedArray>* export_wrappers_out, int compilation_id, - v8::metrics::Recorder::ContextId context_id); + int compilation_id, v8::metrics::Recorder::ContextId context_id); void RecompileNativeModule(NativeModule* native_module, TieringState new_tiering_state); V8_EXPORT_PRIVATE -void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module, - Handle<FixedArray>* export_wrappers_out); +void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module); // Compiles the wrapper for this (kind, sig) pair and sets the corresponding // cache entry. 
Assumes the key already exists in the cache but has not been
@@ -74,14 +72,13 @@ V8_EXPORT_PRIVATE
 WasmCode* CompileImportWrapper(
     NativeModule* native_module, Counters* counters,
     compiler::WasmImportCallKind kind, const FunctionSig* sig,
-    int expected_arity, Suspend suspend,
+    uint32_t canonical_type_index, int expected_arity, Suspend suspend,
     WasmImportWrapperCache::ModificationScope* cache_scope);

 // Triggered by the WasmCompileLazy builtin. The return value indicates whether
 // compilation was successful. Lazy compilation can fail only if validation is
 // also lazy.
-bool CompileLazy(Isolate*, Handle<WasmInstanceObject>, int func_index,
-                 NativeModule** out_native_module);
+bool CompileLazy(Isolate*, Handle<WasmInstanceObject>, int func_index);

 // Throws the compilation error after failed lazy compilation.
 void ThrowLazyCompilationError(Isolate* isolate,
@@ -96,14 +93,14 @@ V8_EXPORT_PRIVATE void TriggerTierUp(WasmInstanceObject instance,
 void TierUpNowForTesting(Isolate* isolate, WasmInstanceObject instance,
                          int func_index);

-template <typename Key, typename Hash>
+template <typename Key, typename KeyInfo, typename Hash>
 class WrapperQueue {
  public:
   // Removes an arbitrary key from the queue and returns it.
   // If the queue is empty, returns nullopt.
   // Thread-safe.
-  base::Optional<Key> pop() {
-    base::Optional<Key> key = base::nullopt;
+  base::Optional<std::pair<Key, KeyInfo>> pop() {
+    base::Optional<std::pair<Key, KeyInfo>> key = base::nullopt;
     base::MutexGuard lock(&mutex_);
     auto it = queue_.begin();
     if (it != queue_.end()) {
@@ -116,7 +113,9 @@ class WrapperQueue {
   // Adds the given key to the queue and returns true iff the insert was
   // successful.
   // Not thread-safe.
-  bool insert(const Key& key) { return queue_.insert(key).second; }
+  bool insert(const Key& key, KeyInfo key_info) {
+    return queue_.insert({key, key_info}).second;
+  }

   size_t size() {
     base::MutexGuard lock(&mutex_);
@@ -125,7 +124,7 @@ class WrapperQueue {
  private:
   base::Mutex mutex_;
-  std::unordered_set<Key, Hash> queue_;
+  std::unordered_map<Key, KeyInfo, Hash> queue_;
 };

 // Encapsulates all the state and steps of an asynchronous compilation.
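// The WrapperQueue change above generalizes the queue from a set of keys to a
// key -> info map, so that each queued wrapper compilation can carry a payload
// (the non-canonicalized FunctionSig* for import wrappers; JS-to-wasm wrappers
// pass nullptr). What follows is a minimal stand-alone sketch of that pattern,
// using std:: equivalents of V8's base:: helpers; the element types and the
// main() driver are illustrative placeholders, not V8 code.
#include <iostream>
#include <mutex>
#include <optional>
#include <unordered_map>
#include <utility>

template <typename Key, typename KeyInfo, typename Hash>
class WrapperQueueSketch {
 public:
  // Removes an arbitrary {key, info} pair from the queue and returns it, or
  // nullopt if the queue is empty. Thread-safe, so several compilation jobs
  // can drain the queue concurrently.
  std::optional<std::pair<Key, KeyInfo>> pop() {
    std::lock_guard<std::mutex> guard(mutex_);
    auto it = queue_.begin();
    if (it == queue_.end()) return std::nullopt;
    std::pair<Key, KeyInfo> result = *it;
    queue_.erase(it);
    return result;
  }

  // Adds the given key with its payload; returns true iff the key was not
  // queued yet. Not thread-safe (only called during single-threaded setup).
  bool insert(const Key& key, KeyInfo info) {
    return queue_.emplace(key, std::move(info)).second;
  }

 private:
  std::mutex mutex_;
  std::unordered_map<Key, KeyInfo, Hash> queue_;
};

int main() {
  WrapperQueueSketch<int, const char*, std::hash<int>> queue;
  queue.insert(1, "sig_a");
  queue.insert(2, "sig_b");
  queue.insert(1, "sig_c");  // duplicate key: ignored, insert returns false
  while (auto entry = queue.pop()) {
    std::cout << entry->first << " -> " << entry->second << "\n";
  }
}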
diff --git a/deps/v8/src/wasm/module-decoder-impl.h b/deps/v8/src/wasm/module-decoder-impl.h
index 227fc327c2b235..aeebf2633e1703 100644
--- a/deps/v8/src/wasm/module-decoder-impl.h
+++ b/deps/v8/src/wasm/module-decoder-impl.h
@@ -471,7 +471,7 @@ class ModuleDecoderTemplate : public Decoder {
   void DecodeSection(SectionCode section_code,
                      base::Vector<const uint8_t> bytes, uint32_t offset,
-                     bool verify_functions = true) {
+                     bool validate_functions = true) {
     if (failed()) return;
     Reset(bytes, offset);
     TRACE("Section: %s\n", SectionName(section_code));
@@ -507,7 +507,7 @@ class ModuleDecoderTemplate : public Decoder {
         DecodeStartSection();
         break;
       case kCodeSectionCode:
-        DecodeCodeSection(verify_functions);
+        DecodeCodeSection(validate_functions);
         break;
       case kElementSectionCode:
         DecodeElementSection();
         break;
@@ -684,9 +684,7 @@ class ModuleDecoderTemplate : public Decoder {
           const FunctionSig* sig = consume_sig(module_->signature_zone.get());
           if (!ok()) break;
           module_->add_signature(sig, kNoSuperType);
-          if (v8_flags.wasm_type_canonicalization) {
-            type_canon->AddRecursiveGroup(module_.get(), 1);
-          }
+          type_canon->AddRecursiveGroup(module_.get(), 1);
           break;
         }
         case kWasmArrayTypeCode:
@@ -727,17 +725,13 @@ class ModuleDecoderTemplate : public Decoder {
           TypeDefinition type = consume_subtype_definition();
           if (ok()) module_->add_type(type);
         }
-        if (ok() && v8_flags.wasm_type_canonicalization) {
-          type_canon->AddRecursiveGroup(module_.get(), group_size);
-        }
+        if (ok()) type_canon->AddRecursiveGroup(module_.get(), group_size);
       } else {
         tracer_.TypeOffset(pc_offset());
         TypeDefinition type = consume_subtype_definition();
         if (ok()) {
           module_->add_type(type);
-          if (v8_flags.wasm_type_canonicalization) {
-            type_canon->AddRecursiveGroup(module_.get(), 1);
-          }
+          type_canon->AddRecursiveGroup(module_.get(), 1);
         }
       }
     }
@@ -760,7 +754,6 @@ class ModuleDecoderTemplate : public Decoder {
         continue;
       }
     }
-    module_->signature_map.Freeze();
   }

   void DecodeImportSection() {
@@ -794,7 +787,6 @@ class ModuleDecoderTemplate : public Decoder {
             import->index,  // func_index
             0,              // sig_index
             {0, 0},         // code
-            0,              // feedback slots
             true,           // imported
             false,          // exported
             false});        // declared
@@ -891,7 +883,6 @@ class ModuleDecoderTemplate : public Decoder {
         func_index,  // func_index
         0,           // sig_index
         {0, 0},      // code
-        0,           // feedback slots
         false,       // imported
         false,       // exported
         false});     // declared
@@ -1127,7 +1118,7 @@ class ModuleDecoderTemplate : public Decoder {
     }
   }

-  void DecodeCodeSection(bool verify_functions) {
+  void DecodeCodeSection(bool validate_functions) {
    // Make sure global offsets were calculated before they get accessed during
    // function compilation.
    CalculateGlobalOffsets(module_.get());
@@ -1157,7 +1148,7 @@ class ModuleDecoderTemplate : public Decoder {
       uint32_t offset = pc_offset();
       consume_bytes(size, "function body");
       if (failed()) break;
-      DecodeFunctionBody(function_index, size, offset, verify_functions);
+      DecodeFunctionBody(function_index, size, offset, validate_functions);

       // Now that the function has been decoded, we can compute module offsets.
for (; inst_traces_it != this->inst_traces_.end() && @@ -1201,15 +1192,15 @@ class ModuleDecoderTemplate : public Decoder { } void DecodeFunctionBody(uint32_t index, uint32_t length, uint32_t offset, - bool verify_functions) { + bool validate_functions) { WasmFunction* function = &module_->functions[index]; function->code = {offset, length}; tracer_.FunctionBody(function, pc_ - (pc_offset() - offset)); - if (verify_functions) { + if (validate_functions) { ModuleWireBytes bytes(module_start_, module_end_); - VerifyFunctionBody(module_->signature_zone->allocator(), - index + module_->num_imported_functions, bytes, - module_.get(), function); + ValidateFunctionBody(module_->signature_zone->allocator(), + index + module_->num_imported_functions, bytes, + module_.get(), function); } } @@ -1354,7 +1345,7 @@ class ModuleDecoderTemplate : public Decoder { int64_t last_func_idx = -1; for (uint32_t i = 0; i < func_count; i++) { uint32_t func_idx = inner.consume_u32v("function index"); - if (int64_t(func_idx) <= last_func_idx) { + if (int64_t{func_idx} <= last_func_idx) { inner.errorf("Invalid function index: %d", func_idx); break; } @@ -1374,7 +1365,7 @@ class ModuleDecoderTemplate : public Decoder { for (uint32_t k = 0; k < mark_size; k++) { trace_mark_id |= inner.consume_u8("trace mark id") << k * 8; } - if (int64_t(func_off) <= last_func_off) { + if (int64_t{func_off} <= last_func_off) { inner.errorf("Invalid branch offset: %d", func_off); break; } @@ -1513,7 +1504,7 @@ class ModuleDecoderTemplate : public Decoder { int64_t last_func_idx = -1; for (uint32_t i = 0; i < func_count; i++) { uint32_t func_idx = inner.consume_u32v("function index"); - if (int64_t(func_idx) <= last_func_idx) { + if (int64_t{func_idx} <= last_func_idx) { inner.errorf("Invalid function index: %d", func_idx); break; } @@ -1526,7 +1517,7 @@ class ModuleDecoderTemplate : public Decoder { int64_t last_br_off = -1; for (uint32_t j = 0; j < num_hints; ++j) { uint32_t br_off = inner.consume_u32v("branch instruction offset"); - if (int64_t(br_off) <= last_br_off) { + if (int64_t{br_off} <= last_br_off) { inner.errorf("Invalid branch offset: %d", br_off); break; } @@ -1636,7 +1627,7 @@ class ModuleDecoderTemplate : public Decoder { return true; } - ModuleResult FinishDecoding(bool verify_functions = true) { + ModuleResult FinishDecoding() { if (ok() && CheckMismatchedCounts()) { // We calculate the global offsets here, because there may not be a // global section and code section that would have triggered the @@ -1645,23 +1636,18 @@ class ModuleDecoderTemplate : public Decoder { CalculateGlobalOffsets(module_.get()); } - ModuleResult result = toResult(std::move(module_)); - if (verify_functions && result.ok() && intermediate_error_.has_error()) { - // Copy error message and location. - return ModuleResult{std::move(intermediate_error_)}; - } - return result; + return toResult(std::move(module_)); } // Decodes an entire module. ModuleResult DecodeModule(Counters* counters, AccountingAllocator* allocator, - bool verify_functions = true) { + bool validate_functions = true) { StartDecoding(counters, allocator); uint32_t offset = 0; base::Vector<const byte> orig_bytes(start(), end() - start()); DecodeModuleHeader(base::VectorOf(start(), end() - start()), offset); if (failed()) { - return FinishDecoding(verify_functions); + return FinishDecoding(); } // Size of the module header. 
offset += 8; @@ -1674,7 +1660,7 @@ class ModuleDecoderTemplate : public Decoder { offset += section_iter.payload_start() - section_iter.section_start(); if (section_iter.section_code() != SectionCode::kUnknownSectionCode) { DecodeSection(section_iter.section_code(), section_iter.payload(), - offset, verify_functions); + offset, validate_functions); } // Shift the offset by the remaining section payload offset += section_iter.payload_length(); @@ -1688,26 +1674,21 @@ class ModuleDecoderTemplate : public Decoder { return decoder.toResult<std::shared_ptr<WasmModule>>(nullptr); } - return FinishDecoding(verify_functions); + return FinishDecoding(); } // Decodes a single anonymous function starting at {start_}. - FunctionResult DecodeSingleFunction(Zone* zone, - const ModuleWireBytes& wire_bytes, - const WasmModule* module) { + FunctionResult DecodeSingleFunctionForTesting( + Zone* zone, const ModuleWireBytes& wire_bytes, const WasmModule* module) { pc_ = start_; expect_u8("type form", kWasmFunctionTypeCode); - if (!ok()) return FunctionResult{std::move(intermediate_error_)}; WasmFunction function; function.sig = consume_sig(zone); function.code = {off(pc_), static_cast<uint32_t>(end_ - pc_)}; + if (!ok()) return FunctionResult{std::move(error_)}; - if (ok()) - VerifyFunctionBody(zone->allocator(), 0, wire_bytes, module, &function); - - if (intermediate_error_.has_error()) { - return FunctionResult{std::move(intermediate_error_)}; - } + ValidateFunctionBody(zone->allocator(), 0, wire_bytes, module, &function); + if (!ok()) return FunctionResult{std::move(error_)}; return FunctionResult{std::make_unique<WasmFunction>(function)}; } @@ -1756,7 +1737,6 @@ class ModuleDecoderTemplate : public Decoder { kBitsPerByte * sizeof(ModuleDecoderTemplate::seen_unordered_sections_) > kLastKnownModuleSection, "not enough bits"); - WasmError intermediate_error_; ModuleOrigin origin_; AccountingAllocator allocator_; Zone init_expr_zone_{&allocator_, "constant expr. zone"}; @@ -1824,9 +1804,9 @@ class ModuleDecoderTemplate : public Decoder { } // Verifies the body (code) of a given function. - void VerifyFunctionBody(AccountingAllocator* allocator, uint32_t func_num, - const ModuleWireBytes& wire_bytes, - const WasmModule* module, WasmFunction* function) { + void ValidateFunctionBody(AccountingAllocator* allocator, uint32_t func_num, + const ModuleWireBytes& wire_bytes, + const WasmModule* module, WasmFunction* function) { if (v8_flags.trace_wasm_decoder) { WasmFunctionName func_name(function, wire_bytes.GetNameOrNull(function, module)); @@ -1838,19 +1818,19 @@ class ModuleDecoderTemplate : public Decoder { start_ + GetBufferRelativeOffset(function->code.end_offset())}; WasmFeatures unused_detected_features = WasmFeatures::None(); - DecodeResult result = VerifyWasmCode(allocator, enabled_features_, module, - &unused_detected_features, body); + DecodeResult result = wasm::ValidateFunctionBody( + allocator, enabled_features_, module, &unused_detected_features, body); // If the decode failed and this is the first error, set error code and // location. - if (result.failed() && intermediate_error_.empty()) { + if (result.failed() && error_.empty()) { // Wrap the error message from the function decoder. 
WasmFunctionName func_name(function, wire_bytes.GetNameOrNull(function, module)); std::ostringstream error_msg; error_msg << "in function " << func_name << ": " << result.error().message(); - intermediate_error_ = WasmError{result.error().offset(), error_msg.str()}; + error_ = WasmError{result.error().offset(), error_msg.str()}; } } @@ -2366,7 +2346,9 @@ class ModuleDecoderTemplate : public Decoder { } else { type = table_type; // Active segments with function indices must reference a function - // table. TODO(7748): Add support for anyref tables when we have them. + // table. (Using struct / array indices doesn't provide any value + // as such an index doesn't refer to a unique object instance unlike + // functions.) if (V8_UNLIKELY( !IsSubtypeOf(table_type, kWasmFuncRef, this->module_.get()))) { errorf(pos, diff --git a/deps/v8/src/wasm/module-decoder.cc b/deps/v8/src/wasm/module-decoder.cc index 6cf86cd6096772..2c70e2068dc217 100644 --- a/deps/v8/src/wasm/module-decoder.cc +++ b/deps/v8/src/wasm/module-decoder.cc @@ -88,7 +88,7 @@ class ModuleDecoderImpl : public ModuleDecoderTemplate<NoTracer> { ModuleResult DecodeWasmModule( const WasmFeatures& enabled, const byte* module_start, - const byte* module_end, bool verify_functions, ModuleOrigin origin, + const byte* module_end, bool validate_functions, ModuleOrigin origin, Counters* counters, std::shared_ptr<metrics::Recorder> metrics_recorder, v8::metrics::Recorder::ContextId context_id, DecodingMethod decoding_method, AccountingAllocator* allocator) { @@ -113,7 +113,7 @@ ModuleResult DecodeWasmModule( ? base::ThreadTicks::Now() : base::ThreadTicks(); ModuleResult result = - decoder.DecodeModule(counters, allocator, verify_functions); + decoder.DecodeModule(counters, allocator, validate_functions); // Record event metrics. 
metrics_event.wall_clock_duration_in_us = timer.Elapsed().InMicroseconds(); @@ -141,10 +141,10 @@ ModuleResult DecodeWasmModule( ModuleResult DecodeWasmModuleForDisassembler(const byte* module_start, const byte* module_end, AccountingAllocator* allocator) { - constexpr bool verify_functions = false; + constexpr bool validate_functions = false; ModuleDecoderImpl decoder(WasmFeatures::All(), module_start, module_end, kWasmOrigin); - return decoder.DecodeModule(nullptr, allocator, verify_functions); + return decoder.DecodeModule(nullptr, allocator, validate_functions); } ModuleDecoder::ModuleDecoder(const WasmFeatures& enabled) @@ -172,13 +172,14 @@ void ModuleDecoder::DecodeModuleHeader(base::Vector<const uint8_t> bytes, void ModuleDecoder::DecodeSection(SectionCode section_code, base::Vector<const uint8_t> bytes, - uint32_t offset, bool verify_functions) { - impl_->DecodeSection(section_code, bytes, offset, verify_functions); + uint32_t offset) { + impl_->DecodeSection(section_code, bytes, offset); } void ModuleDecoder::DecodeFunctionBody(uint32_t index, uint32_t length, - uint32_t offset, bool verify_functions) { - impl_->DecodeFunctionBody(index, length, offset, verify_functions); + uint32_t offset, + bool validate_functions) { + impl_->DecodeFunctionBody(index, length, offset, validate_functions); } void ModuleDecoder::StartCodeSection(WireBytesRef section_bytes) { @@ -190,9 +191,7 @@ bool ModuleDecoder::CheckFunctionsCount(uint32_t functions_count, return impl_->CheckFunctionsCount(functions_count, error_offset); } -ModuleResult ModuleDecoder::FinishDecoding(bool verify_functions) { - return impl_->FinishDecoding(verify_functions); -} +ModuleResult ModuleDecoder::FinishDecoding() { return impl_->FinishDecoding(); } size_t ModuleDecoder::IdentifyUnknownSection(ModuleDecoder* decoder, base::Vector<const uint8_t> bytes, @@ -237,7 +236,7 @@ FunctionResult DecodeWasmFunctionForTesting( } ModuleDecoderImpl decoder(enabled, function_start, function_end, kWasmOrigin); decoder.SetCounters(counters); - return decoder.DecodeSingleFunction(zone, wire_bytes, module); + return decoder.DecodeSingleFunctionForTesting(zone, wire_bytes, module); } AsmJsOffsetsResult DecodeAsmJsOffsets( diff --git a/deps/v8/src/wasm/module-decoder.h b/deps/v8/src/wasm/module-decoder.h index cbfca92886c4d1..c32fafa301000d 100644 --- a/deps/v8/src/wasm/module-decoder.h +++ b/deps/v8/src/wasm/module-decoder.h @@ -149,8 +149,7 @@ class ModuleDecoder { void DecodeModuleHeader(base::Vector<const uint8_t> bytes, uint32_t offset); void DecodeSection(SectionCode section_code, - base::Vector<const uint8_t> bytes, uint32_t offset, - bool verify_functions = true); + base::Vector<const uint8_t> bytes, uint32_t offset); void StartCodeSection(WireBytesRef section_bytes); @@ -159,7 +158,7 @@ class ModuleDecoder { void DecodeFunctionBody(uint32_t index, uint32_t size, uint32_t offset, bool verify_functions = true); - ModuleResult FinishDecoding(bool verify_functions = true); + ModuleResult FinishDecoding(); const std::shared_ptr<WasmModule>& shared_module() const; diff --git a/deps/v8/src/wasm/module-instantiate.cc b/deps/v8/src/wasm/module-instantiate.cc index a12cc954e1619c..2b6cd36ebb64a8 100644 --- a/deps/v8/src/wasm/module-instantiate.cc +++ b/deps/v8/src/wasm/module-instantiate.cc @@ -42,8 +42,9 @@ byte* raw_buffer_ptr(MaybeHandle<JSArrayBuffer> buffer, int offset) { return static_cast<byte*>(buffer.ToHandleChecked()->backing_store()) + offset; } -using ImportWrapperQueue = WrapperQueue<WasmImportWrapperCache::CacheKey, - 
WasmImportWrapperCache::CacheKeyHash>; +using ImportWrapperQueue = + WrapperQueue<WasmImportWrapperCache::CacheKey, const FunctionSig*, + WasmImportWrapperCache::CacheKeyHash>; class CompileImportWrapperJob final : public JobTask { public: @@ -66,12 +67,15 @@ class CompileImportWrapperJob final : public JobTask { void Run(JobDelegate* delegate) override { TRACE_EVENT0("v8.wasm", "wasm.CompileImportWrapperJob.Run"); - while (base::Optional<WasmImportWrapperCache::CacheKey> key = - queue_->pop()) { + while (base::Optional<std::pair<const WasmImportWrapperCache::CacheKey, + const FunctionSig*>> + key = queue_->pop()) { // TODO(wasm): Batch code publishing, to avoid repeated locking and // permission switching. - CompileImportWrapper(native_module_, counters_, key->kind, key->signature, - key->expected_arity, key->suspend, cache_scope_); + CompileImportWrapper(native_module_, counters_, key->first.kind, + key->second, key->first.canonical_type_index, + key->first.expected_arity, key->first.suspend, + cache_scope_); if (delegate->ShouldYield()) return; } } @@ -184,17 +188,15 @@ void CreateMapForType(Isolate* isolate, const WasmModule* module, uint32_t canonical_type_index = module->isorecursive_canonical_type_ids[type_index]; - if (v8_flags.wasm_type_canonicalization) { - // Try to find the canonical map for this type in the isolate store. - canonical_rtts = handle(isolate->heap()->wasm_canonical_rtts(), isolate); - DCHECK_GT(static_cast<uint32_t>(canonical_rtts->length()), - canonical_type_index); - MaybeObject maybe_canonical_map = canonical_rtts->Get(canonical_type_index); - if (maybe_canonical_map.IsStrongOrWeak() && - maybe_canonical_map.GetHeapObject().IsMap()) { - maps->set(type_index, maybe_canonical_map.GetHeapObject()); - return; - } + // Try to find the canonical map for this type in the isolate store. + canonical_rtts = handle(isolate->heap()->wasm_canonical_rtts(), isolate); + DCHECK_GT(static_cast<uint32_t>(canonical_rtts->length()), + canonical_type_index); + MaybeObject maybe_canonical_map = canonical_rtts->Get(canonical_type_index); + if (maybe_canonical_map.IsStrongOrWeak() && + maybe_canonical_map.GetHeapObject().IsMap()) { + maps->set(type_index, maybe_canonical_map.GetHeapObject()); + return; } Handle<Map> rtt_parent; @@ -220,9 +222,7 @@ void CreateMapForType(Isolate* isolate, const WasmModule* module, map = CreateFuncRefMap(isolate, module, rtt_parent, instance); break; } - if (v8_flags.wasm_type_canonicalization) { - canonical_rtts->Set(canonical_type_index, HeapObjectReference::Weak(*map)); - } + canonical_rtts->Set(canonical_type_index, HeapObjectReference::Weak(*map)); maps->set(type_index, *map); } @@ -521,12 +521,12 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { // From here on, we expect the build pipeline to run without exiting to JS. DisallowJavascriptExecution no_js(isolate_); - // Record build time into correct bucket, then build instance. - TimedHistogramScope wasm_instantiate_module_time_scope(SELECT_WASM_COUNTER( - isolate_->counters(), module_->origin, wasm_instantiate, module_time)); - v8::metrics::WasmModuleInstantiated wasm_module_instantiated; + // Start a timer for instantiation time, if we have a high resolution timer. 
base::ElapsedTimer timer; - timer.Start(); + if (base::TimeTicks::IsHighResolution()) { + timer.Start(); + } + v8::metrics::WasmModuleInstantiated wasm_module_instantiated; NativeModule* native_module = module_object_->native_module(); //-------------------------------------------------------------------------- @@ -657,10 +657,8 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { //-------------------------------------------------------------------------- // Set up table storage space. //-------------------------------------------------------------------------- - if (v8_flags.wasm_type_canonicalization) { - instance->set_isorecursive_canonical_types( - module_->isorecursive_canonical_type_ids.data()); - } + instance->set_isorecursive_canonical_types( + module_->isorecursive_canonical_type_ids.data()); int table_count = static_cast<int>(module_->tables.size()); { for (int i = 0; i < table_count; i++) { @@ -719,15 +717,11 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { // list. //-------------------------------------------------------------------------- if (enabled_.has_gc()) { - if (v8_flags.wasm_type_canonicalization && - module_->isorecursive_canonical_type_ids.size() > 0) { - uint32_t maximum_canonical_type_index = - *std::max_element(module_->isorecursive_canonical_type_ids.begin(), - module_->isorecursive_canonical_type_ids.end()); + if (module_->isorecursive_canonical_type_ids.size() > 0) { // Make sure all canonical indices have been set. - DCHECK_NE(maximum_canonical_type_index, kNoSuperType); + DCHECK_NE(module_->MaxCanonicalTypeIndex(), kNoSuperType); isolate_->heap()->EnsureWasmCanonicalRttsSize( - maximum_canonical_type_index + 1); + module_->MaxCanonicalTypeIndex() + 1); } Handle<FixedArray> maps = isolate_->factory()->NewFixedArray( static_cast<int>(module_->types.size())); @@ -747,8 +741,7 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { instance->set_feedback_vectors(*vectors); for (int i = 0; i < num_functions; i++) { int func_index = module_->num_imported_functions + i; - int slots = - base::Relaxed_Load(&module_->functions[func_index].feedback_slots); + int slots = NumFeedbackSlots(module_, func_index); if (slots == 0) continue; if (v8_flags.trace_wasm_speculative_inlining) { PrintF("[Function %d (declared %d): allocating %d feedback slots]\n", @@ -828,9 +821,13 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { if (module_->start_function_index >= 0) { int start_index = module_->start_function_index; auto& function = module_->functions[start_index]; + uint32_t canonical_sig_index = + module_->isorecursive_canonical_type_ids[module_->functions[start_index] + .sig_index]; Handle<CodeT> wrapper_code = JSToWasmWrapperCompilationUnit::CompileJSToWasmWrapper( - isolate_, function.sig, module_, function.imported); + isolate_, function.sig, canonical_sig_index, module_, + function.imported); // TODO(clemensb): Don't generate an exported function for the start // function. Use CWasmEntry instead. 
start_function_ = WasmExportedFunction::New( @@ -858,11 +855,16 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() { TRACE("Successfully built instance for module %p\n", module_object_->native_module()); wasm_module_instantiated.success = true; - wasm_module_instantiated.wall_clock_duration_in_us = - timer.Elapsed().InMicroseconds(); - timer.Stop(); - isolate_->metrics_recorder()->DelayMainThreadEvent(wasm_module_instantiated, - context_id_); + if (timer.IsStarted()) { + base::TimeDelta instantiation_time = timer.Elapsed(); + wasm_module_instantiated.wall_clock_duration_in_us = + instantiation_time.InMicroseconds(); + SELECT_WASM_COUNTER(isolate_->counters(), module_->origin, wasm_instantiate, + module_time) + ->AddTimedSample(instantiation_time); + isolate_->metrics_recorder()->DelayMainThreadEvent(wasm_module_instantiated, + context_id_); + } return instance; } @@ -986,6 +988,7 @@ MaybeHandle<Object> InstanceBuilder::LookupImportAsm( case LookupIterator::INTEGER_INDEXED_EXOTIC: case LookupIterator::INTERCEPTOR: case LookupIterator::JSPROXY: + case LookupIterator::WASM_OBJECT: case LookupIterator::ACCESSOR: case LookupIterator::TRANSITION: return ReportLinkError("not a data property", index, import_name); @@ -1158,15 +1161,18 @@ bool InstanceBuilder::ProcessImportedFunction( WasmImportWrapperCache* cache = native_module->import_wrapper_cache(); // TODO(jkummerow): Consider precompiling CapiCallWrappers in parallel, // just like other import wrappers. - WasmCode* wasm_code = - cache->MaybeGet(kind, expected_sig, expected_arity, kNoSuspend); + uint32_t canonical_type_index = + module_->isorecursive_canonical_type_ids + [module_->functions[func_index].sig_index]; + WasmCode* wasm_code = cache->MaybeGet(kind, canonical_type_index, + expected_arity, kNoSuspend); if (wasm_code == nullptr) { WasmCodeRefScope code_ref_scope; WasmImportWrapperCache::ModificationScope cache_scope(cache); wasm_code = compiler::CompileWasmCapiCallWrapper(native_module, expected_sig); - WasmImportWrapperCache::CacheKey key(kind, expected_sig, expected_arity, - kNoSuspend); + WasmImportWrapperCache::CacheKey key(kind, canonical_type_index, + expected_arity, kNoSuspend); cache_scope[key] = wasm_code; wasm_code->IncRef(); isolate_->counters()->wasm_generated_code_size()->Increment( @@ -1203,8 +1209,11 @@ bool InstanceBuilder::ProcessImportedFunction( } NativeModule* native_module = instance->module_object().native_module(); + uint32_t canonical_type_index = + module_->isorecursive_canonical_type_ids + [module_->functions[func_index].sig_index]; WasmCode* wasm_code = native_module->import_wrapper_cache()->Get( - kind, expected_sig, expected_arity, resolved.suspend); + kind, canonical_type_index, expected_arity, resolved.suspend); DCHECK_NOT_NULL(wasm_code); ImportedFunctionEntry entry(instance, func_index); if (wasm_code->kind() == WasmCode::kWasmToJsWrapper) { @@ -1258,15 +1267,9 @@ bool InstanceBuilder::InitializeImportedIndirectFunctionTable( const WasmModule* target_module = target_instance->module_object().module(); const WasmFunction& function = target_module->functions[function_index]; - // Look up the signature's canonical id. In the case of - // !v8_flags.wasm_type_canonicalization, if there is no canonical id, then - // the signature does not appear at all in this module, so putting {-1} in - // the table will cause checks to always fail. FunctionTargetAndRef entry(target_instance, function_index); uint32_t canonicalized_sig_index = - v8_flags.wasm_type_canonicalization - ? 
target_module->isorecursive_canonical_type_ids[function.sig_index]
-            : module_->signature_map.Find(*function.sig);
+        target_module->isorecursive_canonical_type_ids[function.sig_index];
     instance->GetIndirectFunctionTable(isolate_, table_index)
         ->Set(i, canonicalized_sig_index, entry.call_target(), *entry.ref());
   }
@@ -1614,14 +1617,16 @@ void InstanceBuilder::CompileImportWrappers(
       expected_arity =
          shared.internal_formal_parameter_count_without_receiver();
     }
-
-    WasmImportWrapperCache::CacheKey key(kind, sig, expected_arity,
-                                         resolved.suspend);
+    uint32_t canonical_type_index =
+        module_->isorecursive_canonical_type_ids[module_->functions[func_index]
+                                                     .sig_index];
+    WasmImportWrapperCache::CacheKey key(kind, canonical_type_index,
+                                         expected_arity, resolved.suspend);
     if (cache_scope[key] != nullptr) {
       // Cache entry already exists, no need to compile it again.
       continue;
     }
-    import_wrapper_queue.insert(key);
+    import_wrapper_queue.insert(key, sig);
   }

   auto compile_job_task = std::make_unique<CompileImportWrapperJob>(
diff --git a/deps/v8/src/wasm/pgo.cc b/deps/v8/src/wasm/pgo.cc
new file mode 100644
index 00000000000000..8d9069bee25bd2
--- /dev/null
+++ b/deps/v8/src/wasm/pgo.cc
@@ -0,0 +1,186 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/wasm/pgo.h"
+
+#include "src/wasm/decoder.h"
+#include "src/wasm/wasm-module-builder.h"  // For {ZoneBuffer}.
+
+namespace v8::internal::wasm {
+
+class ProfileGenerator {
+ public:
+  ProfileGenerator(const WasmModule* module)
+      : module_(module),
+        type_feedback_mutex_guard_(&module->type_feedback.mutex) {}
+
+  base::OwnedVector<uint8_t> GetProfileData() {
+    ZoneBuffer buffer{&zone_};
+
+    SerializeTypeFeedback(buffer);
+    // TODO(13209): Serialize tiering information.
+
+    return base::OwnedVector<uint8_t>::Of(buffer);
+  }
+
+ private:
+  void SerializeTypeFeedback(ZoneBuffer& buffer) {
+    std::unordered_map<uint32_t, FunctionTypeFeedback>& feedback_for_function =
+        module_->type_feedback.feedback_for_function;
+
+    // Get an ordered list of function indexes, so we generate deterministic
+    // data.
+    std::vector<uint32_t> ordered_function_indexes;
+    ordered_function_indexes.reserve(feedback_for_function.size());
+    for (const auto& entry : feedback_for_function) {
+      // Skip functions for which we have no feedback.
+      if (entry.second.feedback_vector.empty()) continue;
+      ordered_function_indexes.push_back(entry.first);
+    }
+    std::sort(ordered_function_indexes.begin(), ordered_function_indexes.end());
+
+    buffer.write_u32v(static_cast<uint32_t>(ordered_function_indexes.size()));
+    for (const uint32_t func_index : ordered_function_indexes) {
+      buffer.write_u32v(func_index);
+      // Serialize {feedback_vector}.
+      const FunctionTypeFeedback& feedback =
+          feedback_for_function.at(func_index);
+      buffer.write_u32v(static_cast<uint32_t>(feedback.feedback_vector.size()));
+      for (const CallSiteFeedback& call_site_feedback :
+           feedback.feedback_vector) {
+        int cases = call_site_feedback.num_cases();
+        buffer.write_i32v(cases);
+        for (int i = 0; i < cases; ++i) {
+          buffer.write_i32v(call_site_feedback.function_index(i));
+          buffer.write_i32v(call_site_feedback.call_count(i));
+        }
+      }
+      // Serialize {call_targets}.
+      buffer.write_u32v(static_cast<uint32_t>(feedback.call_targets.size()));
+      for (uint32_t call_target : feedback.call_targets) {
+        buffer.write_u32v(call_target);
+      }
+    }
+  }
+
+ private:
+  const WasmModule* module_;
+  AccountingAllocator allocator_;
+  Zone zone_{&allocator_, "wasm::ProfileGenerator"};
+  base::MutexGuard type_feedback_mutex_guard_;
+};
+
+void DeserializeTypeFeedback(Decoder& decoder, WasmModule* module) {
+  std::unordered_map<uint32_t, FunctionTypeFeedback>& feedback_for_function =
+      module->type_feedback.feedback_for_function;
+  uint32_t num_entries = decoder.consume_u32v("num function entries");
+  CHECK_LE(num_entries, module->num_declared_functions);
+  for (uint32_t missing_entries = num_entries; missing_entries > 0;
+       --missing_entries) {
+    uint32_t function_index = decoder.consume_u32v("function index");
+    CHECK(!feedback_for_function.count(function_index));
+    FunctionTypeFeedback& feedback = feedback_for_function[function_index];
+    // Deserialize {feedback_vector}.
+    uint32_t feedback_vector_size =
+        decoder.consume_u32v("feedback vector size");
+    feedback.feedback_vector.resize(feedback_vector_size);
+    for (CallSiteFeedback& feedback : feedback.feedback_vector) {
+      int num_cases = decoder.consume_i32v("num cases");
+      if (num_cases == 0) continue;  // no feedback
+      if (num_cases == 1) {  // monomorphic
+        int called_function_index = decoder.consume_i32v("function index");
+        int call_count = decoder.consume_i32v("call count");
+        feedback = CallSiteFeedback{called_function_index, call_count};
+      } else {  // polymorphic
+        auto* polymorphic = new CallSiteFeedback::PolymorphicCase[num_cases];
+        for (int i = 0; i < num_cases; ++i) {
+          polymorphic[i].function_index =
+              decoder.consume_i32v("function index");
+          polymorphic[i].absolute_call_frequency =
+              decoder.consume_i32v("call count");
+        }
+        feedback = CallSiteFeedback{polymorphic, num_cases};
+      }
+    }
+    // Deserialize {call_targets}.
+    uint32_t num_call_targets = decoder.consume_u32v("num call targets");
+    feedback.call_targets =
+        base::OwnedVector<uint32_t>::NewForOverwrite(num_call_targets);
+    for (uint32_t& call_target : feedback.call_targets) {
+      call_target = decoder.consume_u32v("call target");
+    }
+  }
+}
+
+void RestoreProfileData(WasmModule* module,
+                        base::Vector<uint8_t> profile_data) {
+  Decoder decoder{profile_data.begin(), profile_data.end()};
+
+  DeserializeTypeFeedback(decoder, module);
+
+  CHECK(decoder.ok());
+  CHECK_EQ(decoder.pc(), decoder.end());
+}
+
+void DumpProfileToFile(const WasmModule* module,
+                       base::Vector<const uint8_t> wire_bytes) {
+  CHECK(!wire_bytes.empty());
+  // Files are named `profile-wasm-<hash>`.
+  // We use the same hash as for reported scripts, to make it easier to
+  // correlate files to wasm modules (see {CreateWasmScript}).
+  uint32_t hash = static_cast<uint32_t>(GetWireBytesHash(wire_bytes));
+  base::EmbeddedVector<char, 32> filename;
+  SNPrintF(filename, "profile-wasm-%08x", hash);
+
+  ProfileGenerator profile_generator{module};
+  base::OwnedVector<uint8_t> profile_data = profile_generator.GetProfileData();
+
+  PrintF("Dumping Wasm PGO data to file '%s' (%zu bytes)\n", filename.begin(),
+         profile_data.size());
+  if (FILE* file = base::OS::FOpen(filename.begin(), "wb")) {
+    size_t written = fwrite(profile_data.begin(), 1, profile_data.size(), file);
+    CHECK_EQ(profile_data.size(), written);
+    base::Fclose(file);
+  }
+}
+
+void LoadProfileFromFile(WasmModule* module,
+                         base::Vector<const uint8_t> wire_bytes) {
+  CHECK(!wire_bytes.empty());
+  // Files are named `profile-wasm-<hash>`.
+  // We use the same hash as for reported scripts, to make it easier to
+  // correlate files to wasm modules (see {CreateWasmScript}).
+  uint32_t hash = static_cast<uint32_t>(GetWireBytesHash(wire_bytes));
+  base::EmbeddedVector<char, 32> filename;
+  SNPrintF(filename, "profile-wasm-%08x", hash);
+
+  FILE* file = base::OS::FOpen(filename.begin(), "rb");
+  if (!file) {
+    PrintF("No Wasm PGO data found: Cannot open file '%s'\n", filename.begin());
+    return;
+  }
+
+  fseek(file, 0, SEEK_END);
+  size_t size = ftell(file);
+  rewind(file);
+
+  PrintF("Loading Wasm PGO data from file '%s' (%zu bytes)\n", filename.begin(),
+         size);
+  base::OwnedVector<uint8_t> profile_data =
+      base::OwnedVector<uint8_t>::NewForOverwrite(size);
+  for (size_t read = 0; read < size;) {
+    read += fread(profile_data.begin() + read, 1, size - read, file);
+    CHECK(!ferror(file));
+  }
+
+  base::Fclose(file);
+
+  RestoreProfileData(module, profile_data.as_vector());
+
+  // Check that the generated profile is deterministic.
+  DCHECK_EQ(profile_data.as_vector(),
+            ProfileGenerator{module}.GetProfileData().as_vector());
+}
+
+} // namespace v8::internal::wasm
diff --git a/deps/v8/src/wasm/pgo.h b/deps/v8/src/wasm/pgo.h
new file mode 100644
index 00000000000000..31a2269896e834
--- /dev/null
+++ b/deps/v8/src/wasm/pgo.h
@@ -0,0 +1,26 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#if !V8_ENABLE_WEBASSEMBLY
+#error This header should only be included if WebAssembly is enabled.
+#endif  // !V8_ENABLE_WEBASSEMBLY
+
+#ifndef V8_WASM_PGO_H_
+#define V8_WASM_PGO_H_
+
+#include "src/base/vector.h"
+
+namespace v8::internal::wasm {
+
+struct WasmModule;
+
+void DumpProfileToFile(const WasmModule* module,
+                       base::Vector<const uint8_t> wire_bytes);
+
+void LoadProfileFromFile(WasmModule* module,
+                         base::Vector<const uint8_t> wire_bytes);
+
+}  // namespace v8::internal::wasm
+
+#endif  // V8_WASM_PGO_H_
diff --git a/deps/v8/src/wasm/signature-map.cc b/deps/v8/src/wasm/signature-map.cc
deleted file mode 100644
index 5d449a9ee70c2b..00000000000000
--- a/deps/v8/src/wasm/signature-map.cc
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2016 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/wasm/signature-map.h"
-
-#include "src/codegen/signature.h"
-
-namespace v8 {
-namespace internal {
-namespace wasm {
-
-uint32_t SignatureMap::FindOrInsert(const FunctionSig& sig) {
-  CHECK(!frozen_);
-  auto pos = map_.find(sig);
-  if (pos != map_.end()) return pos->second;
-  // Indexes are returned as int32_t, thus check against their limit.
- CHECK_GE(kMaxInt, map_.size()); - uint32_t index = static_cast<uint32_t>(map_.size()); - map_.insert(std::make_pair(sig, index)); - return index; -} - -int32_t SignatureMap::Find(const FunctionSig& sig) const { - auto pos = map_.find(sig); - if (pos == map_.end()) return -1; - return static_cast<int32_t>(pos->second); -} - -} // namespace wasm -} // namespace internal -} // namespace v8 diff --git a/deps/v8/src/wasm/signature-map.h b/deps/v8/src/wasm/signature-map.h deleted file mode 100644 index 3b4b7c96202fc9..00000000000000 --- a/deps/v8/src/wasm/signature-map.h +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2016 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#if !V8_ENABLE_WEBASSEMBLY -#error This header should only be included if WebAssembly is enabled. -#endif // !V8_ENABLE_WEBASSEMBLY - -#ifndef V8_WASM_SIGNATURE_MAP_H_ -#define V8_WASM_SIGNATURE_MAP_H_ - -#include <unordered_map> - -#include "src/base/functional.h" -#include "src/codegen/signature.h" -#include "src/wasm/value-type.h" - -namespace v8 { -namespace internal { - -namespace wasm { - -// A signature map canonicalizes signatures into a range of indices so that -// two different {FunctionSig} instances with the same contents map to the -// same index. -class V8_EXPORT_PRIVATE SignatureMap { - public: - // Allow default construction and move construction (because we have vectors - // of objects containing SignatureMaps), but disallow copy or assign. It's - // too easy to get security bugs by accidentally updating a copy of the map. - MOVE_ONLY_WITH_DEFAULT_CONSTRUCTORS(SignatureMap); - - // Gets the index for a signature, assigning a new index if necessary. - uint32_t FindOrInsert(const FunctionSig& sig); - - // Gets the index for a signature, returning {-1} if not found. - int32_t Find(const FunctionSig& sig) const; - - // Disallows further insertions to this signature map. 
- void Freeze() { frozen_ = true; } - - size_t size() const { return map_.size(); } - - bool is_frozen() const { return frozen_; } - - private: - bool frozen_ = false; - std::unordered_map<FunctionSig, uint32_t, base::hash<FunctionSig>> map_; -}; - -} // namespace wasm -} // namespace internal -} // namespace v8 - -#endif // V8_WASM_SIGNATURE_MAP_H_ diff --git a/deps/v8/src/wasm/streaming-decoder.h b/deps/v8/src/wasm/streaming-decoder.h index 15a05024a2adec..229219ec226b67 100644 --- a/deps/v8/src/wasm/streaming-decoder.h +++ b/deps/v8/src/wasm/streaming-decoder.h @@ -104,10 +104,11 @@ class V8_EXPORT_PRIVATE StreamingDecoder { virtual void NotifyNativeModuleCreated( const std::shared_ptr<NativeModule>& native_module) = 0; - base::Vector<const char> url() { return base::VectorOf(url_); } + const std::string& url() const { return *url_; } + std::shared_ptr<const std::string> shared_url() const { return url_; } void SetUrl(base::Vector<const char> url) { - url_.assign(url.begin(), url.length()); + url_->assign(url.begin(), url.size()); } static std::unique_ptr<StreamingDecoder> CreateAsyncStreamingDecoder( @@ -121,7 +122,7 @@ class V8_EXPORT_PRIVATE StreamingDecoder { protected: bool deserializing() const { return !compiled_module_bytes_.empty(); } - std::string url_; + const std::shared_ptr<std::string> url_ = std::make_shared<std::string>(); MoreFunctionsCanBeSerializedCallback more_functions_can_be_serialized_callback_; // The content of `compiled_module_bytes_` shouldn't be used until diff --git a/deps/v8/src/wasm/sync-streaming-decoder.cc b/deps/v8/src/wasm/sync-streaming-decoder.cc index ffed424c1a6332..ad0ecbdd7ddf78 100644 --- a/deps/v8/src/wasm/sync-streaming-decoder.cc +++ b/deps/v8/src/wasm/sync-streaming-decoder.cc @@ -49,7 +49,8 @@ class V8_EXPORT_PRIVATE SyncStreamingDecoder : public StreamingDecoder { MaybeHandle<WasmModuleObject> module_object = DeserializeNativeModule( isolate_, compiled_module_bytes_, - base::Vector<const uint8_t>(bytes.get(), buffer_size_), url()); + base::Vector<const uint8_t>(bytes.get(), buffer_size_), + base::VectorOf(url())); if (!module_object.is_null()) { Handle<WasmModuleObject> module = module_object.ToHandleChecked(); diff --git a/deps/v8/src/wasm/value-type.h b/deps/v8/src/wasm/value-type.h index fed87cfb99da49..da001f037a6c1d 100644 --- a/deps/v8/src/wasm/value-type.h +++ b/deps/v8/src/wasm/value-type.h @@ -666,6 +666,7 @@ class ValueType { uint32_t bit_field_; }; +ASSERT_TRIVIALLY_COPYABLE(ValueType); inline constexpr intptr_t ValueType::kBitFieldOffset = offsetof(ValueType, bit_field_); diff --git a/deps/v8/src/wasm/wasm-code-manager.cc b/deps/v8/src/wasm/wasm-code-manager.cc index 51874973e1dab4..6f9bd745a93d81 100644 --- a/deps/v8/src/wasm/wasm-code-manager.cc +++ b/deps/v8/src/wasm/wasm-code-manager.cc @@ -34,6 +34,7 @@ #include "src/wasm/jump-table-assembler.h" #include "src/wasm/module-compiler.h" #include "src/wasm/names-provider.h" +#include "src/wasm/pgo.h" #include "src/wasm/wasm-debug.h" #include "src/wasm/wasm-engine.h" #include "src/wasm/wasm-import-wrapper-cache.h" @@ -151,7 +152,7 @@ base::AddressRegion DisjointAllocationPool::AllocateInRegion( } Address WasmCode::constant_pool() const { - if (v8_flags.enable_embedded_constant_pool) { + if (V8_EMBEDDED_CONSTANT_POOL_BOOL) { if (constant_pool_offset_ < code_comments_offset_) { return instruction_start() + constant_pool_offset_; } @@ -2335,16 +2336,6 @@ std::vector<std::unique_ptr<WasmCode>> NativeModule::AddCompiledCode( for (auto& result : results) { DCHECK(result.succeeded()); 
total_code_space += RoundUp<kCodeAlignment>(result.code_desc.instr_size); - if (result.result_tier == ExecutionTier::kLiftoff) { - int index = result.func_index; - int* slots = &module()->functions[index].feedback_slots; -#if DEBUG - int current_value = base::Relaxed_Load(slots); - DCHECK(current_value == 0 || - current_value == result.feedback_vector_slots); -#endif - base::Relaxed_Store(slots, result.feedback_vector_slots); - } } base::Vector<byte> code_space; NativeModule::JumpTablesRef jump_tables; diff --git a/deps/v8/src/wasm/wasm-debug.cc b/deps/v8/src/wasm/wasm-debug.cc index 802c7fa9402c0c..d64a9c7032647b 100644 --- a/deps/v8/src/wasm/wasm-debug.cc +++ b/deps/v8/src/wasm/wasm-debug.cc @@ -783,13 +783,13 @@ int FindNextBreakablePosition(wasm::NativeModule* native_module, int func_index, int offset_in_func) { AccountingAllocator alloc; Zone tmp(&alloc, ZONE_NAME); - wasm::BodyLocalDecls locals(&tmp); + wasm::BodyLocalDecls locals; const byte* module_start = native_module->wire_bytes().begin(); const wasm::WasmFunction& func = native_module->module()->functions[func_index]; wasm::BytecodeIterator iterator(module_start + func.code.offset(), module_start + func.code.end_offset(), - &locals); + &locals, &tmp); DCHECK_LT(0, locals.encoded_size); if (offset_in_func < 0) return 0; for (; iterator.has_next(); iterator.next()) { @@ -1099,10 +1099,10 @@ bool WasmScript::GetPossibleBreakpoints( const wasm::WasmFunction& func = functions[func_idx]; if (func.code.length() == 0) continue; - wasm::BodyLocalDecls locals(&tmp); + wasm::BodyLocalDecls locals; wasm::BytecodeIterator iterator(module_start + func.code.offset(), module_start + func.code.end_offset(), - &locals); + &locals, &tmp); DCHECK_LT(0u, locals.encoded_size); for (; iterator.has_next(); iterator.next()) { uint32_t total_offset = func.code.offset() + iterator.pc_offset(); diff --git a/deps/v8/src/wasm/wasm-disassembler.cc b/deps/v8/src/wasm/wasm-disassembler.cc index d43b911e48b96d..3504b6d998e37d 100644 --- a/deps/v8/src/wasm/wasm-disassembler.cc +++ b/deps/v8/src/wasm/wasm-disassembler.cc @@ -168,7 +168,6 @@ void FunctionBodyDisassembler::DecodeAsWat(MultiLineStringBuilder& out, // Decode and print locals. uint32_t locals_length; - InitializeLocalsFromSig(); DecodeLocals(pc_, &locals_length); if (failed()) { // TODO(jkummerow): Improve error handling. diff --git a/deps/v8/src/wasm/wasm-engine.cc b/deps/v8/src/wasm/wasm-engine.cc index ddb45653511c08..7fc49ed058b77b 100644 --- a/deps/v8/src/wasm/wasm-engine.cc +++ b/deps/v8/src/wasm/wasm-engine.cc @@ -23,6 +23,7 @@ #include "src/wasm/module-compiler.h" #include "src/wasm/module-decoder.h" #include "src/wasm/module-instantiate.h" +#include "src/wasm/pgo.h" #include "src/wasm/stacks.h" #include "src/wasm/streaming-decoder.h" #include "src/wasm/wasm-debug.h" @@ -510,14 +511,12 @@ MaybeHandle<AsmWasmData> WasmEngine::SyncCompileTranslatedAsmJs( // Transfer ownership of the WasmModule to the {Managed<WasmModule>} generated // in {CompileToNativeModule}. 
- Handle<FixedArray> export_wrappers; std::shared_ptr<NativeModule> native_module = CompileToNativeModule( isolate, WasmFeatures::ForAsmjs(), thrower, std::move(result).value(), - bytes, &export_wrappers, compilation_id, context_id); + bytes, compilation_id, context_id); if (!native_module) return {}; - return AsmWasmData::New(isolate, std::move(native_module), export_wrappers, - uses_bitset); + return AsmWasmData::New(isolate, std::move(native_module), uses_bitset); } Handle<WasmModuleObject> WasmEngine::FinalizeTranslatedAsmJs( @@ -525,10 +524,8 @@ Handle<WasmModuleObject> WasmEngine::FinalizeTranslatedAsmJs( Handle<Script> script) { std::shared_ptr<NativeModule> native_module = asm_wasm_data->managed_native_module().get(); - Handle<FixedArray> export_wrappers = - handle(asm_wasm_data->export_wrappers(), isolate); - Handle<WasmModuleObject> module_object = WasmModuleObject::New( - isolate, std::move(native_module), script, export_wrappers); + Handle<WasmModuleObject> module_object = + WasmModuleObject::New(isolate, std::move(native_module), script); return module_object; } @@ -559,10 +556,9 @@ MaybeHandle<WasmModuleObject> WasmEngine::SyncCompile( // Transfer ownership of the WasmModule to the {Managed<WasmModule>} generated // in {CompileToNativeModule}. - Handle<FixedArray> export_wrappers; std::shared_ptr<NativeModule> native_module = CompileToNativeModule(isolate, enabled, thrower, std::move(module), bytes, - &export_wrappers, compilation_id, context_id); + compilation_id, context_id); if (!native_module) return {}; #ifdef DEBUG @@ -586,8 +582,8 @@ MaybeHandle<WasmModuleObject> WasmEngine::SyncCompile( // and information needed at instantiation time. This object needs to be // serializable. Instantiation may occur off a deserialized version of this // object. - Handle<WasmModuleObject> module_object = WasmModuleObject::New( - isolate, std::move(native_module), script, export_wrappers); + Handle<WasmModuleObject> module_object = + WasmModuleObject::New(isolate, std::move(native_module), script); // Finish the Wasm script now and make it public to the debugger. 
isolate->debug()->OnAfterCompile(script); @@ -870,10 +866,8 @@ Handle<WasmModuleObject> WasmEngine::ImportNativeModule( ModuleWireBytes wire_bytes(native_module->wire_bytes()); Handle<Script> script = GetOrCreateScript(isolate, shared_native_module, source_url); - Handle<FixedArray> export_wrappers; - CompileJsToWasmWrappers(isolate, native_module->module(), &export_wrappers); - Handle<WasmModuleObject> module_object = WasmModuleObject::New( - isolate, std::move(shared_native_module), script, export_wrappers); + Handle<WasmModuleObject> module_object = + WasmModuleObject::New(isolate, std::move(shared_native_module), script); { base::MutexGuard lock(&mutex_); DCHECK_EQ(1, isolates_.count(isolate)); diff --git a/deps/v8/src/wasm/wasm-import-wrapper-cache.cc b/deps/v8/src/wasm/wasm-import-wrapper-cache.cc index 7c6ca19f08f8d8..bbe3a480e17fb8 100644 --- a/deps/v8/src/wasm/wasm-import-wrapper-cache.cc +++ b/deps/v8/src/wasm/wasm-import-wrapper-cache.cc @@ -23,23 +23,25 @@ WasmCode*& WasmImportWrapperCache::operator[]( } WasmCode* WasmImportWrapperCache::Get(compiler::WasmImportCallKind kind, - const FunctionSig* sig, + uint32_t canonical_type_index, int expected_arity, Suspend suspend) const { base::MutexGuard lock(&mutex_); - auto it = entry_map_.find({kind, sig, expected_arity, suspend}); + auto it = + entry_map_.find({kind, canonical_type_index, expected_arity, suspend}); DCHECK(it != entry_map_.end()); return it->second; } WasmCode* WasmImportWrapperCache::MaybeGet(compiler::WasmImportCallKind kind, - const FunctionSig* sig, + uint32_t canonical_type_index, int expected_arity, Suspend suspend) const { base::MutexGuard lock(&mutex_); - auto it = entry_map_.find({kind, sig, expected_arity, suspend}); + auto it = + entry_map_.find({kind, canonical_type_index, expected_arity, suspend}); if (it == entry_map_.end()) return nullptr; return it->second; } diff --git a/deps/v8/src/wasm/wasm-import-wrapper-cache.h b/deps/v8/src/wasm/wasm-import-wrapper-cache.h index f12b07477fe16f..e07431dd94fa7f 100644 --- a/deps/v8/src/wasm/wasm-import-wrapper-cache.h +++ b/deps/v8/src/wasm/wasm-import-wrapper-cache.h @@ -28,22 +28,23 @@ using FunctionSig = Signature<ValueType>; class WasmImportWrapperCache { public: struct CacheKey { - CacheKey(const compiler::WasmImportCallKind& _kind, const FunctionSig* _sig, - int _expected_arity, Suspend _suspend) - : kind(_kind), - signature(_sig), - expected_arity(_expected_arity == kDontAdaptArgumentsSentinel + CacheKey(const compiler::WasmImportCallKind& kind, + uint32_t canonical_type_index, int expected_arity, Suspend suspend) + : kind(kind), + canonical_type_index(canonical_type_index), + expected_arity(expected_arity == kDontAdaptArgumentsSentinel ? 
0 - : _expected_arity), - suspend(_suspend) {} + : expected_arity), + suspend(suspend) {} bool operator==(const CacheKey& rhs) const { - return kind == rhs.kind && signature == rhs.signature && + return kind == rhs.kind && + canonical_type_index == rhs.canonical_type_index && expected_arity == rhs.expected_arity && suspend == rhs.suspend; } compiler::WasmImportCallKind kind; - const FunctionSig* signature; + uint32_t canonical_type_index; int expected_arity; Suspend suspend; }; @@ -51,8 +52,8 @@ class WasmImportWrapperCache { class CacheKeyHash { public: size_t operator()(const CacheKey& key) const { - return base::hash_combine(static_cast<uint8_t>(key.kind), key.signature, - key.expected_arity); + return base::hash_combine(static_cast<uint8_t>(key.kind), + key.canonical_type_index, key.expected_arity); } }; @@ -75,11 +76,12 @@ class WasmImportWrapperCache { // Thread-safe. Assumes the key exists in the map. V8_EXPORT_PRIVATE WasmCode* Get(compiler::WasmImportCallKind kind, - const FunctionSig* sig, int expected_arity, - Suspend suspend) const; + uint32_t canonical_type_index, + int expected_arity, Suspend suspend) const; // Thread-safe. Returns nullptr if the key doesn't exist in the map. - WasmCode* MaybeGet(compiler::WasmImportCallKind kind, const FunctionSig* sig, - int expected_arity, Suspend suspend) const; + WasmCode* MaybeGet(compiler::WasmImportCallKind kind, + uint32_t canonical_type_index, int expected_arity, + Suspend suspend) const; ~WasmImportWrapperCache(); diff --git a/deps/v8/src/wasm/wasm-init-expr.h b/deps/v8/src/wasm/wasm-init-expr.h index c08aa7dece417b..8d5be635a2a17f 100644 --- a/deps/v8/src/wasm/wasm-init-expr.h +++ b/deps/v8/src/wasm/wasm-init-expr.h @@ -24,6 +24,7 @@ class WasmFeatures; // Representation of an constant expression. Unlike {ConstantExpression}, this // does not use {WireBytesRef}, i.e., it does not depend on a wasm module's // bytecode representation. +// TODO(manoskouk): Add missing kinds of expressions. class WasmInitExpr : public ZoneObject { public: enum Operator { diff --git a/deps/v8/src/wasm/wasm-js.cc b/deps/v8/src/wasm/wasm-js.cc index 3e8931f378b034..7806d885676bca 100644 --- a/deps/v8/src/wasm/wasm-js.cc +++ b/deps/v8/src/wasm/wasm-js.cc @@ -17,6 +17,7 @@ #include "src/execution/execution.h" #include "src/execution/isolate.h" #include "src/execution/messages.h" +#include "src/flags/flags.h" #include "src/handles/handles.h" #include "src/heap/factory.h" #include "src/objects/fixed-array.h" @@ -84,10 +85,9 @@ class WasmStreaming::WasmStreamingImpl { std::function<void(CompiledWasmModule)> callback) { streaming_decoder_->SetMoreFunctionsCanBeSerializedCallback( [callback = std::move(callback), - streaming_decoder = streaming_decoder_]( + url = streaming_decoder_->shared_url()]( const std::shared_ptr<i::wasm::NativeModule>& native_module) { - base::Vector<const char> url = streaming_decoder->url(); - callback(CompiledWasmModule{native_module, url.begin(), url.size()}); + callback(CompiledWasmModule{native_module, url->data(), url->size()}); }); } @@ -1565,8 +1565,6 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) { break; } case i::wasm::kRtt: - // TODO(7748): Implement. - UNIMPLEMENTED(); case i::wasm::kI8: case i::wasm::kI16: case i::wasm::kVoid: @@ -2725,7 +2723,6 @@ void WebAssemblyGlobalGetValueCommon( break; } case i::wasm::kRtt: - UNIMPLEMENTED(); // TODO(7748): Implement. 
case i::wasm::kI8: case i::wasm::kI16: case i::wasm::kBottom: @@ -2992,9 +2989,10 @@ void WasmJs::Install(Isolate* isolate, bool exposed_on_global_object) { InstallFunc(isolate, webassembly, "validate", WebAssemblyValidate, 1); InstallFunc(isolate, webassembly, "instantiate", WebAssemblyInstantiate, 1); - // TODO(tebbi): Put this behind its own flag once --wasm-gc-js-interop gets - // closer to shipping. - if (v8_flags.wasm_gc_js_interop) { + // TODO(7748): These built-ins should not be shipped with wasm GC. + // Either a new flag will be needed or the built-ins have to be deleted prior + // to shipping. + if (v8_flags.experimental_wasm_gc) { SimpleInstallFunction( isolate, webassembly, "experimentalConvertArrayToString", Builtin::kExperimentalWasmConvertArrayToString, 0, true); diff --git a/deps/v8/src/wasm/wasm-module.cc b/deps/v8/src/wasm/wasm-module.cc index 85a415c2ef09e1..5ad67a18174a16 100644 --- a/deps/v8/src/wasm/wasm-module.cc +++ b/deps/v8/src/wasm/wasm-module.cc @@ -64,31 +64,8 @@ bool LazilyGeneratedNames::Has(uint32_t function_index) { return function_names_.Get(function_index) != nullptr; } -// static -int MaxNumExportWrappers(const WasmModule* module) { - // For each signature there may exist a wrapper, both for imported and - // internal functions. - return static_cast<int>(module->signature_map.size()) * 2; -} - -int GetExportWrapperIndexInternal(const WasmModule* module, - int canonical_sig_index, bool is_import) { - if (is_import) canonical_sig_index += module->signature_map.size(); - return canonical_sig_index; -} - -int GetExportWrapperIndex(const WasmModule* module, const FunctionSig* sig, - bool is_import) { - int canonical_sig_index = module->signature_map.Find(*sig); - CHECK_GE(canonical_sig_index, 0); - return GetExportWrapperIndexInternal(module, canonical_sig_index, is_import); -} - -int GetExportWrapperIndex(const WasmModule* module, uint32_t sig_index, - bool is_import) { - uint32_t canonical_sig_index = - module->per_module_canonical_type_ids[sig_index]; - return GetExportWrapperIndexInternal(module, canonical_sig_index, is_import); +int GetExportWrapperIndex(uint32_t canonical_sig_index, bool is_import) { + return 2 * canonical_sig_index + (is_import ? 1 : 0); } // static @@ -650,7 +627,6 @@ size_t EstimateStoredSize(const WasmModule* module) { (module->signature_zone ? module->signature_zone->allocation_size() : 0) + VectorSize(module->types) + - VectorSize(module->per_module_canonical_type_ids) + VectorSize(module->isorecursive_canonical_type_ids) + VectorSize(module->functions) + VectorSize(module->data_segments) + VectorSize(module->tables) + VectorSize(module->import_table) + @@ -689,146 +665,16 @@ size_t GetWireBytesHash(base::Vector<const uint8_t> wire_bytes) { kZeroHashSeed); } -base::OwnedVector<uint8_t> GetProfileData(const WasmModule* module) { - const TypeFeedbackStorage& type_feedback = module->type_feedback; - AccountingAllocator allocator; - Zone zone{&allocator, "wasm::GetProfileData"}; - ZoneBuffer buffer{&zone}; - base::MutexGuard mutex_guard{&type_feedback.mutex}; - - // Get an ordered list of function indexes, so we generate deterministic data. 
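The import-wrapper cache above (wasm-import-wrapper-cache.{h,cc}) now keys wrappers by a module-independent canonical type index instead of a raw FunctionSig pointer, so wrappers can be shared across modules whose signatures canonicalize to the same index. A self-contained sketch of that keying with standard containers (simplified stand-ins for base::hash_combine and WasmCode, not V8's real types):

```cpp
#include <cstdint>
#include <cstdio>
#include <functional>
#include <unordered_map>

enum class ImportCallKind : uint8_t { kJSFunction, kWasmToCapi };
enum class Suspend : uint8_t { kNoSuspend, kSuspend };

struct CacheKey {
  ImportCallKind kind;
  uint32_t canonical_type_index;
  int expected_arity;
  Suspend suspend;
  bool operator==(const CacheKey& rhs) const {
    return kind == rhs.kind &&
           canonical_type_index == rhs.canonical_type_index &&
           expected_arity == rhs.expected_arity && suspend == rhs.suspend;
  }
};

struct CacheKeyHash {
  size_t operator()(const CacheKey& key) const {
    // Rough stand-in for base::hash_combine. As in the diff, {suspend} is
    // part of equality but not of the hash; collisions are resolved by
    // operator==.
    size_t h = std::hash<uint8_t>{}(static_cast<uint8_t>(key.kind));
    h ^= std::hash<uint32_t>{}(key.canonical_type_index) + 0x9e3779b9 +
         (h << 6) + (h >> 2);
    h ^= std::hash<int>{}(key.expected_arity) + 0x9e3779b9 + (h << 6) +
         (h >> 2);
    return h;
  }
};

int main() {
  std::unordered_map<CacheKey, const char*, CacheKeyHash> cache;
  CacheKey k{ImportCallKind::kJSFunction, /*canonical*/ 7, /*arity*/ 2,
             Suspend::kNoSuspend};
  cache[k] = "wrapper-code";
  // MaybeGet-style lookup: a missing key yields null instead of asserting.
  auto it = cache.find(k);
  std::printf("%s\n", it == cache.end() ? "(null)" : it->second);
}
```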
- std::vector<uint32_t> ordered_func_indexes; - ordered_func_indexes.reserve(type_feedback.feedback_for_function.size()); - for (const auto& entry : type_feedback.feedback_for_function) { - ordered_func_indexes.push_back(entry.first); - } - std::sort(ordered_func_indexes.begin(), ordered_func_indexes.end()); - - buffer.write_u32v(static_cast<uint32_t>(ordered_func_indexes.size())); - for (const uint32_t func_index : ordered_func_indexes) { - buffer.write_u32v(func_index); - // Serialize {feedback_vector}. - const FunctionTypeFeedback& feedback = - type_feedback.feedback_for_function.at(func_index); - buffer.write_u32v(static_cast<uint32_t>(feedback.feedback_vector.size())); - for (const CallSiteFeedback& call_site_feedback : - feedback.feedback_vector) { - int cases = call_site_feedback.num_cases(); - buffer.write_i32v(cases); - for (int i = 0; i < cases; ++i) { - buffer.write_i32v(call_site_feedback.function_index(i)); - buffer.write_i32v(call_site_feedback.call_count(i)); - } - } - // Serialize {call_targets}. - buffer.write_u32v(static_cast<uint32_t>(feedback.call_targets.size())); - for (uint32_t call_target : feedback.call_targets) { - buffer.write_u32v(call_target); - } - } - return base::OwnedVector<uint8_t>::Of(buffer); -} - -void RestoreProfileData(WasmModule* module, - base::Vector<uint8_t> profile_data) { - TypeFeedbackStorage& type_feedback = module->type_feedback; - Decoder decoder{profile_data.begin(), profile_data.end()}; - uint32_t num_entries = decoder.consume_u32v("num function entries"); - CHECK_LE(num_entries, module->num_declared_functions); - for (uint32_t missing_entries = num_entries; missing_entries > 0; - --missing_entries) { - uint32_t function_index = decoder.consume_u32v("function index"); - CHECK(!type_feedback.feedback_for_function.count(function_index)); - FunctionTypeFeedback& feedback = - type_feedback.feedback_for_function[function_index]; - // Deserialize {feedback_vector}. - uint32_t feedback_vector_size = - decoder.consume_u32v("feedback vector size"); - feedback.feedback_vector.resize(feedback_vector_size); - for (CallSiteFeedback& feedback : feedback.feedback_vector) { - int num_cases = decoder.consume_i32v("num cases"); - if (num_cases == 0) continue; // no feedback - if (num_cases == 1) { // monomorphic - int called_function_index = decoder.consume_i32v("function index"); - int call_count = decoder.consume_i32v("call count"); - feedback = CallSiteFeedback{called_function_index, call_count}; - } else { // polymorphic - auto* polymorphic = new CallSiteFeedback::PolymorphicCase[num_cases]; - for (int i = 0; i < num_cases; ++i) { - polymorphic[i].function_index = - decoder.consume_i32v("function index"); - polymorphic[i].absolute_call_frequency = - decoder.consume_i32v("call count"); - } - feedback = CallSiteFeedback{polymorphic, num_cases}; - } - } - // Deserialize {call_targets}. - uint32_t num_call_targets = decoder.consume_u32v("num call targets"); - feedback.call_targets = - base::OwnedVector<uint32_t>::NewForOverwrite(num_call_targets); - for (uint32_t& call_target : feedback.call_targets) { - call_target = decoder.consume_u32v("call target"); - } - } - CHECK(decoder.ok()); - CHECK_EQ(decoder.pc(), decoder.end()); -} - -void DumpProfileToFile(const WasmModule* module, - base::Vector<const uint8_t> wire_bytes) { - CHECK(!wire_bytes.empty()); - // File are named `profile-wasm-<hash>`. - // We use the same hash as for reported scripts, to make it easier to - // correlate files to wasm modules (see {CreateWasmScript}). 
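The rewritten GetExportWrapperIndex earlier in wasm-module.cc replaces the old scheme (canonical index, offset by signature_map.size() for imports) with an interleaved layout: slot 2*i holds the export wrapper for canonical signature i, slot 2*i+1 the import wrapper. A worked compile-time check of that layout (the function body is copied from the hunk; the asserts are illustrative):

```cpp
#include <cstdint>

constexpr int GetExportWrapperIndex(uint32_t canonical_sig_index,
                                    bool is_import) {
  return 2 * canonical_sig_index + (is_import ? 1 : 0);
}

// Export/import wrappers for the same canonical signature sit side by side.
static_assert(GetExportWrapperIndex(0, false) == 0, "export slot of sig 0");
static_assert(GetExportWrapperIndex(0, true) == 1, "import slot of sig 0");
static_assert(GetExportWrapperIndex(3, false) == 6, "export slot of sig 3");
static_assert(GetExportWrapperIndex(3, true) == 7, "import slot of sig 3");

int main() { return 0; }
```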
- uint32_t hash = static_cast<uint32_t>(GetWireBytesHash(wire_bytes)); - base::EmbeddedVector<char, 32> filename; - SNPrintF(filename, "profile-wasm-%08x", hash); - base::OwnedVector<uint8_t> profile_data = GetProfileData(module); - PrintF("Dumping Wasm PGO data to file '%s' (%zu bytes)\n", filename.begin(), - profile_data.size()); - if (FILE* file = base::OS::FOpen(filename.begin(), "wb")) { - CHECK_EQ(profile_data.size(), - fwrite(profile_data.begin(), 1, profile_data.size(), file)); - base::Fclose(file); - } -} - -void LoadProfileFromFile(WasmModule* module, - base::Vector<const uint8_t> wire_bytes) { - CHECK(!wire_bytes.empty()); - // File are named `profile-wasm-<hash>`. - // We use the same hash as for reported scripts, to make it easier to - // correlate files to wasm modules (see {CreateWasmScript}). - uint32_t hash = static_cast<uint32_t>(GetWireBytesHash(wire_bytes)); - base::EmbeddedVector<char, 32> filename; - SNPrintF(filename, "profile-wasm-%08x", hash); - - FILE* file = base::OS::FOpen(filename.begin(), "rb"); - if (!file) { - PrintF("No Wasm PGO data found: Cannot open file '%s'\n", filename.begin()); - return; - } - - fseek(file, 0, SEEK_END); - size_t size = ftell(file); - rewind(file); - - PrintF("Loading Wasm PGO data from file '%s' (%zu bytes)\n", filename.begin(), - size); - base::OwnedVector<uint8_t> profile_data = - base::OwnedVector<uint8_t>::NewForOverwrite(size); - for (size_t read = 0; read < size;) { - read += fread(profile_data.begin() + read, 1, size - read, file); - CHECK(!ferror(file)); - } - - base::Fclose(file); - - RestoreProfileData(module, profile_data.as_vector()); - - // Check that the generated profile is deterministic. - DCHECK_EQ(profile_data.as_vector(), GetProfileData(module).as_vector()); +int NumFeedbackSlots(const WasmModule* module, int func_index) { + if (!v8_flags.wasm_speculative_inlining) return 0; + // TODO(clemensb): Avoid the mutex once this ships, or at least switch to a + // shared mutex. + base::MutexGuard type_feedback_guard{&module->type_feedback.mutex}; + auto it = module->type_feedback.feedback_for_function.find(func_index); + if (it == module->type_feedback.feedback_for_function.end()) return 0; + // The number of call instructions is capped by max function size. + static_assert(kV8MaxWasmFunctionSize < std::numeric_limits<int>::max() / 2); + return static_cast<int>(2 * it->second.call_targets.size()); } } // namespace v8::internal::wasm diff --git a/deps/v8/src/wasm/wasm-module.h b/deps/v8/src/wasm/wasm-module.h index 14a667759a65bb..ac90527ff9bb7f 100644 --- a/deps/v8/src/wasm/wasm-module.h +++ b/deps/v8/src/wasm/wasm-module.h @@ -20,7 +20,6 @@ #include "src/handles/handles.h" #include "src/wasm/branch-hint-map.h" #include "src/wasm/constant-expression.h" -#include "src/wasm/signature-map.h" #include "src/wasm/struct-types.h" #include "src/wasm/wasm-constants.h" #include "src/wasm/wasm-init-expr.h" @@ -64,10 +63,6 @@ struct WasmFunction { uint32_t func_index; // index into the function table. uint32_t sig_index; // index into the signature table. WireBytesRef code; // code of this function. - // Required number of slots in a feedback vector. Marked {mutable} because - // this is computed late (by Liftoff compilation), when the rest of the - // {WasmFunction} is typically considered {const}. 
- mutable int feedback_slots; bool imported; bool exported; bool declared; @@ -515,10 +510,6 @@ struct V8_EXPORT_PRIVATE WasmModule { void add_type(TypeDefinition type) { types.push_back(type); - uint32_t canonical_id = type.kind == TypeDefinition::kFunction - ? signature_map.FindOrInsert(*type.function_sig) - : 0; - per_module_canonical_type_ids.push_back(canonical_id); // Isorecursive canonical type will be computed later. isorecursive_canonical_type_ids.push_back(kNoSuperType); } @@ -570,15 +561,17 @@ struct V8_EXPORT_PRIVATE WasmModule { return supertype(index) != kNoSuperType; } + // Linear search. Returns -1 if types are empty. + int MaxCanonicalTypeIndex() const { + if (isorecursive_canonical_type_ids.empty()) return -1; + return *std::max_element(isorecursive_canonical_type_ids.begin(), + isorecursive_canonical_type_ids.end()); + } + std::vector<TypeDefinition> types; // by type index - // TODO(7748): Unify the following two arrays. - // Maps each type index to a canonical index for purposes of call_indirect. - std::vector<uint32_t> per_module_canonical_type_ids; // Maps each type index to its global (cross-module) canonical index as per // isorecursive type canonicalization. std::vector<uint32_t> isorecursive_canonical_type_ids; - // Canonicalizing map for signature indexes. - SignatureMap signature_map; std::vector<WasmFunction> functions; std::vector<WasmGlobal> globals; std::vector<WasmDataSegment> data_segments; @@ -626,16 +619,9 @@ inline bool is_asmjs_module(const WasmModule* module) { size_t EstimateStoredSize(const WasmModule* module); -// Returns the number of possible export wrappers for a given module. -V8_EXPORT_PRIVATE int MaxNumExportWrappers(const WasmModule* module); - -// Returns the wrapper index for a function in {module} with signature {sig} -// or {sig_index} and origin defined by {is_import}. -// Prefer to use the {sig_index} consuming version, as it is much faster. -int GetExportWrapperIndex(const WasmModule* module, const FunctionSig* sig, - bool is_import); -int GetExportWrapperIndex(const WasmModule* module, uint32_t sig_index, - bool is_import); +// Returns the wrapper index for a function with isorecursive canonical +// signature index {canonical_sig_index}, and origin defined by {is_import}. +int GetExportWrapperIndex(uint32_t canonical_sig_index, bool is_import); // Return the byte offset of the function identified by the given index. // The offset will be relative to the start of the module bytes. @@ -795,11 +781,8 @@ size_t PrintSignature(base::Vector<char> buffer, const wasm::FunctionSig*, V8_EXPORT_PRIVATE size_t GetWireBytesHash(base::Vector<const uint8_t> wire_bytes); -void DumpProfileToFile(const WasmModule* module, - base::Vector<const uint8_t> wire_bytes); - -void LoadProfileFromFile(WasmModule* module, - base::Vector<const uint8_t> wire_bytes); +// Get the required number of feedback slots for a function. 
+int NumFeedbackSlots(const WasmModule* module, int func_index); } // namespace v8::internal::wasm diff --git a/deps/v8/src/wasm/wasm-objects.cc b/deps/v8/src/wasm/wasm-objects.cc index d28597c8844ddd..caa2440104b152 100644 --- a/deps/v8/src/wasm/wasm-objects.cc +++ b/deps/v8/src/wasm/wasm-objects.cc @@ -56,14 +56,6 @@ enum DispatchTableElements : int { Handle<WasmModuleObject> WasmModuleObject::New( Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module, Handle<Script> script) { - Handle<FixedArray> export_wrappers = isolate->factory()->NewFixedArray(0); - return New(isolate, std::move(native_module), script, export_wrappers); -} - -// static -Handle<WasmModuleObject> WasmModuleObject::New( - Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module, - Handle<Script> script, Handle<FixedArray> export_wrappers) { Handle<Managed<wasm::NativeModule>> managed_native_module; if (script->type() == Script::TYPE_WASM) { managed_native_module = handle( @@ -79,7 +71,6 @@ Handle<WasmModuleObject> WasmModuleObject::New( } Handle<WasmModuleObject> module_object = Handle<WasmModuleObject>::cast( isolate->factory()->NewJSObject(isolate->wasm_module_constructor())); - module_object->set_export_wrappers(*export_wrappers); module_object->set_managed_native_module(*managed_native_module); module_object->set_script(*script); return module_object; @@ -462,24 +453,8 @@ void WasmTableObject::UpdateDispatchTables(Isolate* isolate, Smi::cast(dispatch_tables.get(i + kDispatchTableIndexOffset)).value(); WasmInstanceObject instance = WasmInstanceObject::cast( dispatch_tables.get(i + kDispatchTableInstanceOffset)); - const WasmModule* module = instance.module(); - int sig_id; - if (v8_flags.wasm_type_canonicalization) { - sig_id = target_instance.module() - ->isorecursive_canonical_type_ids[original_sig_id]; - } else { - // Try to avoid the signature map lookup by checking if the signature in - // {module} at {original_sig_id} matches {func->sig}. - if (module->has_signature(original_sig_id) && - *module->signature(original_sig_id) == *func->sig) { - sig_id = module->per_module_canonical_type_ids[original_sig_id]; - DCHECK_EQ(sig_id, module->signature_map.Find(*func->sig)); - } else { - // Note that {SignatureMap::Find} may return {-1} if the signature is - // not found; it will simply never match any check. 
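NumFeedbackSlots, added in the wasm-module.cc hunk above, reserves two feedback slots per recorded call site and returns zero when speculative inlining is off or the function has no recorded feedback. A runnable sketch with plain containers standing in for TypeFeedbackStorage (toy types, no mutex):

```cpp
#include <cstdint>
#include <cstdio>
#include <map>
#include <vector>

struct FunctionTypeFeedback {
  std::vector<uint32_t> call_targets;  // one entry per call instruction
};

int NumFeedbackSlots(const std::map<int, FunctionTypeFeedback>& feedback,
                     int func_index, bool speculative_inlining_enabled) {
  if (!speculative_inlining_enabled) return 0;
  auto it = feedback.find(func_index);
  if (it == feedback.end()) return 0;
  // Two slots per call site; this cannot overflow int because the number of
  // call instructions is bounded by the maximum function size.
  return static_cast<int>(2 * it->second.call_targets.size());
}

int main() {
  std::map<int, FunctionTypeFeedback> feedback;
  feedback[5] = FunctionTypeFeedback{{0, 3, 3}};  // three call sites
  std::printf("%d\n", NumFeedbackSlots(feedback, 5, true));  // 6
  std::printf("%d\n", NumFeedbackSlots(feedback, 9, true));  // 0
}
```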
- sig_id = module->signature_map.Find(*func->sig); - } - } + int sig_id = target_instance.module() + ->isorecursive_canonical_type_ids[original_sig_id]; WasmIndirectFunctionTable ift = WasmIndirectFunctionTable::cast( instance.indirect_function_tables().get(table_index)); ift.Set(entry_index, sig_id, call_target, call_ref); @@ -548,14 +523,16 @@ void WasmTableObject::UpdateDispatchTables( instance->module_object().native_module(); wasm::WasmImportWrapperCache* cache = native_module->import_wrapper_cache(); auto kind = compiler::WasmImportCallKind::kWasmToCapi; - wasm::WasmCode* wasm_code = - cache->MaybeGet(kind, &sig, param_count, wasm::kNoSuspend); + uint32_t canonical_type_index = + wasm::GetTypeCanonicalizer()->AddRecursiveGroup(&sig); + wasm::WasmCode* wasm_code = cache->MaybeGet(kind, canonical_type_index, + param_count, wasm::kNoSuspend); if (wasm_code == nullptr) { wasm::WasmCodeRefScope code_ref_scope; wasm::WasmImportWrapperCache::ModificationScope cache_scope(cache); wasm_code = compiler::CompileWasmCapiCallWrapper(native_module, &sig); - wasm::WasmImportWrapperCache::CacheKey key(kind, &sig, param_count, - wasm::kNoSuspend); + wasm::WasmImportWrapperCache::CacheKey key(kind, canonical_type_index, + param_count, wasm::kNoSuspend); cache_scope[key] = wasm_code; wasm_code->IncRef(); isolate->counters()->wasm_generated_code_size()->Increment( @@ -563,13 +540,8 @@ void WasmTableObject::UpdateDispatchTables( isolate->counters()->wasm_reloc_size()->Increment( wasm_code->reloc_info().length()); } - // Note that {SignatureMap::Find} may return {-1} if the signature is - // not found; it will simply never match any check. - // It is safe to use this even when v8_flags.wasm_type_canonicalization, as - // the C API cannot refer to user-defined types. - auto sig_id = instance->module()->signature_map.Find(sig); instance->GetIndirectFunctionTable(isolate, table_index) - ->Set(entry_index, sig_id, wasm_code->instruction_start(), + ->Set(entry_index, canonical_type_index, wasm_code->instruction_start(), WasmCapiFunctionData::cast( capi_function->shared().function_data(kAcquireLoad)) .internal() @@ -1379,25 +1351,30 @@ WasmInstanceObject::GetOrCreateWasmInternalFunction( Handle<WasmModuleObject> module_object(instance->module_object(), isolate); const WasmModule* module = module_object->module(); const WasmFunction& function = module->functions[function_index]; + uint32_t canonical_sig_index = + module->isorecursive_canonical_type_ids[function.sig_index]; + isolate->heap()->EnsureWasmCanonicalRttsSize(canonical_sig_index + 1); int wrapper_index = - GetExportWrapperIndex(module, function.sig_index, function.imported); - DCHECK_EQ(wrapper_index, - GetExportWrapperIndex(module, function.sig, function.imported)); + wasm::GetExportWrapperIndex(canonical_sig_index, function.imported); - Handle<Object> entry = - FixedArray::get(module_object->export_wrappers(), wrapper_index, isolate); + MaybeObject entry = isolate->heap()->js_to_wasm_wrappers().Get(wrapper_index); Handle<CodeT> wrapper; - if (entry->IsCodeT()) { - wrapper = Handle<CodeT>::cast(entry); + // {entry} can be cleared, {undefined}, or a ready {CodeT}. + if (entry.IsStrongOrWeak() && entry.GetHeapObject().IsCodeT()) { + wrapper = handle(CodeT::cast(entry.GetHeapObject()), isolate); } else { // The wrapper may not exist yet if no function in the exports section has // this signature. We compile it and store the wrapper in the module for // later use. 
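GetOrCreateWasmInternalFunction above now reads the wrapper from an isolate-wide weak table, recompiles when the slot is cleared or undefined, and stores the result back as a weak reference so unused wrappers can be collected. A loose analogue using std::weak_ptr (V8's weak MaybeObject slots differ in detail; this only models the get-or-compile flow):

```cpp
#include <cstdio>
#include <memory>
#include <vector>

struct WrapperCode { int id; };

std::shared_ptr<WrapperCode> GetOrCompileWrapper(
    std::vector<std::weak_ptr<WrapperCode>>& cache, int wrapper_index) {
  if (auto existing = cache[wrapper_index].lock()) {
    return existing;  // slot holds a live wrapper
  }
  // Slot is empty or its weak referent was collected: compile a fresh
  // wrapper and store only a weak reference, mirroring
  // HeapObjectReference::Weak in the hunk above.
  auto fresh = std::make_shared<WrapperCode>(WrapperCode{wrapper_index});
  cache[wrapper_index] = fresh;
  return fresh;
}

int main() {
  std::vector<std::weak_ptr<WrapperCode>> cache(8);
  auto w = GetOrCompileWrapper(cache, 3);
  std::printf("wrapper %d, cached=%d\n", w->id, !cache[3].expired());
}
```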
wrapper = wasm::JSToWasmWrapperCompilationUnit::CompileJSToWasmWrapper( - isolate, function.sig, instance->module(), function.imported); - module_object->export_wrappers().set(wrapper_index, *wrapper); + isolate, function.sig, canonical_sig_index, instance->module(), + function.imported); } + // Store the wrapper in the isolate, or make its reference weak now that we + // have a function referencing it. + isolate->heap()->js_to_wasm_wrappers().Set( + wrapper_index, HeapObjectReference::Weak(*wrapper)); auto external = Handle<WasmExternalFunction>::cast(WasmExportedFunction::New( isolate, instance, function_index, static_cast<int>(function.sig->parameter_count()), wrapper)); @@ -1434,16 +1411,26 @@ void WasmInstanceObject::ImportWasmJSFunctionIntoTable( // not found; it will simply never match any check. Zone zone(isolate->allocator(), ZONE_NAME); const wasm::FunctionSig* sig = js_function->GetSignature(&zone); - // It is safe to look up the signature this way even if - // v8_flags.wasm_type_canonicalization: Signatures created in the JS API - // cannot contain user-defined (module-dependent) types. - auto sig_id = instance->module()->signature_map.Find(*sig); + // Get the function's canonical signature index. Note that the function's + // signature may not be present in the importing module. + uint32_t canonical_sig_index = + wasm::GetTypeCanonicalizer()->AddRecursiveGroup(sig); // Compile a wrapper for the target callable. Handle<JSReceiver> callable(js_function->GetCallable(), isolate); + wasm::Suspend suspend = js_function->GetSuspend(); wasm::WasmCodeRefScope code_ref_scope; Address call_target = kNullAddress; - if (sig_id >= 0) { + + auto module_canonical_ids = + instance->module()->isorecursive_canonical_type_ids; + // TODO(manoskouk): Consider adding a set of canonical indices to the module + // to avoid this linear search. + auto sig_in_module = + std::find(module_canonical_ids.begin(), module_canonical_ids.end(), + canonical_sig_index); + + if (sig_in_module != module_canonical_ids.end()) { wasm::NativeModule* native_module = instance->module_object().native_module(); // TODO(wasm): Cache and reuse wrapper code, to avoid repeated compilation @@ -1464,7 +1451,7 @@ void WasmInstanceObject::ImportWasmJSFunctionIntoTable( } // TODO(manoskouk): Reuse js_function->wasm_to_js_wrapper_code(). wasm::WasmCompilationResult result = compiler::CompileWasmImportCallWrapper( - &env, kind, sig, false, expected_arity, resolved.suspend); + &env, kind, sig, false, expected_arity, suspend); wasm::CodeSpaceWriteScope write_scope(native_module); std::unique_ptr<wasm::WasmCode> wasm_code = native_module->AddCode( result.func_index, result.code_desc, result.frame_slot_count, @@ -1482,17 +1469,12 @@ void WasmInstanceObject::ImportWasmJSFunctionIntoTable( } // Update the dispatch table. - wasm::Suspend suspend = js_function->GetSuspend(); Handle<WasmApiFunctionRef> ref = isolate->factory()->NewWasmApiFunctionRef(callable, suspend, instance); - uint32_t canonicalized_sig_id = - v8_flags.wasm_type_canonicalization && sig_id >= 0 - ? instance->module()->isorecursive_canonical_type_ids[sig_id] - : sig_id; WasmIndirectFunctionTable::cast( instance->indirect_function_tables().get(table_index)) - .Set(entry_index, canonicalized_sig_id, call_target, *ref); + .Set(entry_index, canonical_sig_index, call_target, *ref); } // static @@ -1572,8 +1554,6 @@ wasm::WasmValue WasmStruct::GetFieldValue(uint32_t index) { return wasm::WasmValue(ref, field_type); } case wasm::kRtt: - // TODO(7748): Expose RTTs to DevTools. 
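ImportWasmJSFunctionIntoTable above checks whether the JS function's canonical signature occurs anywhere in the importing module via a linear std::find over isorecursive_canonical_type_ids (the TODO in the hunk notes that a per-module set of canonical indices would avoid the scan). The membership test in isolation:

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

bool ModuleHasCanonicalSig(const std::vector<uint32_t>& module_canonical_ids,
                           uint32_t canonical_sig_index) {
  // Linear search, as in the diff; duplicates are fine since only
  // membership matters.
  return std::find(module_canonical_ids.begin(), module_canonical_ids.end(),
                   canonical_sig_index) != module_canonical_ids.end();
}

int main() {
  std::vector<uint32_t> ids = {4, 9, 4, 17};
  std::printf("%d %d\n", ModuleHasCanonicalSig(ids, 9),
              ModuleHasCanonicalSig(ids, 5));  // 1 0
}
```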
- UNIMPLEMENTED(); case wasm::kVoid: case wasm::kBottom: UNREACHABLE(); @@ -1601,8 +1581,6 @@ wasm::WasmValue WasmArray::GetElement(uint32_t index) { return wasm::WasmValue(ref, element_type); } case wasm::kRtt: - // TODO(7748): Expose RTTs to DevTools. - UNIMPLEMENTED(); case wasm::kVoid: case wasm::kBottom: UNREACHABLE(); @@ -2219,7 +2197,7 @@ Handle<WasmExceptionTag> WasmExceptionTag::New(Isolate* isolate, int index) { Handle<AsmWasmData> AsmWasmData::New( Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module, - Handle<FixedArray> export_wrappers, Handle<HeapNumber> uses_bitset) { + Handle<HeapNumber> uses_bitset) { const WasmModule* module = native_module->module(); const bool kUsesLiftoff = false; size_t memory_estimate = @@ -2232,7 +2210,6 @@ Handle<AsmWasmData> AsmWasmData::New( Handle<AsmWasmData> result = Handle<AsmWasmData>::cast( isolate->factory()->NewStruct(ASM_WASM_DATA_TYPE, AllocationType::kOld)); result->set_managed_native_module(*managed_native_module); - result->set_export_wrappers(*export_wrappers); result->set_uses_bitset(*uses_bitset); return result; } diff --git a/deps/v8/src/wasm/wasm-objects.h b/deps/v8/src/wasm/wasm-objects.h index 18f5ec09d8ff8b..a81a23fe2fcb18 100644 --- a/deps/v8/src/wasm/wasm-objects.h +++ b/deps/v8/src/wasm/wasm-objects.h @@ -2,7 +2,6 @@ // this source code is governed by a BSD-style license that can be // found in the LICENSE file. -#include "src/base/bit-field.h" #if !V8_ENABLE_WEBASSEMBLY #error This header should only be included if WebAssembly is enabled. #endif // !V8_ENABLE_WEBASSEMBLY @@ -12,6 +11,7 @@ #include <memory> +#include "src/base/bit-field.h" #include "src/debug/interface-types.h" #include "src/objects/foreign.h" #include "src/objects/js-function.h" @@ -133,9 +133,6 @@ class WasmModuleObject V8_EXPORT_PRIVATE static Handle<WasmModuleObject> New( Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module, Handle<Script> script); - V8_EXPORT_PRIVATE static Handle<WasmModuleObject> New( - Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module, - Handle<Script> script, Handle<FixedArray> export_wrappers); // Check whether this module was generated from asm.js source. inline bool is_asm_js(); @@ -899,7 +896,7 @@ class AsmWasmData : public TorqueGeneratedAsmWasmData<AsmWasmData, Struct> { public: static Handle<AsmWasmData> New( Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module, - Handle<FixedArray> export_wrappers, Handle<HeapNumber> uses_bitset); + Handle<HeapNumber> uses_bitset); DECL_PRINTER(AsmWasmData) @@ -1003,7 +1000,7 @@ class WasmArray : public TorqueGeneratedWasmArray<WasmArray, WasmObject> { inline uint32_t element_offset(uint32_t index); inline Address ElementAddress(uint32_t index); - static int MaxLength(uint32_t element_size_bytes) { + static constexpr int MaxLength(uint32_t element_size_bytes) { // The total object size must fit into a Smi, for filler objects. To make // the behavior of Wasm programs independent from the Smi configuration, // we hard-code the smaller of the two supported ranges. 
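WasmArray::MaxLength becomes constexpr in the hunk above, so callers can evaluate it in compile-time contexts. An illustration with a placeholder cap (V8's real limit derives from the Smi range, which this sketch does not reproduce):

```cpp
#include <cstdint>

constexpr int kMaxObjectSizePlaceholder = 1 << 30;  // illustrative only

constexpr int MaxLength(uint32_t element_size_bytes) {
  return kMaxObjectSizePlaceholder / static_cast<int>(element_size_bytes);
}

// A constexpr function can now feed compile-time checks like this one:
static_assert(MaxLength(8) > 0, "8-byte elements must allow some length");

int main() { return 0; }
```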
diff --git a/deps/v8/src/wasm/wasm-objects.tq b/deps/v8/src/wasm/wasm-objects.tq index 55a7e7458da740..3607621bbfd334 100644 --- a/deps/v8/src/wasm/wasm-objects.tq +++ b/deps/v8/src/wasm/wasm-objects.tq @@ -77,11 +77,13 @@ extern class WasmExportedFunctionData extends WasmFunctionData { extern class WasmJSFunctionData extends WasmFunctionData { serialized_return_count: Smi; serialized_parameter_count: Smi; + // TODO(7748): Maybe store the canonical type index of the signature instead. serialized_signature: PodArrayOfWasmValueType; } extern class WasmCapiFunctionData extends WasmFunctionData { embedder_data: Foreign; // Managed<wasm::FuncData> + // TODO(7748): Maybe store the canonical type index of the signature instead. serialized_signature: PodArrayOfWasmValueType; } @@ -125,7 +127,6 @@ extern class WasmExceptionPackage extends JSObject; extern class WasmModuleObject extends JSObject { managed_native_module: ManagedWasmNativeModule; - export_wrappers: FixedArray; script: Script; } @@ -175,7 +176,6 @@ type WasmExportedFunction extends JSFunction; extern class AsmWasmData extends Struct { managed_native_module: ManagedWasmNativeModule; - export_wrappers: FixedArray; uses_bitset: HeapNumber; } diff --git a/deps/v8/src/wasm/wasm-opcodes.cc b/deps/v8/src/wasm/wasm-opcodes.cc index 2e8f36ce313200..cda2c11721f8bf 100644 --- a/deps/v8/src/wasm/wasm-opcodes.cc +++ b/deps/v8/src/wasm/wasm-opcodes.cc @@ -36,8 +36,8 @@ std::ostream& operator<<(std::ostream& os, const FunctionSig& sig) { bool IsJSCompatibleSignature(const FunctionSig* sig, const WasmModule* module, const WasmFeatures& enabled_features) { for (auto type : sig->all()) { - // TODO(7748): Allow structs, arrays, and rtts when their JS-interaction is - // decided on. + // Structs and arrays may only be passed via externref. + // Rtts are implicit and can not be used explicitly. 
if (type == kWasmS128 || type.is_rtt() || (type.has_index() && !module->has_signature(type.ref_index()))) { return false; diff --git a/deps/v8/src/wasm/wasm-opcodes.h b/deps/v8/src/wasm/wasm-opcodes.h index 0140dead2fac53..7d4c80d0b86642 100644 --- a/deps/v8/src/wasm/wasm-opcodes.h +++ b/deps/v8/src/wasm/wasm-opcodes.h @@ -707,11 +707,13 @@ bool V8_EXPORT_PRIVATE IsJSCompatibleSignature(const FunctionSig* sig, V(I31New, 0xfb20, _, "i31.new") \ V(I31GetS, 0xfb21, _, "i31.get_s") \ V(I31GetU, 0xfb22, _, "i31.get_u") \ - V(RefTest, 0xfb44, _, "ref.test") \ + V(RefTest, 0xfb40, _, "ref.test") \ + V(RefTestNull, 0xfb48, _, "ref.test null") \ + V(RefTestDeprecated, 0xfb44, _, "ref.test") \ V(RefCast, 0xfb45, _, "ref.cast") \ V(BrOnCast, 0xfb46, _, "br_on_cast") \ V(BrOnCastFail, 0xfb47, _, "br_on_cast_fail") \ - V(RefCastNop, 0xfb48, _, "ref.cast_nop") \ + V(RefCastNop, 0xfb4c, _, "ref.cast_nop") \ V(RefIsData, 0xfb51, _, "ref.is_data") \ V(RefIsI31, 0xfb52, _, "ref.is_i31") \ V(RefIsArray, 0xfb53, _, "ref.is_array") \ diff --git a/deps/v8/src/wasm/wasm-serialization.cc b/deps/v8/src/wasm/wasm-serialization.cc index 1b8064da1fffa6..8d6adeec0096d6 100644 --- a/deps/v8/src/wasm/wasm-serialization.cc +++ b/deps/v8/src/wasm/wasm-serialization.cc @@ -900,14 +900,10 @@ MaybeHandle<WasmModuleObject> DeserializeNativeModule( wasm_engine->UpdateNativeModuleCache(error, &shared_native_module, isolate); } - Handle<FixedArray> export_wrappers; - CompileJsToWasmWrappers(isolate, shared_native_module->module(), - &export_wrappers); - Handle<Script> script = wasm_engine->GetOrCreateScript(isolate, shared_native_module, source_url); - Handle<WasmModuleObject> module_object = WasmModuleObject::New( - isolate, shared_native_module, script, export_wrappers); + Handle<WasmModuleObject> module_object = + WasmModuleObject::New(isolate, shared_native_module, script); // Finish the Wasm script now and make it public to the debugger. isolate->debug()->OnAfterCompile(script); diff --git a/deps/v8/src/wasm/wasm-subtyping.cc b/deps/v8/src/wasm/wasm-subtyping.cc index 2e9a8f7043fb3e..8cc6ba872ff883 100644 --- a/deps/v8/src/wasm/wasm-subtyping.cc +++ b/deps/v8/src/wasm/wasm-subtyping.cc @@ -17,7 +17,6 @@ V8_INLINE bool EquivalentIndices(uint32_t index1, uint32_t index2, const WasmModule* module1, const WasmModule* module2) { DCHECK(index1 != index2 || module1 != module2); - if (!v8_flags.wasm_type_canonicalization) return false; return module1->isorecursive_canonical_type_ids[index1] == module2->isorecursive_canonical_type_ids[index2]; } @@ -99,8 +98,9 @@ bool ValidFunctionSubtypeDefinition(uint32_t subtype_index, return true; } -HeapType::Representation NullSentinelImpl(TypeInModule type) { - switch (type.type.heap_type().representation()) { +HeapType::Representation NullSentinelImpl(HeapType type, + const WasmModule* module) { + switch (type.representation()) { case HeapType::kI31: case HeapType::kNone: case HeapType::kEq: @@ -119,9 +119,8 @@ HeapType::Representation NullSentinelImpl(TypeInModule type) { case HeapType::kNoFunc: return HeapType::kNoFunc; default: - return type.module->has_signature(type.type.ref_index()) - ? HeapType::kNoFunc - : HeapType::kNone; + return module->has_signature(type.ref_index()) ? HeapType::kNoFunc + : HeapType::kNone; } } @@ -289,19 +288,8 @@ V8_NOINLINE V8_EXPORT_PRIVATE bool IsHeapSubtypeOfImpl( // The {IsSubtypeOf} entry point already has a fast path checking ValueType // equality; here we catch (ref $x) being a subtype of (ref null $x). 
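NullSentinelImpl above maps every heap type to the null sentinel of its hierarchy (none, nofunc, or noextern), and the new IsSameTypeHierarchy in the following hunk compares two types by comparing those sentinels. A toy model over a flat enum (no indexed types, so the module-dependent default branch of the real function is omitted):

```cpp
#include <cstdio>

enum class HeapTypeRep { kAny, kEq, kI31, kData, kArray, kNone,
                         kFunc, kNoFunc, kExtern, kNoExtern };

HeapTypeRep NullSentinel(HeapTypeRep t) {
  switch (t) {
    case HeapTypeRep::kAny:
    case HeapTypeRep::kEq:
    case HeapTypeRep::kI31:
    case HeapTypeRep::kData:
    case HeapTypeRep::kArray:
    case HeapTypeRep::kNone:
      return HeapTypeRep::kNone;      // the "any" hierarchy
    case HeapTypeRep::kFunc:
    case HeapTypeRep::kNoFunc:
      return HeapTypeRep::kNoFunc;    // the "func" hierarchy
    case HeapTypeRep::kExtern:
    case HeapTypeRep::kNoExtern:
      return HeapTypeRep::kNoExtern;  // the "extern" hierarchy
  }
  return HeapTypeRep::kNone;  // unreachable; silences compiler warnings
}

bool IsSameTypeHierarchy(HeapTypeRep a, HeapTypeRep b) {
  return NullSentinel(a) == NullSentinel(b);
}

int main() {
  std::printf("%d\n", IsSameTypeHierarchy(HeapTypeRep::kArray,
                                          HeapTypeRep::kI31));   // 1
  std::printf("%d\n", IsSameTypeHierarchy(HeapTypeRep::kFunc,
                                          HeapTypeRep::kData));  // 0
}
```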
if (sub_module == super_module && sub_index == super_index) return true; - - if (v8_flags.wasm_type_canonicalization) { - return GetTypeCanonicalizer()->IsCanonicalSubtype(sub_index, super_index, - sub_module, super_module); - } else { - uint32_t explicit_super = sub_module->supertype(sub_index); - while (true) { - if (explicit_super == super_index) return true; - // Reached the end of the explicitly defined inheritance chain. - if (explicit_super == kNoSuperType) return false; - explicit_super = sub_module->supertype(explicit_super); - } - } + return GetTypeCanonicalizer()->IsCanonicalSubtype(sub_index, super_index, + sub_module, super_module); } V8_NOINLINE bool EquivalentTypes(ValueType type1, ValueType type2, @@ -555,12 +543,18 @@ TypeInModule Intersection(ValueType type1, ValueType type2, } ValueType ToNullSentinel(TypeInModule type) { - HeapType::Representation null_heap = NullSentinelImpl(type); + HeapType::Representation null_heap = + NullSentinelImpl(type.type.heap_type(), type.module); DCHECK( IsHeapSubtypeOf(HeapType(null_heap), type.type.heap_type(), type.module)); return ValueType::RefNull(null_heap); } +bool IsSameTypeHierarchy(HeapType type1, HeapType type2, + const WasmModule* module) { + return NullSentinelImpl(type1, module) == NullSentinelImpl(type2, module); +} + } // namespace wasm } // namespace internal } // namespace v8 diff --git a/deps/v8/src/wasm/wasm-subtyping.h b/deps/v8/src/wasm/wasm-subtyping.h index 670fc00916a183..07630118c2f5c0 100644 --- a/deps/v8/src/wasm/wasm-subtyping.h +++ b/deps/v8/src/wasm/wasm-subtyping.h @@ -47,19 +47,26 @@ V8_NOINLINE V8_EXPORT_PRIVATE bool EquivalentTypes(ValueType type1, // - (ref ht1) <: (ref null? ht2) iff ht1 <: ht2. // - rtt1 <: rtt2 iff rtt1 ~ rtt2. // For heap types, the following subtyping rules hold: -// - The abstract heap types form the following type hierarchy: -// any (a.k.a. extern) -// / \ -// eq func +// - The abstract heap types form the following type hierarchies: +// TODO(7748): abstract ref.data should become ref.struct. +// +// any func extern +// | | | +// eq nofunc noextern // / \ // i31 data -// | -// array +// | | +// | array +// \ / +// none +// // - All functions are subtypes of func. // - All structs are subtypes of data. // - All arrays are subtypes of array. // - An indexed heap type h1 is a subtype of indexed heap type h2 if h2 is // transitively an explicit canonical supertype of h1. +// Note that {any} includes references introduced by the host which belong to +// none of any's subtypes (e.g. JS objects). V8_INLINE bool IsSubtypeOf(ValueType subtype, ValueType supertype, const WasmModule* sub_module, const WasmModule* super_module) { @@ -156,6 +163,10 @@ V8_INLINE V8_EXPORT_PRIVATE TypeInModule Intersection(TypeInModule type1, // Returns the matching abstract null type (none, nofunc, noextern). ValueType ToNullSentinel(TypeInModule type); +// Returns if two types share the same type hierarchy (any, extern, funcref). +bool IsSameTypeHierarchy(HeapType type1, HeapType type2, + const WasmModule* module); + } // namespace wasm } // namespace internal } // namespace v8 diff --git a/deps/v8/src/web-snapshot/web-snapshot.cc b/deps/v8/src/web-snapshot/web-snapshot.cc index f2f91ee47cde00..3a8a97a8c5388f 100644 --- a/deps/v8/src/web-snapshot/web-snapshot.cc +++ b/deps/v8/src/web-snapshot/web-snapshot.cc @@ -1592,7 +1592,28 @@ void WebSnapshotSerializer::SerializeObject(Handle<JSObject> object) { } // Elements. 
- SerializeElements(object, object_serializer_); + ElementsKind kind = object->GetElementsKind(); + // We only serialize the actual elements excluding the slack part. + DCHECK(!IsDoubleElementsKind(kind)); + if (!IsDictionaryElementsKind(kind)) { + uint32_t elements_length = object->elements().length(); + if (IsHoleyElementsKindForRead(kind)) { + uint32_t max_element_index = 0; + FixedArray elements = FixedArray::cast(object->elements()); + for (int i = elements_length - 1; i >= 0; i--) { + if (!elements.is_the_hole(isolate_, i)) { + max_element_index = i + 1; + break; + } + } + return SerializeElements(object, object_serializer_, + Just(max_element_index)); + } else { + return SerializeElements(object, object_serializer_, + Just(elements_length)); + } + } + SerializeElements(object, object_serializer_, Nothing<uint32_t>()); } // Format (serialized array): @@ -1606,11 +1627,17 @@ void WebSnapshotSerializer::SerializeObject(Handle<JSObject> object) { // - Element index // - Serialized value void WebSnapshotSerializer::SerializeArray(Handle<JSArray> array) { - SerializeElements(array, array_serializer_); + uint32_t length; + if (!array->length().ToUint32(&length)) { + Throw("Invalid array length"); + return; + } + SerializeElements(array, array_serializer_, Just(length)); } void WebSnapshotSerializer::SerializeElements(Handle<JSObject> object, - ValueSerializer& serializer) { + ValueSerializer& serializer, + Maybe<uint32_t> length) { // TODO(v8:11525): Handle sealed & frozen elements correctly. (Also: handle // sealed & frozen objects.) @@ -1634,9 +1661,8 @@ void WebSnapshotSerializer::SerializeElements(Handle<JSObject> object, serializer.WriteUint32(ElementsType::kDense); Handle<FixedArray> elements = handle(FixedArray::cast(object->elements()), isolate_); - uint32_t length = static_cast<uint32_t>(elements->length()); - serializer.WriteUint32(length); - for (uint32_t i = 0; i < length; ++i) { + serializer.WriteUint32(length.ToChecked()); + for (uint32_t i = 0; i < length.ToChecked(); ++i) { WriteValue(handle(elements->get(i), isolate_), serializer); } break; @@ -1646,9 +1672,8 @@ void WebSnapshotSerializer::SerializeElements(Handle<JSObject> object, serializer.WriteUint32(ElementsType::kDense); Handle<FixedDoubleArray> elements = handle(FixedDoubleArray::cast(object->elements()), isolate_); - uint32_t length = static_cast<uint32_t>(elements->length()); - serializer.WriteUint32(length); - for (uint32_t i = 0; i < length; ++i) { + serializer.WriteUint32(length.ToChecked()); + for (uint32_t i = 0; i < length.ToChecked(); ++i) { if (!elements->is_the_hole(i)) { double double_value = elements->get_scalar(i); Handle<Object> element_value = @@ -1692,7 +1717,7 @@ uint8_t WebSnapshotSerializerDeserializer::ArrayBufferKindToFlags( Handle<JSArrayBuffer> array_buffer) { return DetachedBitField::encode(array_buffer->was_detached()) | SharedBitField::encode(array_buffer->is_shared()) | - ResizableBitField::encode(array_buffer->is_resizable()); + ResizableBitField::encode(array_buffer->is_resizable_by_js()); } uint32_t WebSnapshotSerializerDeserializer::BigIntSignAndLengthToFlags( @@ -1714,9 +1739,9 @@ uint32_t WebSnapshotSerializerDeserializer::BigIntFlagsToBitField( } // Format (serialized array buffer): -// - ArrayBufferFlags, including was_detached, is_shared and is_resizable. +// - ArrayBufferFlags, including was_detached, is_shared and is_resizable_by_js. 
// - Byte length -// - if is_resizable +// - if is_resizable_by_js // - Max byte length // - Raw bytes void WebSnapshotSerializer::SerializeArrayBuffer( @@ -1729,7 +1754,7 @@ void WebSnapshotSerializer::SerializeArrayBuffer( array_buffer_serializer_.WriteByte(ArrayBufferKindToFlags(array_buffer)); array_buffer_serializer_.WriteUint32(static_cast<uint32_t>(byte_length)); - if (array_buffer->is_resizable()) { + if (array_buffer->is_resizable_by_js()) { size_t max_byte_length = array_buffer->max_byte_length(); if (max_byte_length > std::numeric_limits<uint32_t>::max()) { Throw("Too large resizable array buffer"); @@ -2269,7 +2294,7 @@ bool WebSnapshotDeserializer::Deserialize( auto buffer_size = deserializer_->end_ - deserializer_->position_; base::ElapsedTimer timer; - if (FLAG_trace_web_snapshot) { + if (v8_flags.trace_web_snapshot) { timer.Start(); } if (!DeserializeSnapshot(skip_exports)) { @@ -2279,7 +2304,7 @@ bool WebSnapshotDeserializer::Deserialize( return false; } - if (FLAG_trace_web_snapshot) { + if (v8_flags.trace_web_snapshot) { double ms = timer.Elapsed().InMillisecondsF(); PrintF("[Deserializing snapshot (%zu bytes) took %0.3f ms]\n", buffer_size, ms); @@ -2359,7 +2384,7 @@ bool WebSnapshotDeserializer::DeserializeSnapshot(bool skip_exports) { #ifdef VERIFY_HEAP // Verify the objects we produced during deserializing snapshot. - if (FLAG_verify_heap && !has_error()) { + if (v8_flags.verify_heap && !has_error()) { VerifyObjects(); } #endif @@ -3614,7 +3639,7 @@ void WebSnapshotDeserializer::DeserializeDataViews() { bool is_length_tracking = LengthTrackingBitField::decode(flags); if (is_length_tracking) { - CHECK(array_buffer->is_resizable()); + CHECK(array_buffer->is_resizable_by_js()); } else { if (!deserializer_->ReadUint32(&byte_length)) { Throw("Malformed data view"); @@ -3636,7 +3661,7 @@ void WebSnapshotDeserializer::DeserializeDataViews() { byte_offset); raw_data_view.set_is_length_tracking(is_length_tracking); raw_data_view.set_is_backed_by_rab(!raw_array_buffer.is_shared() && - raw_array_buffer.is_resizable()); + raw_array_buffer.is_resizable_by_js()); } data_views_.set(static_cast<int>(current_data_view_count_), *data_view); @@ -3700,7 +3725,7 @@ void WebSnapshotDeserializer::DeserializeTypedArrays() { bool is_length_tracking = LengthTrackingBitField::decode(flags); if (is_length_tracking) { - CHECK(array_buffer->is_resizable()); + CHECK(array_buffer->is_resizable_by_js()); } else { if (!deserializer_->ReadUint32(&byte_length)) { Throw("Malformed typed array"); @@ -3717,7 +3742,7 @@ void WebSnapshotDeserializer::DeserializeTypedArrays() { } } - bool rabGsab = array_buffer->is_resizable() && + bool rabGsab = array_buffer->is_resizable_by_js() && (!array_buffer->is_shared() || is_length_tracking); if (rabGsab) { map = handle( @@ -3738,7 +3763,7 @@ void WebSnapshotDeserializer::DeserializeTypedArrays() { raw.SetOffHeapDataPtr(isolate_, array_buffer->backing_store(), byte_offset); raw.set_is_length_tracking(is_length_tracking); - raw.set_is_backed_by_rab(array_buffer->is_resizable() && + raw.set_is_backed_by_rab(array_buffer->is_resizable_by_js() && !array_buffer->is_shared()); } @@ -3766,7 +3791,7 @@ void WebSnapshotDeserializer::DeserializeExports(bool skip_exports) { // have been deserialized. 
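The SerializeObject change earlier in web-snapshot.cc trims holey element backing stores to the last non-hole entry before serializing, scanning from the back so slack and trailing holes are dropped. The scan in isolation, with std::optional standing in for the hole sentinel:

```cpp
#include <cstdint>
#include <cstdio>
#include <optional>
#include <vector>

uint32_t UsedElementsLength(const std::vector<std::optional<int>>& elements) {
  // Walk backwards to the last present element; its index + 1 is the
  // serialized length.
  for (int i = static_cast<int>(elements.size()) - 1; i >= 0; i--) {
    if (elements[i].has_value()) return static_cast<uint32_t>(i) + 1;
  }
  return 0;  // all holes
}

int main() {
  // [1, hole, 3, hole, hole] -> serialized length 3
  std::vector<std::optional<int>> elems = {1, std::nullopt, 3, std::nullopt,
                                           std::nullopt};
  std::printf("%u\n", UsedElementsLength(elems));
}
```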
Object export_value = std::get<0>(ReadValue()); #ifdef VERIFY_HEAP - if (FLAG_verify_heap) { + if (v8_flags.verify_heap) { export_value.ObjectVerify(isolate_); } #endif @@ -3797,7 +3822,7 @@ void WebSnapshotDeserializer::DeserializeExports(bool skip_exports) { // been deserialized. Object export_value = std::get<0>(ReadValue()); #ifdef VERIFY_HEAP - if (FLAG_verify_heap) { + if (v8_flags.verify_heap) { export_value.ObjectVerify(isolate_); } #endif diff --git a/deps/v8/src/web-snapshot/web-snapshot.h b/deps/v8/src/web-snapshot/web-snapshot.h index b96efeb0f07f93..1de4efa46b3844 100644 --- a/deps/v8/src/web-snapshot/web-snapshot.h +++ b/deps/v8/src/web-snapshot/web-snapshot.h @@ -314,7 +314,8 @@ class V8_EXPORT WebSnapshotSerializer void SerializeClass(Handle<JSFunction> function); void SerializeContext(Handle<Context> context, uint32_t id); void SerializeArray(Handle<JSArray> array); - void SerializeElements(Handle<JSObject> object, ValueSerializer& serializer); + void SerializeElements(Handle<JSObject> object, ValueSerializer& serializer, + Maybe<uint32_t> length); void SerializeObject(Handle<JSObject> object); void SerializeArrayBufferView(Handle<JSArrayBufferView> array_buffer_view, ValueSerializer& serializer); diff --git a/deps/v8/test/cctest/BUILD.gn b/deps/v8/test/cctest/BUILD.gn index 0ab437883cff65..22d660104b1c72 100644 --- a/deps/v8/test/cctest/BUILD.gn +++ b/deps/v8/test/cctest/BUILD.gn @@ -116,7 +116,6 @@ v8_source_set("cctest_sources") { "compiler/test-node.cc", "compiler/test-operator.cc", "compiler/test-representation-change.cc", - "compiler/test-run-bytecode-graph-builder.cc", "compiler/test-run-calls-to-external-references.cc", "compiler/test-run-load-store.cc", "compiler/test-run-machops.cc", diff --git a/deps/v8/test/cctest/assembler-helper-arm.cc b/deps/v8/test/cctest/assembler-helper-arm.cc index ca4041dc7235a4..698005fd9eba74 100644 --- a/deps/v8/test/cctest/assembler-helper-arm.cc +++ b/deps/v8/test/cctest/assembler-helper-arm.cc @@ -21,7 +21,7 @@ Handle<Code> AssembleCodeImpl(Isolate* isolate, assm.GetCode(isolate, &desc); Handle<Code> code = Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build(); - if (FLAG_print_code) { + if (v8_flags.print_code) { code->Print(); } return code; diff --git a/deps/v8/test/cctest/cctest.cc b/deps/v8/test/cctest/cctest.cc index b5d15c53a0e9da..fddcdd3df313f2 100644 --- a/deps/v8/test/cctest/cctest.cc +++ b/deps/v8/test/cctest/cctest.cc @@ -122,7 +122,7 @@ void CcTest::Run(const char* snapshot_directory) { // Allow changing flags in cctests. // TODO(12887): Fix tests to avoid changing flag values after initialization. - i::FLAG_freeze_flags_after_init = false; + i::v8_flags.freeze_flags_after_init = false; v8::V8::Initialize(); v8::V8::InitializeExternalStartupData(snapshot_directory); @@ -221,7 +221,8 @@ void CcTest::PreciseCollectAllGarbage(i::Isolate* isolate) { void CcTest::CollectSharedGarbage(i::Isolate* isolate) { i::Isolate* iso = isolate ? 
isolate : i_isolate(); - iso->heap()->CollectSharedGarbage(i::GarbageCollectionReason::kTesting); + iso->heap()->CollectGarbageShared(iso->main_thread_local_heap(), + i::GarbageCollectionReason::kTesting); } i::Handle<i::String> CcTest::MakeString(const char* str) { @@ -435,13 +436,18 @@ bool IsValidUnwrapObject(v8::Object* object) { } ManualGCScope::ManualGCScope(i::Isolate* isolate) - : flag_concurrent_marking_(i::FLAG_concurrent_marking), - flag_concurrent_sweeping_(i::FLAG_concurrent_sweeping), - flag_stress_concurrent_allocation_(i::FLAG_stress_concurrent_allocation), - flag_stress_incremental_marking_(i::FLAG_stress_incremental_marking), - flag_parallel_marking_(i::FLAG_parallel_marking), + : flag_concurrent_marking_(i::v8_flags.concurrent_marking), + flag_concurrent_sweeping_(i::v8_flags.concurrent_sweeping), + flag_concurrent_minor_mc_marking_( + i::v8_flags.concurrent_minor_mc_marking), + flag_concurrent_minor_mc_sweeping_( + i::v8_flags.concurrent_minor_mc_sweeping), + flag_stress_concurrent_allocation_( + i::v8_flags.stress_concurrent_allocation), + flag_stress_incremental_marking_(i::v8_flags.stress_incremental_marking), + flag_parallel_marking_(i::v8_flags.parallel_marking), flag_detect_ineffective_gcs_near_heap_limit_( - i::FLAG_detect_ineffective_gcs_near_heap_limit) { + i::v8_flags.detect_ineffective_gcs_near_heap_limit) { // Some tests run threaded (back-to-back) and thus the GC may already be // running by the time a ManualGCScope is created. Finalizing existing marking // prevents any undefined/unexpected behavior. @@ -449,22 +455,26 @@ ManualGCScope::ManualGCScope(i::Isolate* isolate) CcTest::CollectGarbage(i::OLD_SPACE, isolate); } - i::FLAG_concurrent_marking = false; - i::FLAG_concurrent_sweeping = false; - i::FLAG_stress_incremental_marking = false; - i::FLAG_stress_concurrent_allocation = false; + i::v8_flags.concurrent_marking = false; + i::v8_flags.concurrent_sweeping = false; + i::v8_flags.concurrent_minor_mc_marking = false; + i::v8_flags.concurrent_minor_mc_sweeping = false; + i::v8_flags.stress_incremental_marking = false; + i::v8_flags.stress_concurrent_allocation = false; // Parallel marking has a dependency on concurrent marking. 
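The cctest churn here is largely mechanical: FLAG_foo globals become v8_flags.foo struct fields, and ManualGCScope keeps its save-in-constructor, restore-in-destructor shape over the new fields. The same RAII pattern, generic and standalone (mirroring the FlagScope helper the tests above already use):

```cpp
#include <cstdio>

template <typename T>
class FlagScope {
 public:
  FlagScope(T* flag, T new_value) : flag_(flag), previous_(*flag) {
    *flag = new_value;  // override for the lifetime of the scope
  }
  ~FlagScope() { *flag_ = previous_; }  // restore on scope exit

 private:
  T* flag_;
  T previous_;
};

// Toy stand-in for V8's flag struct.
struct Flags { bool concurrent_marking = true; } v8_flags_model;

int main() {
  {
    FlagScope<bool> scope(&v8_flags_model.concurrent_marking, false);
    std::printf("inside: %d\n", v8_flags_model.concurrent_marking);  // 0
  }
  std::printf("after: %d\n", v8_flags_model.concurrent_marking);  // 1
}
```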
- i::FLAG_parallel_marking = false; - i::FLAG_detect_ineffective_gcs_near_heap_limit = false; + i::v8_flags.parallel_marking = false; + i::v8_flags.detect_ineffective_gcs_near_heap_limit = false; } ManualGCScope::~ManualGCScope() { - i::FLAG_concurrent_marking = flag_concurrent_marking_; - i::FLAG_concurrent_sweeping = flag_concurrent_sweeping_; - i::FLAG_stress_concurrent_allocation = flag_stress_concurrent_allocation_; - i::FLAG_stress_incremental_marking = flag_stress_incremental_marking_; - i::FLAG_parallel_marking = flag_parallel_marking_; - i::FLAG_detect_ineffective_gcs_near_heap_limit = + i::v8_flags.concurrent_marking = flag_concurrent_marking_; + i::v8_flags.concurrent_sweeping = flag_concurrent_sweeping_; + i::v8_flags.concurrent_minor_mc_marking = flag_concurrent_minor_mc_marking_; + i::v8_flags.concurrent_minor_mc_sweeping = flag_concurrent_minor_mc_sweeping_; + i::v8_flags.stress_concurrent_allocation = flag_stress_concurrent_allocation_; + i::v8_flags.stress_incremental_marking = flag_stress_incremental_marking_; + i::v8_flags.parallel_marking = flag_parallel_marking_; + i::v8_flags.detect_ineffective_gcs_near_heap_limit = flag_detect_ineffective_gcs_near_heap_limit_; } diff --git a/deps/v8/test/cctest/cctest.h b/deps/v8/test/cctest/cctest.h index dedfc479f33406..ceb2db40e28f49 100644 --- a/deps/v8/test/cctest/cctest.h +++ b/deps/v8/test/cctest/cctest.h @@ -705,6 +705,8 @@ class V8_NODISCARD ManualGCScope { private: const bool flag_concurrent_marking_; const bool flag_concurrent_sweeping_; + const bool flag_concurrent_minor_mc_marking_; + const bool flag_concurrent_minor_mc_sweeping_; const bool flag_stress_concurrent_allocation_; const bool flag_stress_incremental_marking_; const bool flag_parallel_marking_; diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status index b225460be093bf..fabc7e75441db9 100644 --- a/deps/v8/test/cctest/cctest.status +++ b/deps/v8/test/cctest/cctest.status @@ -280,9 +280,6 @@ # BUG(v8:4642). 'test-lockers/LockAndUnlockDifferentIsolates': [PASS, NO_VARIANTS], - # BUG(v8:8744). 
- 'test-cpu-profiler/FunctionCallSample': [SKIP], - # BUG(10107): Failing flakily 'test-cpu-profiler/Inlining2': ['arch == ia32 and mode == debug', SKIP], 'test-cpu-profiler/CrossScriptInliningCallerLineNumbers': ['arch == ia32 and mode == debug', SKIP], @@ -558,7 +555,6 @@ 'test-debug-helper/GetObjectProperties': [SKIP], 'test-js-context-specialization/*': [SKIP], 'test-multiple-return/*': [SKIP], - 'test-run-bytecode-graph-builder/*': [SKIP], 'test-run-calls-to-external-references/*': [SKIP], 'test-run-intrinsics/*': [SKIP], diff --git a/deps/v8/test/cctest/compiler/function-tester.cc b/deps/v8/test/cctest/compiler/function-tester.cc index 220b4b6d434566..5d104467c005c5 100644 --- a/deps/v8/test/cctest/compiler/function-tester.cc +++ b/deps/v8/test/cctest/compiler/function-tester.cc @@ -23,7 +23,7 @@ namespace compiler { FunctionTester::FunctionTester(const char* source, uint32_t flags) : isolate(main_isolate()), canonical(isolate), - function((FLAG_allow_natives_syntax = true, NewFunction(source))), + function((v8_flags.allow_natives_syntax = true, NewFunction(source))), flags_(flags) { Compile(function); const uint32_t supported_flags = OptimizedCompilationInfo::kInlining; @@ -41,7 +41,7 @@ FunctionTester::FunctionTester(Graph* graph, int param_count) FunctionTester::FunctionTester(Handle<Code> code, int param_count) : isolate(main_isolate()), canonical(isolate), - function((FLAG_allow_natives_syntax = true, + function((v8_flags.allow_natives_syntax = true, NewFunction(BuildFunction(param_count).c_str()))), flags_(0) { CHECK(!code.is_null()); diff --git a/deps/v8/test/cctest/compiler/test-basic-block-profiler.cc b/deps/v8/test/cctest/compiler/test-basic-block-profiler.cc index 011bc1f11e1e1f..218944bbd10720 100644 --- a/deps/v8/test/cctest/compiler/test-basic-block-profiler.cc +++ b/deps/v8/test/cctest/compiler/test-basic-block-profiler.cc @@ -15,7 +15,7 @@ class BasicBlockProfilerTest : public RawMachineAssemblerTester<int32_t> { public: BasicBlockProfilerTest() : RawMachineAssemblerTester<int32_t>(MachineType::Int32()) { - FLAG_turbo_profiling = true; + v8_flags.turbo_profiling = true; } void ResetCounts() { diff --git a/deps/v8/test/cctest/compiler/test-calls-with-arraylike-or-spread.cc b/deps/v8/test/cctest/compiler/test-calls-with-arraylike-or-spread.cc index 7e056f4e6c1b26..dc9767865e5ae0 100644 --- a/deps/v8/test/cctest/compiler/test-calls-with-arraylike-or-spread.cc +++ b/deps/v8/test/cctest/compiler/test-calls-with-arraylike-or-spread.cc @@ -20,8 +20,8 @@ void CompileRunWithNodeObserver(const std::string& js_code, v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope handle_scope(isolate); - FLAG_allow_natives_syntax = true; - FLAG_turbo_optimize_apply = true; + v8_flags.allow_natives_syntax = true; + v8_flags.turbo_optimize_apply = true; // Note: Make sure to not capture stack locations (e.g. `this`) here since // these lambdas are executed on another thread. 
@@ -146,8 +146,8 @@ TEST(ReduceCAPICallWithArrayLike) { v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); - FLAG_allow_natives_syntax = true; - FLAG_turbo_optimize_apply = true; + v8_flags.allow_natives_syntax = true; + v8_flags.turbo_optimize_apply = true; Local<v8::FunctionTemplate> sum = v8::FunctionTemplate::New(isolate, SumF); CHECK(env->Global() diff --git a/deps/v8/test/cctest/compiler/test-code-assembler.cc b/deps/v8/test/cctest/compiler/test-code-assembler.cc index 780851a0ecb914..d37ec78c34e1d7 100644 --- a/deps/v8/test/cctest/compiler/test-code-assembler.cc +++ b/deps/v8/test/cctest/compiler/test-code-assembler.cc @@ -468,7 +468,7 @@ TEST(ExceptionHandler) { TEST(TestCodeAssemblerCodeComment) { #ifdef V8_CODE_COMMENTS - i::FLAG_code_comments = true; + i::v8_flags.code_comments = true; Isolate* isolate(CcTest::InitIsolateOnce()); const int kNumParams = 0; CodeAssemblerTester asm_tester(isolate, kNumParams); diff --git a/deps/v8/test/cctest/compiler/test-code-generator.cc b/deps/v8/test/cctest/compiler/test-code-generator.cc index e686d2a1a58759..26ca83a475c4ef 100644 --- a/deps/v8/test/cctest/compiler/test-code-generator.cc +++ b/deps/v8/test/cctest/compiler/test-code-generator.cc @@ -693,13 +693,13 @@ class TestEnvironment : public HandleAndZoneScope { // The "setup" and "teardown" functions are relatively big, and with // runtime assertions enabled they get so big that memory during register // allocation becomes a problem. Temporarily disable such assertions. - bool old_enable_slow_asserts = FLAG_enable_slow_asserts; - FLAG_enable_slow_asserts = false; + bool old_enable_slow_asserts = v8_flags.enable_slow_asserts; + v8_flags.enable_slow_asserts = false; #endif Handle<Code> setup = BuildSetupFunction(main_isolate(), test_descriptor_, layout_); #ifdef ENABLE_SLOW_DCHECKS - FLAG_enable_slow_asserts = old_enable_slow_asserts; + v8_flags.enable_slow_asserts = old_enable_slow_asserts; #endif // FunctionTester maintains its own HandleScope which means that its // return value will be freed along with it. 
Copy the result into @@ -1270,7 +1270,7 @@ TEST(FuzzAssembleMove) { } Handle<Code> test = c.FinalizeForExecuting(); - if (FLAG_print_code) { + if (v8_flags.print_code) { test->Print(); } @@ -1309,7 +1309,7 @@ TEST(FuzzAssembleParallelMove) { } Handle<Code> test = c.FinalizeForExecuting(); - if (FLAG_print_code) { + if (v8_flags.print_code) { test->Print(); } @@ -1335,7 +1335,7 @@ TEST(FuzzAssembleSwap) { } Handle<Code> test = c.FinalizeForExecuting(); - if (FLAG_print_code) { + if (v8_flags.print_code) { test->Print(); } @@ -1373,7 +1373,7 @@ TEST(FuzzAssembleMoveAndSwap) { } Handle<Code> test = c.FinalizeForExecuting(); - if (FLAG_print_code) { + if (v8_flags.print_code) { test->Print(); } @@ -1454,7 +1454,7 @@ TEST(AssembleTailCallGap) { c.CheckAssembleTailCallGaps(instr, first_slot + 4, CodeGeneratorTester::kRegisterPush); Handle<Code> code = c.Finalize(); - if (FLAG_print_code) { + if (v8_flags.print_code) { code->Print(); } } @@ -1483,7 +1483,7 @@ TEST(AssembleTailCallGap) { c.CheckAssembleTailCallGaps(instr, first_slot + 4, CodeGeneratorTester::kStackSlotPush); Handle<Code> code = c.Finalize(); - if (FLAG_print_code) { + if (v8_flags.print_code) { code->Print(); } } @@ -1512,7 +1512,7 @@ TEST(AssembleTailCallGap) { c.CheckAssembleTailCallGaps(instr, first_slot + 4, CodeGeneratorTester::kScalarPush); Handle<Code> code = c.Finalize(); - if (FLAG_print_code) { + if (v8_flags.print_code) { code->Print(); } } diff --git a/deps/v8/test/cctest/compiler/test-concurrent-shared-function-info.cc b/deps/v8/test/cctest/compiler/test-concurrent-shared-function-info.cc index 1dfa2dbab758aa..d9aabbe0c96a11 100644 --- a/deps/v8/test/cctest/compiler/test-concurrent-shared-function-info.cc +++ b/deps/v8/test/cctest/compiler/test-concurrent-shared-function-info.cc @@ -91,7 +91,7 @@ class BackgroundCompilationThread final : public v8::base::Thread { }; TEST(TestConcurrentSharedFunctionInfo) { - FlagScope<bool> allow_natives_syntax(&i::FLAG_allow_natives_syntax, true); + FlagScope<bool> allow_natives_syntax(&i::v8_flags.allow_natives_syntax, true); HandleAndZoneScope scope; Isolate* isolate = scope.main_isolate(); diff --git a/deps/v8/test/cctest/compiler/test-jump-threading.cc b/deps/v8/test/cctest/compiler/test-jump-threading.cc index 64c8db7d0b2cbf..6ca5985a6413b1 100644 --- a/deps/v8/test/cctest/compiler/test-jump-threading.cc +++ b/deps/v8/test/cctest/compiler/test-jump-threading.cc @@ -82,6 +82,21 @@ class TestCode : public HandleAndZoneScope { AllocatedOperand(LocationOperand::REGISTER, MachineRepresentation::kWord32, 11)); } + int JumpWithGapMove(int target, int id = 10) { + Start(); + InstructionOperand ops[] = {UseRpo(target)}; + sequence_.AddInstruction(Instruction::New(main_zone(), kArchJmp, 0, nullptr, + 1, ops, 0, nullptr)); + int index = static_cast<int>(sequence_.instructions().size()) - 1; + InstructionOperand from = AllocatedOperand( + LocationOperand::REGISTER, MachineRepresentation::kWord32, id); + InstructionOperand to = AllocatedOperand( + LocationOperand::REGISTER, MachineRepresentation::kWord32, id + 1); + AddGapMove(index, from, to); + End(); + return index; + } + void Other() { Start(); sequence_.AddInstruction(Instruction::New(main_zone(), 155)); @@ -228,6 +243,45 @@ TEST(FwMoves2b) { VerifyForwarding(&code, kBlockCount, expected); } +TEST(FwMoves3a) { + constexpr size_t kBlockCount = 4; + TestCode code(kBlockCount); + + // B0 + code.JumpWithGapMove(3, 10); + // B1 (merge B1 into B0, because they have the same gap moves.) 
+ code.JumpWithGapMove(3, 10); + // B2 (can not merge B2 into B0, because they have different gap moves.) + code.JumpWithGapMove(3, 11); + // B3 + code.End(); + + static int expected[] = {0, 0, 2, 3}; + VerifyForwarding(&code, kBlockCount, expected); +} + +TEST(FwMoves3b) { + constexpr size_t kBlockCount = 7; + TestCode code(kBlockCount); + + // B0 + code.JumpWithGapMove(6); + // B1 + code.Jump(2); + // B2 + code.Jump(3); + // B3 + code.JumpWithGapMove(6); + // B4 + code.Jump(3); + // B5 + code.Jump(2); + // B6 + code.End(); + + static int expected[] = {0, 0, 0, 0, 0, 0, 6}; + VerifyForwarding(&code, kBlockCount, expected); +} TEST(FwOther2) { constexpr size_t kBlockCount = 2; @@ -463,6 +517,35 @@ TEST(FwLoop3_1a) { VerifyForwarding(&code, kBlockCount, expected); } +TEST(FwLoop4a) { + constexpr size_t kBlockCount = 2; + TestCode code(kBlockCount); + + // B0 + code.JumpWithGapMove(1); + // B1 + code.JumpWithGapMove(0); + + static int expected[] = {0, 1}; + VerifyForwarding(&code, kBlockCount, expected); +} + +TEST(FwLoop4b) { + constexpr size_t kBlockCount = 4; + TestCode code(kBlockCount); + + // B0 + code.Jump(3); + // B1 + code.JumpWithGapMove(2); + // B2 + code.Jump(0); + // B3 + code.JumpWithGapMove(2); + + static int expected[] = {3, 3, 3, 3}; + VerifyForwarding(&code, kBlockCount, expected); +} TEST(FwDiamonds) { constexpr size_t kBlockCount = 4; @@ -925,6 +1008,61 @@ TEST(DifferentSizeRet) { CheckRet(&code, j2); } +TEST(RewireGapJump1) { + constexpr size_t kBlockCount = 4; + TestCode code(kBlockCount); + + // B0 + int j1 = code.JumpWithGapMove(3); + // B1 + int j2 = code.JumpWithGapMove(3); + // B2 + int j3 = code.JumpWithGapMove(3); + // B3 + code.End(); + + int forward[] = {0, 0, 0, 3}; + VerifyForwarding(&code, kBlockCount, forward); + ApplyForwarding(&code, kBlockCount, forward); + CheckJump(&code, j1, 3); + CheckNop(&code, j2); + CheckNop(&code, j3); + + static int assembly[] = {0, 1, 1, 1}; + CheckAssemblyOrder(&code, kBlockCount, assembly); +} + +TEST(RewireGapJump2) { + constexpr size_t kBlockCount = 6; + TestCode code(kBlockCount); + + // B0 + int j1 = code.JumpWithGapMove(4); + // B1 + int j2 = code.JumpWithGapMove(4); + // B2 + code.Other(); + int j3 = code.Jump(3); + // B3 + int j4 = code.Jump(1); + // B4 + int j5 = code.Jump(5); + // B5 + code.End(); + + int forward[] = {0, 0, 2, 0, 5, 5}; + VerifyForwarding(&code, kBlockCount, forward); + ApplyForwarding(&code, kBlockCount, forward); + CheckJump(&code, j1, 5); + CheckNop(&code, j2); + CheckJump(&code, j3, 0); + CheckNop(&code, j4); + CheckNop(&code, j5); + + static int assembly[] = {0, 1, 1, 2, 2, 2}; + CheckAssemblyOrder(&code, kBlockCount, assembly); +} + } // namespace compiler } // namespace internal } // namespace v8 diff --git a/deps/v8/test/cctest/compiler/test-loop-analysis.cc b/deps/v8/test/cctest/compiler/test-loop-analysis.cc index fe5009f231cc08..ede4da40c0f4bd 100644 --- a/deps/v8/test/cctest/compiler/test-loop-analysis.cc +++ b/deps/v8/test/cctest/compiler/test-loop-analysis.cc @@ -126,7 +126,7 @@ class LoopFinderTester : HandleAndZoneScope { LoopTree* GetLoopTree() { if (loop_tree == nullptr) { - if (FLAG_trace_turbo_graph) { + if (v8_flags.trace_turbo_graph) { StdoutStream{} << AsRPO(graph); } Zone zone(main_isolate()->allocator(), ZONE_NAME); diff --git a/deps/v8/test/cctest/compiler/test-multiple-return.cc b/deps/v8/test/cctest/compiler/test-multiple-return.cc index c9884226835477..c21ddff33f59a1 100644 --- a/deps/v8/test/cctest/compiler/test-multiple-return.cc +++ 
b/deps/v8/test/cctest/compiler/test-multiple-return.cc @@ -169,7 +169,7 @@ void TestReturnMultipleValues(MachineType type, int min_count, int max_count) { m.ExportForTest()) .ToHandleChecked(); #ifdef ENABLE_DISASSEMBLER - if (FLAG_print_code) { + if (v8_flags.print_code) { StdoutStream os; code->Disassemble("multi_value", os, handles.main_isolate()); } @@ -217,7 +217,7 @@ void TestReturnMultipleValues(MachineType type, int min_count, int max_count) { mt.Return(ToInt32(&mt, type, ret)); #ifdef ENABLE_DISASSEMBLER Handle<Code> code2 = mt.GetCode(); - if (FLAG_print_code) { + if (v8_flags.print_code) { StdoutStream os; code2->Disassemble("multi_value_call", os, handles.main_isolate()); } diff --git a/deps/v8/test/cctest/compiler/test-run-native-calls.cc b/deps/v8/test/cctest/compiler/test-run-native-calls.cc index 126bdc4c1fee89..cf55b622e9877c 100644 --- a/deps/v8/test/cctest/compiler/test-run-native-calls.cc +++ b/deps/v8/test/cctest/compiler/test-run-native-calls.cc @@ -251,7 +251,7 @@ Handle<CodeT> CompileGraph(const char* name, CallDescriptor* call_descriptor, AssemblerOptions::Default(isolate), schedule) .ToHandleChecked(); #ifdef ENABLE_DISASSEMBLER - if (FLAG_print_opt_code) { + if (v8_flags.print_opt_code) { StdoutStream os; code->Disassemble(name, os, isolate); } diff --git a/deps/v8/test/cctest/compiler/test-run-unwinding-info.cc b/deps/v8/test/cctest/compiler/test-run-unwinding-info.cc index c4f65b4a4bcb4e..b5bc43db436847 100644 --- a/deps/v8/test/cctest/compiler/test-run-unwinding-info.cc +++ b/deps/v8/test/cctest/compiler/test-run-unwinding-info.cc @@ -17,8 +17,8 @@ namespace internal { namespace compiler { TEST(RunUnwindingInfo) { - FLAG_always_turbofan = true; - FLAG_perf_prof_unwinding_info = true; + v8_flags.always_turbofan = true; + v8_flags.perf_prof_unwinding_info = true; FunctionTester tester( "(function (x) {\n" diff --git a/deps/v8/test/cctest/compiler/test-verify-type.cc b/deps/v8/test/cctest/compiler/test-verify-type.cc index 8a3ea077ddfbe9..ffc69d5514083c 100644 --- a/deps/v8/test/cctest/compiler/test-verify-type.cc +++ b/deps/v8/test/cctest/compiler/test-verify-type.cc @@ -12,7 +12,7 @@ namespace internal { namespace compiler { TEST(TestVerifyType) { - FlagScope<bool> allow_natives_syntax(&i::FLAG_allow_natives_syntax, true); + FlagScope<bool> allow_natives_syntax(&i::v8_flags.allow_natives_syntax, true); HandleAndZoneScope handle_scope; Isolate* isolate = handle_scope.main_isolate(); Zone* zone = handle_scope.main_zone(); diff --git a/deps/v8/test/cctest/heap/heap-utils.cc b/deps/v8/test/cctest/heap/heap-utils.cc index 13f8daf5e9eae3..b390552c42b90b 100644 --- a/deps/v8/test/cctest/heap/heap-utils.cc +++ b/deps/v8/test/cctest/heap/heap-utils.cc @@ -37,8 +37,7 @@ void SealCurrentObjects(Heap* heap) { CHECK(!v8_flags.stress_concurrent_allocation); CcTest::CollectAllGarbage(); CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only); heap->old_space()->FreeLinearAllocationArea(); for (Page* page : *heap->old_space()) { page->MarkNeverAllocateForTesting(); @@ -260,12 +259,11 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) { const double kStepSizeInMs = 100; CHECK(v8_flags.incremental_marking); i::IncrementalMarking* marking = heap->incremental_marking(); - i::MarkCompactCollector* collector = heap->mark_compact_collector(); - if (collector->sweeping_in_progress()) { + 
if (heap->sweeping_in_progress()) { SafepointScope scope(heap); - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } if (marking->IsMinorMarking()) { @@ -297,10 +295,9 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) { // Background thread allocating concurrently interferes with this function. CHECK(!v8_flags.stress_concurrent_allocation); CodePageCollectionMemoryModificationScopeForTesting code_scope(space->heap()); - i::MarkCompactCollector* collector = space->heap()->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (space->heap()->sweeping_in_progress()) { + space->heap()->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } space->FreeLinearAllocationArea(); space->ResetFreeList(); @@ -315,10 +312,10 @@ void AbandonCurrentlyFreeMemory(PagedSpace* space) { void GcAndSweep(Heap* heap, AllocationSpace space) { heap->CollectGarbage(space, GarbageCollectionReason::kTesting); - if (heap->mark_compact_collector()->sweeping_in_progress()) { + if (heap->sweeping_in_progress()) { SafepointScope scope(heap); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } } diff --git a/deps/v8/test/cctest/heap/test-compaction.cc b/deps/v8/test/cctest/heap/test-compaction.cc index 18d32ba6d91394..a0bdbe9f716f05 100644 --- a/deps/v8/test/cctest/heap/test-compaction.cc +++ b/deps/v8/test/cctest/heap/test-compaction.cc @@ -6,6 +6,7 @@ #include "src/heap/factory.h" #include "src/heap/heap-inl.h" #include "src/heap/mark-compact.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/memory-chunk.h" #include "src/heap/remembered-set-inl.h" #include "src/objects/objects-inl.h" @@ -24,11 +25,7 @@ void CheckInvariantsOfAbortedPage(Page* page) { // 1) Markbits are cleared // 2) The page is not marked as evacuation candidate anymore // 3) The page is not marked as aborted compaction anymore. - CHECK(page->heap() - ->mark_compact_collector() - ->non_atomic_marking_state() - ->bitmap(page) - ->IsClean()); + CHECK(page->heap()->non_atomic_marking_state()->bitmap(page)->IsClean()); CHECK(!page->IsEvacuationCandidate()); CHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED)); } @@ -79,8 +76,8 @@ HEAP_TEST(CompactionFullAbortedPage) { heap->set_force_oom(true); CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); // Check that all handles still point to the same page, i.e., compaction // has been aborted on the page. 
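These heap-test hunks, above and below, apply one mechanical refactor: the sweeping-completion entry points move from MarkCompactCollector to Heap. A condensed before/after sketch of the call shape, built only from code already shown in the hunks (heap stands for the tests' local Heap*):

    // Old shape: query and finalize sweeping through the collector.
    MarkCompactCollector* collector = heap->mark_compact_collector();
    if (collector->sweeping_in_progress()) {
      collector->EnsureSweepingCompleted(
          MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only);
    }

    // New shape: Heap owns both the in-progress query and the
    // finalization-mode enum; the kV8Only value itself is unchanged.
    if (heap->sweeping_in_progress()) {
      heap->EnsureSweepingCompleted(
          Heap::SweepingForcedFinalizationMode::kV8Only);
    }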
@@ -161,8 +158,8 @@ HEAP_TEST(CompactionPartiallyAbortedPage) { heap->set_force_oom(true); CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); bool migration_aborted = false; for (Handle<FixedArray> object : compaction_page_handles) { @@ -260,8 +257,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithInvalidatedSlots) { heap->set_force_oom(true); CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); CHECK_EQ(Page::FromHeapObject(*compaction_page_handles.front()), page_to_fill); @@ -339,8 +336,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageIntraAbortedPointers) { heap->set_force_oom(true); CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); // The following check makes sure that we compacted "some" objects, while // leaving others in place. @@ -441,8 +438,8 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithRememberedSetEntries) { heap->set_force_oom(true); CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); // The following check makes sure that we compacted "some" objects, while // leaving others in place. diff --git a/deps/v8/test/cctest/heap/test-concurrent-allocation.cc b/deps/v8/test/cctest/heap/test-concurrent-allocation.cc index 007b92a9dbae5b..313309e73c6bed 100644 --- a/deps/v8/test/cctest/heap/test-concurrent-allocation.cc +++ b/deps/v8/test/cctest/heap/test-concurrent-allocation.cc @@ -20,6 +20,7 @@ #include "src/heap/concurrent-allocator-inl.h" #include "src/heap/heap.h" #include "src/heap/local-heap-inl.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/parked-scope.h" #include "src/heap/safepoint.h" #include "src/objects/heap-number.h" @@ -250,7 +251,7 @@ class LargeObjectConcurrentAllocationThread final : public v8::base::Thread { kLargeObjectSize, AllocationType::kOld, AllocationOrigin::kRuntime, AllocationAlignment::kTaggedAligned); if (result.IsFailure()) { - local_heap.TryPerformCollection(); + heap_->CollectGarbageFromAnyThread(&local_heap); } else { Address address = result.ToAddress(); CreateFixedArray(heap_, address, kLargeObjectSize); @@ -371,9 +372,9 @@ UNINITIALIZED_TEST(ConcurrentBlackAllocation) { HeapObject object = HeapObject::FromAddress(address); if (i < kWhiteIterations * kObjectsAllocatedPerIteration) { - CHECK(heap->incremental_marking()->marking_state()->IsWhite(object)); + CHECK(heap->marking_state()->IsWhite(object)); } else { - CHECK(heap->incremental_marking()->marking_state()->IsBlack(object)); + CHECK(heap->marking_state()->IsBlack(object)); } } @@ -427,7 +428,7 @@ UNINITIALIZED_TEST(ConcurrentWriteBarrier) { } heap->StartIncrementalMarking(i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting); - CHECK(heap->incremental_marking()->marking_state()->IsWhite(value)); + CHECK(heap->marking_state()->IsWhite(value)); auto thread = std::make_unique<ConcurrentWriteBarrierThread>(heap, fixed_array, value); @@ -435,7 +436,7 @@ 
UNINITIALIZED_TEST(ConcurrentWriteBarrier) { thread->Join(); - CHECK(heap->incremental_marking()->marking_state()->IsBlackOrGrey(value)); + CHECK(heap->marking_state()->IsBlackOrGrey(value)); heap::InvokeMarkSweep(i_isolate); isolate->Dispose(); @@ -513,7 +514,7 @@ UNINITIALIZED_TEST(ConcurrentRecordRelocSlot) { } heap->StartIncrementalMarking(i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting); - CHECK(heap->incremental_marking()->marking_state()->IsWhite(value)); + CHECK(heap->marking_state()->IsWhite(value)); { // TODO(v8:13023): remove ResetPKUPermissionsForThreadSpawning in the @@ -527,7 +528,7 @@ UNINITIALIZED_TEST(ConcurrentRecordRelocSlot) { thread->Join(); } - CHECK(heap->incremental_marking()->marking_state()->IsBlackOrGrey(value)); + CHECK(heap->marking_state()->IsBlackOrGrey(value)); heap::InvokeMarkSweep(i_isolate); } isolate->Dispose(); diff --git a/deps/v8/test/cctest/heap/test-concurrent-marking.cc b/deps/v8/test/cctest/heap/test-concurrent-marking.cc index 2ad4d4a696bc99..e57fa68f37e6a1 100644 --- a/deps/v8/test/cctest/heap/test-concurrent-marking.cc +++ b/deps/v8/test/cctest/heap/test-concurrent-marking.cc @@ -32,15 +32,15 @@ TEST(ConcurrentMarking) { Heap* heap = CcTest::heap(); CcTest::CollectAllGarbage(); if (!heap->incremental_marking()->IsStopped()) return; - MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } WeakObjects weak_objects; ConcurrentMarking* concurrent_marking = new ConcurrentMarking(heap, &weak_objects); + MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); PublishSegment(*collector->marking_worklists()->shared(), ReadOnlyRoots(heap).undefined_value()); concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR); @@ -54,15 +54,15 @@ TEST(ConcurrentMarkingReschedule) { Heap* heap = CcTest::heap(); CcTest::CollectAllGarbage(); if (!heap->incremental_marking()->IsStopped()) return; - MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } WeakObjects weak_objects; ConcurrentMarking* concurrent_marking = new ConcurrentMarking(heap, &weak_objects); + MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); PublishSegment(*collector->marking_worklists()->shared(), ReadOnlyRoots(heap).undefined_value()); concurrent_marking->ScheduleJob(GarbageCollector::MARK_COMPACTOR); @@ -80,15 +80,15 @@ TEST(ConcurrentMarkingPreemptAndReschedule) { Heap* heap = CcTest::heap(); CcTest::CollectAllGarbage(); if (!heap->incremental_marking()->IsStopped()) return; - MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } WeakObjects weak_objects; ConcurrentMarking* concurrent_marking = new ConcurrentMarking(heap, &weak_objects); + MarkCompactCollector* collector = 
CcTest::heap()->mark_compact_collector(); for (int i = 0; i < 5000; i++) PublishSegment(*collector->marking_worklists()->shared(), ReadOnlyRoots(heap).undefined_value()); diff --git a/deps/v8/test/cctest/heap/test-heap.cc b/deps/v8/test/cctest/heap/test-heap.cc index 90b655fdc79c26..a4535b2abf56a3 100644 --- a/deps/v8/test/cctest/heap/test-heap.cc +++ b/deps/v8/test/cctest/heap/test-heap.cc @@ -40,6 +40,7 @@ #include "src/debug/debug.h" #include "src/deoptimizer/deoptimizer.h" #include "src/execution/execution.h" +#include "src/flags/flags.h" #include "src/handles/global-handles-inl.h" #include "src/heap/combined-heap.h" #include "src/heap/factory.h" @@ -50,6 +51,7 @@ #include "src/heap/large-spaces.h" #include "src/heap/mark-compact.h" #include "src/heap/marking-barrier.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/memory-chunk.h" #include "src/heap/memory-reducer.h" #include "src/heap/parked-scope.h" @@ -1874,6 +1876,7 @@ TEST(TestSizeOfRegExpCode) { v8_flags.stress_concurrent_allocation = false; Isolate* isolate = CcTest::i_isolate(); + Heap* heap = CcTest::heap(); HandleScope scope(isolate); LocalContext context; @@ -1896,21 +1899,19 @@ TEST(TestSizeOfRegExpCode) { // Get initial heap size after several full GCs, which will stabilize // the heap size and return with sweeping finished completely. CcTest::CollectAllAvailableGarbage(); - MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } - int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects()); + int initial_size = static_cast<int>(heap->SizeOfObjects()); CompileRun("'foo'.match(reg_exp_source);"); CcTest::CollectAllAvailableGarbage(); - int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects()); + int size_with_regexp = static_cast<int>(heap->SizeOfObjects()); CompileRun("'foo'.match(half_size_reg_exp);"); CcTest::CollectAllAvailableGarbage(); - int size_with_optimized_regexp = - static_cast<int>(CcTest::heap()->SizeOfObjects()); + int size_with_optimized_regexp = static_cast<int>(heap->SizeOfObjects()); int size_of_regexp_code = size_with_regexp - initial_size; @@ -1934,14 +1935,13 @@ HEAP_TEST(TestSizeOfObjects) { // Disable LAB, such that calculations with SizeOfObjects() and object size // are correct. heap->DisableInlineAllocation(); - MarkCompactCollector* collector = heap->mark_compact_collector(); // Get initial heap size after several full GCs, which will stabilize // the heap size and return with sweeping finished completely. CcTest::CollectAllAvailableGarbage(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } int initial_size = static_cast<int>(heap->SizeOfObjects()); @@ -1965,9 +1965,9 @@ HEAP_TEST(TestSizeOfObjects) { // Normally sweeping would not be complete here, but no guarantees. CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects())); // Waiting for sweeper threads should not change heap size. 
- if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects())); } @@ -2481,7 +2481,7 @@ TEST(InstanceOfStubWriteBarrier) { CHECK(f->HasAttachedOptimizedCode()); - MarkingState* marking_state = marking->marking_state(); + MarkingState* marking_state = CcTest::heap()->marking_state(); const double kStepSizeInMs = 100; while (!marking_state->IsBlack(f->code())) { @@ -2517,10 +2517,9 @@ HEAP_TEST(GCFlags) { GarbageCollectionReason::kTesting); CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); - MarkCompactCollector* collector = heap->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } IncrementalMarking* marking = heap->incremental_marking(); @@ -5576,8 +5575,8 @@ HEAP_TEST(Regress587004) { CcTest::CollectGarbage(OLD_SPACE); heap::SimulateFullSpace(heap->old_space()); heap->RightTrimFixedArray(*array, N - 1); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only); ByteArray byte_array; const int M = 256; // Don't allow old space expansion. The test works without this flag too, @@ -5748,15 +5746,14 @@ TEST(Regress598319) { // GC to cleanup state CcTest::CollectGarbage(OLD_SPACE); - MarkCompactCollector* collector = heap->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } CHECK(heap->lo_space()->Contains(arr.get())); IncrementalMarking* marking = heap->incremental_marking(); - MarkingState* marking_state = marking->marking_state(); + MarkingState* marking_state = heap->marking_state(); CHECK(marking_state->IsWhite(arr.get())); for (int i = 0; i < arr.get().length(); i++) { HeapObject arr_value = HeapObject::cast(arr.get().get(i)); @@ -5820,8 +5817,7 @@ Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) { for (int i = 0; i < 5; i++) { CcTest::CollectAllGarbage(); } - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only); // Disable LAB, such that calculations with SizeOfObjects() and object size // are correct. heap->DisableInlineAllocation(); @@ -5836,8 +5832,7 @@ Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) { CHECK_EQ(size_after_allocation, size_after_shrinking); // GC and sweeping update the size to account for shrinking.
CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only); intptr_t size_after_gc = heap->SizeOfObjects(); CHECK_EQ(size_after_gc, size_before_allocation + array->Size()); return array; @@ -5871,11 +5866,10 @@ TEST(Regress615489) { Isolate* isolate = heap->isolate(); CcTest::CollectAllGarbage(); - i::MarkCompactCollector* collector = heap->mark_compact_collector(); i::IncrementalMarking* marking = heap->incremental_marking(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } CHECK(marking->IsMarking() || marking->IsStopped()); if (marking->IsStopped()) { @@ -5971,11 +5965,10 @@ TEST(LeftTrimFixedArrayInBlackArea) { Isolate* isolate = heap->isolate(); CcTest::CollectAllGarbage(); - i::MarkCompactCollector* collector = heap->mark_compact_collector(); i::IncrementalMarking* marking = heap->incremental_marking(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } CHECK(marking->IsMarking() || marking->IsStopped()); if (marking->IsStopped()) { @@ -5992,7 +5985,7 @@ TEST(LeftTrimFixedArrayInBlackArea) { Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, AllocationType::kOld); CHECK(heap->old_space()->Contains(*array)); - MarkingState* marking_state = marking->marking_state(); + MarkingState* marking_state = heap->marking_state(); CHECK(marking_state->IsBlack(*array)); // Now left trim the allocated black area. 
A filler has to be installed @@ -6013,11 +6006,10 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) { Isolate* isolate = heap->isolate(); CcTest::CollectAllGarbage(); - i::MarkCompactCollector* collector = heap->mark_compact_collector(); i::IncrementalMarking* marking = heap->incremental_marking(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } CHECK(marking->IsMarking() || marking->IsStopped()); if (marking->IsStopped()) { @@ -6038,7 +6030,7 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) { Address start_address = array->address(); Address end_address = start_address + array->Size(); Page* page = Page::FromAddress(start_address); - NonAtomicMarkingState* marking_state = marking->non_atomic_marking_state(); + NonAtomicMarkingState* marking_state = heap->non_atomic_marking_state(); CHECK(marking_state->IsBlack(*array)); CHECK(marking_state->bitmap(page)->AllBitsSetInRange( page->AddressToMarkbitIndex(start_address), @@ -6082,11 +6074,10 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) { Isolate* isolate = CcTest::i_isolate(); CcTest::CollectAllGarbage(); - i::MarkCompactCollector* collector = heap->mark_compact_collector(); i::IncrementalMarking* marking = heap->incremental_marking(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } CHECK(marking->IsMarking() || marking->IsStopped()); if (marking->IsStopped()) { @@ -6103,13 +6094,12 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) { // Allocate the fixed array that will be trimmed later. Handle<FixedArray> array = - CcTest::i_isolate()->factory()->NewFixedArray(100, AllocationType::kOld); + isolate->factory()->NewFixedArray(100, AllocationType::kOld); Address start_address = array->address(); Address end_address = start_address + array->Size(); Page* page = Page::FromAddress(start_address); - NonAtomicMarkingState* marking_state = marking->non_atomic_marking_state(); + NonAtomicMarkingState* marking_state = heap->non_atomic_marking_state(); CHECK(marking_state->IsBlack(*array)); - CHECK(marking_state->bitmap(page)->AllBitsSetInRange( page->AddressToMarkbitIndex(start_address), page->AddressToMarkbitIndex(end_address))); @@ -6501,12 +6491,11 @@ HEAP_TEST(Regress670675) { v8::HandleScope scope(CcTest::isolate()); Heap* heap = CcTest::heap(); Isolate* isolate = heap->isolate(); - i::MarkCompactCollector* collector = heap->mark_compact_collector(); CcTest::CollectAllGarbage(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } heap->tracer()->StopFullCycleIfNeeded(); i::IncrementalMarking* marking = CcTest::heap()->incremental_marking(); @@ -6558,10 +6547,9 @@ HEAP_TEST(RegressMissingWriteBarrierInAllocate) { // then the map is white and will be freed prematurely. 
heap::SimulateIncrementalMarking(heap, true); CcTest::CollectAllGarbage(); - MarkCompactCollector* collector = heap->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } CHECK(object->map().IsMap()); } @@ -6906,7 +6894,7 @@ UNINITIALIZED_TEST(RestoreHeapLimit) { } void HeapTester::UncommitUnusedMemory(Heap* heap) { - heap->new_space()->Shrink(); + if (!v8_flags.minor_mc) heap->new_space()->Shrink(); heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted(); } diff --git a/deps/v8/test/cctest/heap/test-mark-compact.cc b/deps/v8/test/cctest/heap/test-mark-compact.cc index 93ef52c45f5ad2..6047eae158e5ae 100644 --- a/deps/v8/test/cctest/heap/test-mark-compact.cc +++ b/deps/v8/test/cctest/heap/test-mark-compact.cc @@ -205,8 +205,7 @@ HEAP_TEST(DoNotEvacuatePinnedPages) { page->SetFlag(MemoryChunk::PINNED); CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only); // The pinned flag should prevent the page from moving. for (Handle<FixedArray> object : handles) { @@ -216,8 +215,7 @@ HEAP_TEST(DoNotEvacuatePinnedPages) { page->ClearFlag(MemoryChunk::PINNED); CcTest::CollectAllGarbage(); - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted(Heap::SweepingForcedFinalizationMode::kV8Only); // `compact_on_every_full_gc` ensures that this page is an evacuation // candidate, so with the pin flag cleared compaction should now move it. @@ -450,11 +448,10 @@ TEST(Regress5829) { v8::HandleScope sc(CcTest::isolate()); Heap* heap = isolate->heap(); heap::SealCurrentObjects(heap); - i::MarkCompactCollector* collector = heap->mark_compact_collector(); i::IncrementalMarking* marking = heap->incremental_marking(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } CHECK(marking->IsMarking() || marking->IsStopped()); if (marking->IsStopped()) { @@ -471,7 +468,7 @@ TEST(Regress5829) { heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize); heap->old_space()->FreeLinearAllocationArea(); Page* page = Page::FromAddress(array->address()); - MarkingState* marking_state = marking->marking_state(); + MarkingState* marking_state = heap->marking_state(); for (auto object_and_size : LiveObjectRange<kGreyObjects>(page, marking_state->bitmap(page))) { CHECK(!object_and_size.first.IsFreeSpaceOrFiller()); diff --git a/deps/v8/test/cctest/heap/test-spaces.cc b/deps/v8/test/cctest/heap/test-spaces.cc index d70d1787f9c0dd..bfa96eee84399d 100644 --- a/deps/v8/test/cctest/heap/test-spaces.cc +++ b/deps/v8/test/cctest/heap/test-spaces.cc @@ -448,8 +448,7 @@ TEST(SizeOfInitialHeap) { Heap* heap = isolate->heap(); for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE; i++) { - // Map space might be disabled. - if (i == MAP_SPACE && !heap->paged_space(i)) continue; + if (!heap->paged_space(i)) continue; // Debug code can be very large, so skip CODE_SPACE if we are generating it. 
if (i == CODE_SPACE && i::v8_flags.debug_code) continue; diff --git a/deps/v8/test/cctest/heap/test-write-barrier.cc b/deps/v8/test/cctest/heap/test-write-barrier.cc index 1a467ceee573f6..1608a5f716feec 100644 --- a/deps/v8/test/cctest/heap/test-write-barrier.cc +++ b/deps/v8/test/cctest/heap/test-write-barrier.cc @@ -4,6 +4,7 @@ #include "src/heap/incremental-marking.h" #include "src/heap/mark-compact.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/spaces.h" #include "src/objects/js-array-buffer-inl.h" #include "src/objects/objects-inl.h" @@ -21,7 +22,7 @@ HEAP_TEST(WriteBarrier_Marking) { CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); - MarkCompactCollector* collector = isolate->heap()->mark_compact_collector(); + Heap* heap = isolate->heap(); HandleScope outer(isolate); Handle<FixedArray> objects = factory->NewFixedArray(3); v8::Global<Value> global_objects(CcTest::isolate(), Utils::ToLocal(objects)); @@ -40,20 +41,19 @@ HEAP_TEST(WriteBarrier_Marking) { FixedArray host = FixedArray::cast(objects->get(0)); HeapObject value1 = HeapObject::cast(objects->get(1)); HeapObject value2 = HeapObject::cast(objects->get(2)); - CHECK(collector->marking_state()->IsWhite(host)); - CHECK(collector->marking_state()->IsWhite(value1)); + CHECK(heap->marking_state()->IsWhite(host)); + CHECK(heap->marking_state()->IsWhite(value1)); WriteBarrier::Marking(host, host.RawFieldOfElementAt(0), value1); - CHECK_EQ(V8_CONCURRENT_MARKING_BOOL, - collector->marking_state()->IsGrey(value1)); - collector->marking_state()->WhiteToGrey(host); - collector->marking_state()->GreyToBlack(host); - CHECK(collector->marking_state()->IsWhite(value2)); + CHECK_EQ(V8_CONCURRENT_MARKING_BOOL, heap->marking_state()->IsGrey(value1)); + heap->marking_state()->WhiteToGrey(host); + heap->marking_state()->GreyToBlack(host); + CHECK(heap->marking_state()->IsWhite(value2)); WriteBarrier::Marking(host, host.RawFieldOfElementAt(0), value2); - CHECK(collector->marking_state()->IsGrey(value2)); + CHECK(heap->marking_state()->IsGrey(value2)); heap::SimulateIncrementalMarking(CcTest::heap(), true); - CHECK(collector->marking_state()->IsBlack(host)); - CHECK(collector->marking_state()->IsBlack(value1)); - CHECK(collector->marking_state()->IsBlack(value2)); + CHECK(heap->marking_state()->IsBlack(host)); + CHECK(heap->marking_state()->IsBlack(value1)); + CHECK(heap->marking_state()->IsBlack(value2)); } HEAP_TEST(WriteBarrier_MarkingExtension) { @@ -62,7 +62,7 @@ HEAP_TEST(WriteBarrier_MarkingExtension) { CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); - MarkCompactCollector* collector = isolate->heap()->mark_compact_collector(); + Heap* heap = isolate->heap(); HandleScope outer(isolate); Handle<FixedArray> objects = factory->NewFixedArray(1); ArrayBufferExtension* extension; @@ -75,7 +75,7 @@ HEAP_TEST(WriteBarrier_MarkingExtension) { } heap::SimulateIncrementalMarking(CcTest::heap(), false); JSArrayBuffer host = JSArrayBuffer::cast(objects->get(0)); - CHECK(collector->marking_state()->IsWhite(host)); + CHECK(heap->marking_state()->IsWhite(host)); CHECK(!extension->IsMarked()); WriteBarrier::Marking(host, extension); // Concurrent marking barrier should mark this object. 
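The write-barrier hunks around this point make the matching move for marking state: tests now include src/heap/marking-state-inl.h and reach MarkingState through Heap instead of through the collector or the incremental marker. A minimal sketch assembled from calls that appear in these files (isolate, host, and page are stand-ins for the tests' local variables):

    #include "src/heap/marking-state-inl.h"

    Heap* heap = isolate->heap();
    // Color queries and transitions now hang off Heap's marking state.
    CHECK(heap->marking_state()->IsWhite(host));
    heap->marking_state()->WhiteToGrey(host);
    heap->marking_state()->GreyToBlack(host);
    CHECK(heap->marking_state()->IsBlack(host));
    // The non-atomic variant moves the same way.
    CHECK(heap->non_atomic_marking_state()->bitmap(page)->IsClean());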
@@ -84,7 +84,7 @@ HEAP_TEST(WriteBarrier_MarkingExtension) { v8::Global<ArrayBuffer> global_host(CcTest::isolate(), Utils::ToLocal(handle(host, isolate))); heap::SimulateIncrementalMarking(CcTest::heap(), true); - CHECK(collector->marking_state()->IsBlack(host)); + CHECK(heap->marking_state()->IsBlack(host)); CHECK(extension->IsMarked()); } diff --git a/deps/v8/test/cctest/test-accessor-assembler.cc b/deps/v8/test/cctest/test-accessor-assembler.cc index 19893ac5fc9420..5c45090370d33e 100644 --- a/deps/v8/test/cctest/test-accessor-assembler.cc +++ b/deps/v8/test/cctest/test-accessor-assembler.cc @@ -159,7 +159,7 @@ TEST(TryProbeStubCache) { std::vector<Handle<JSObject>> receivers; std::vector<Handle<Code>> handlers; - base::RandomNumberGenerator rand_gen(FLAG_random_seed); + base::RandomNumberGenerator rand_gen(v8_flags.random_seed); Factory* factory = isolate->factory(); diff --git a/deps/v8/test/cctest/test-accessors.cc b/deps/v8/test/cctest/test-accessors.cc index ae68ae6bed20d1..08a445981769ba 100644 --- a/deps/v8/test/cctest/test-accessors.cc +++ b/deps/v8/test/cctest/test-accessors.cc @@ -738,7 +738,7 @@ static bool SecurityTestCallback(Local<v8::Context> accessing_context, TEST(PrototypeGetterAccessCheck) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); @@ -905,7 +905,7 @@ TEST(ObjectSetLazyDataPropertyForIndex) { } TEST(ObjectTemplateSetLazyPropertySurvivesIC) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); diff --git a/deps/v8/test/cctest/test-api-array-buffer.cc b/deps/v8/test/cctest/test-api-array-buffer.cc index dff69296908cca..29169560f1fe67 100644 --- a/deps/v8/test/cctest/test-api-array-buffer.cc +++ b/deps/v8/test/cctest/test-api-array-buffer.cc @@ -298,7 +298,7 @@ THREADED_TEST(ArrayBuffer_ExternalizeEmpty) { } THREADED_TEST(SharedArrayBuffer_ApiInternalToExternal) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope handle_scope(isolate); @@ -333,7 +333,7 @@ THREADED_TEST(SharedArrayBuffer_ApiInternalToExternal) { } THREADED_TEST(SharedArrayBuffer_JSInternalToExternal) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope handle_scope(isolate); diff --git a/deps/v8/test/cctest/test-api-interceptors.cc b/deps/v8/test/cctest/test-api-interceptors.cc index 2a7f2528dc1a36..4fcffa21928690 100644 --- a/deps/v8/test/cctest/test-api-interceptors.cc +++ b/deps/v8/test/cctest/test-api-interceptors.cc @@ -1424,7 +1424,7 @@ THREADED_TEST(InterceptorLoadGlobalICGlobalWithInterceptor) { // Test load of a non-existing global through prototype chain when a global // object has an interceptor. 
THREADED_TEST(InterceptorLoadICGlobalWithInterceptor) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope scope(isolate); v8::Local<v8::ObjectTemplate> templ_global = v8::ObjectTemplate::New(isolate); @@ -2571,7 +2571,7 @@ THREADED_TEST(PropertyDefinerCallbackInDefineNamedOwnIC) { } { - i::FLAG_lazy_feedback_allocation = false; + i::v8_flags.lazy_feedback_allocation = false; i::FlagList::EnforceFlagImplications(); LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -2579,7 +2579,7 @@ THREADED_TEST(PropertyDefinerCallbackInDefineNamedOwnIC) { } { - i::FLAG_lazy_feedback_allocation = false; + i::v8_flags.lazy_feedback_allocation = false; i::FlagList::EnforceFlagImplications(); LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -4408,7 +4408,7 @@ static void InterceptorCallICGetter6( // Same test as above, except the code is wrapped in a function // to test the optimized compiler. THREADED_TEST(InterceptorCallICConstantFunctionNotNeededWrapped) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope scope(isolate); v8::Local<v8::ObjectTemplate> templ = ObjectTemplate::New(isolate); @@ -4952,8 +4952,8 @@ THREADED_TEST(NamedPropertyHandlerGetterAttributes) { THREADED_TEST(Regress256330) { - if (!i::FLAG_turbofan) return; - i::FLAG_allow_natives_syntax = true; + if (!i::v8_flags.turbofan) return; + i::v8_flags.allow_natives_syntax = true; LocalContext context; v8::HandleScope scope(context->GetIsolate()); Local<FunctionTemplate> templ = FunctionTemplate::New(context->GetIsolate()); @@ -4976,7 +4976,7 @@ THREADED_TEST(Regress256330) { } THREADED_TEST(OptimizedInterceptorSetter) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); Local<FunctionTemplate> templ = FunctionTemplate::New(CcTest::isolate()); AddInterceptor(templ, InterceptorGetter, InterceptorSetter); @@ -5006,7 +5006,7 @@ THREADED_TEST(OptimizedInterceptorSetter) { } THREADED_TEST(OptimizedInterceptorGetter) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); Local<FunctionTemplate> templ = FunctionTemplate::New(CcTest::isolate()); AddInterceptor(templ, InterceptorGetter, InterceptorSetter); @@ -5033,7 +5033,7 @@ THREADED_TEST(OptimizedInterceptorGetter) { } THREADED_TEST(OptimizedInterceptorFieldRead) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); Local<FunctionTemplate> templ = FunctionTemplate::New(CcTest::isolate()); AddInterceptor(templ, InterceptorGetter, InterceptorSetter); @@ -5057,7 +5057,7 @@ THREADED_TEST(OptimizedInterceptorFieldRead) { } THREADED_TEST(OptimizedInterceptorFieldWrite) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); Local<FunctionTemplate> templ = FunctionTemplate::New(CcTest::isolate()); AddInterceptor(templ, InterceptorGetter, InterceptorSetter); diff --git a/deps/v8/test/cctest/test-api-stack-traces.cc b/deps/v8/test/cctest/test-api-stack-traces.cc index 5f83acda3c0fcb..96a19018b8bf19 100644 --- a/deps/v8/test/cctest/test-api-stack-traces.cc +++ b/deps/v8/test/cctest/test-api-stack-traces.cc @@ -867,8 +867,8 @@ TEST(DynamicWithSourceURLInStackTraceString) { } UNINITIALIZED_TEST(CaptureStackTraceForStackOverflow) 
{ - // We must set FLAG_stack_size before initializing the isolate. - v8::internal::FLAG_stack_size = 150; + // We must set v8_flags.stack_size before initializing the isolate. + v8::internal::v8_flags.stack_size = 150; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate = v8::Isolate::New(create_params); diff --git a/deps/v8/test/cctest/test-api-typed-array.cc b/deps/v8/test/cctest/test-api-typed-array.cc index eaac22b47a4242..42402d5d86447e 100644 --- a/deps/v8/test/cctest/test-api-typed-array.cc +++ b/deps/v8/test/cctest/test-api-typed-array.cc @@ -271,8 +271,8 @@ void ObjectWithExternalArrayTestHelper(Local<Context> context, } } - bool old_natives_flag_sentry = i::FLAG_allow_natives_syntax; - i::FLAG_allow_natives_syntax = true; + bool old_natives_flag_sentry = i::v8_flags.allow_natives_syntax; + i::v8_flags.allow_natives_syntax = true; // Test complex assignments result = CompileRun( @@ -310,7 +310,7 @@ void ObjectWithExternalArrayTestHelper(Local<Context> context, "sum;"); CHECK_EQ(4800, result->Int32Value(context).FromJust()); - i::FLAG_allow_natives_syntax = old_natives_flag_sentry; + i::v8_flags.allow_natives_syntax = old_natives_flag_sentry; result = CompileRun( "ext_array[3] = 33;" @@ -460,61 +460,61 @@ THREADED_TEST(DataView) { } THREADED_TEST(SharedUint8Array) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<uint8_t, v8::Uint8Array, v8::SharedArrayBuffer>( i::kExternalUint8Array, 0, 0xFF); } THREADED_TEST(SharedInt8Array) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<int8_t, v8::Int8Array, v8::SharedArrayBuffer>( i::kExternalInt8Array, -0x80, 0x7F); } THREADED_TEST(SharedUint16Array) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<uint16_t, v8::Uint16Array, v8::SharedArrayBuffer>( i::kExternalUint16Array, 0, 0xFFFF); } THREADED_TEST(SharedInt16Array) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<int16_t, v8::Int16Array, v8::SharedArrayBuffer>( i::kExternalInt16Array, -0x8000, 0x7FFF); } THREADED_TEST(SharedUint32Array) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<uint32_t, v8::Uint32Array, v8::SharedArrayBuffer>( i::kExternalUint32Array, 0, UINT_MAX); } THREADED_TEST(SharedInt32Array) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<int32_t, v8::Int32Array, v8::SharedArrayBuffer>( i::kExternalInt32Array, INT_MIN, INT_MAX); } THREADED_TEST(SharedFloat32Array) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<float, v8::Float32Array, v8::SharedArrayBuffer>( i::kExternalFloat32Array, -500, 500); } THREADED_TEST(SharedFloat64Array) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<double, v8::Float64Array, v8::SharedArrayBuffer>( i::kExternalFloat64Array, -500, 500); } THREADED_TEST(SharedUint8ClampedArray) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; TypedArrayTestHelper<uint8_t, v8::Uint8ClampedArray, v8::SharedArrayBuffer>( i::kExternalUint8ClampedArray, 0, 0xFF); } THREADED_TEST(SharedDataView) { - 
i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; const int kSize = 50; LocalContext env; diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc index 51294fccc8d888..5e13f3b4bae67f 100644 --- a/deps/v8/test/cctest/test-api.cc +++ b/deps/v8/test/cctest/test-api.cc @@ -266,7 +266,7 @@ static void TestSignature(const char* operation, Local<Value> receiver, } THREADED_TEST(ReceiverSignature) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); @@ -375,7 +375,7 @@ static void DoNothingCallback(const v8::FunctionCallbackInfo<v8::Value>&) {} // Regression test for issue chromium:1188563. THREADED_TEST(Regress1188563) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); @@ -659,7 +659,7 @@ TEST(MakingExternalStringConditions) { LocalContext env; v8::HandleScope scope(env->GetIsolate()); - if (!v8::internal::FLAG_single_generation) { + if (!i::v8_flags.single_generation) { // Free some space in the new space so that we can check freshness. CcTest::CollectGarbage(i::NEW_SPACE); CcTest::CollectGarbage(i::NEW_SPACE); @@ -677,7 +677,7 @@ TEST(MakingExternalStringConditions) { .ToLocalChecked(); i::DeleteArray(two_byte_string); - if (!v8::internal::FLAG_single_generation) { + if (!i::v8_flags.single_generation) { // We should refuse to externalize new space strings. CHECK(!local_string->CanMakeExternal()); // Trigger full GC so that the newly allocated string moves to old gen. @@ -697,7 +697,7 @@ TEST(MakingExternalOneByteStringConditions) { LocalContext env; v8::HandleScope scope(env->GetIsolate()); - if (!v8::internal::FLAG_single_generation) { + if (!i::v8_flags.single_generation) { // Free some space in the new space so that we can check freshness. CcTest::CollectGarbage(i::NEW_SPACE); CcTest::CollectGarbage(i::NEW_SPACE); @@ -709,7 +709,7 @@ TEST(MakingExternalOneByteStringConditions) { // Single-character strings should not be externalized because they // are always in the RO-space. CHECK(!tiny_local_string->CanMakeExternal()); - if (!v8::internal::FLAG_single_generation) { + if (!i::v8_flags.single_generation) { // We should refuse to externalize new space strings. CHECK(!local_string->CanMakeExternal()); // Trigger full GC so that the newly allocated string moves to old gen. @@ -722,7 +722,7 @@ TEST(MakingExternalOneByteStringConditions) { TEST(MakingExternalUnalignedOneByteString) { - i::FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace. + i::v8_flags.stress_concurrent_allocation = false; // For SimulateFullSpace. 
LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -851,8 +851,8 @@ THREADED_TEST(NewExternalForVeryLongString) { TEST(ScavengeExternalString) { ManualGCScope manual_gc_scope; - i::FLAG_stress_compaction = false; - i::FLAG_gc_global = false; + i::v8_flags.stress_compaction = false; + i::v8_flags.gc_global = false; int dispose_count = 0; bool in_young_generation = false; { @@ -876,8 +876,8 @@ TEST(ScavengeExternalString) { TEST(ScavengeExternalOneByteString) { ManualGCScope manual_gc_scope; - i::FLAG_stress_compaction = false; - i::FLAG_gc_global = false; + i::v8_flags.stress_compaction = false; + i::v8_flags.gc_global = false; int dispose_count = 0; bool in_young_generation = false; { @@ -3747,7 +3747,7 @@ THREADED_TEST(WellKnownSymbols) { THREADED_TEST(GlobalPrivates) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); @@ -3962,8 +3962,8 @@ THREADED_TEST(ResettingGlobalHandle) { v8::HandleScope scope(isolate); global.Reset(isolate, v8_str("str")); } - v8::internal::GlobalHandles* global_handles = - reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles(); + i::GlobalHandles* global_handles = + reinterpret_cast<i::Isolate*>(isolate)->global_handles(); size_t initial_handle_count = global_handles->handles_count(); { v8::HandleScope scope(isolate); @@ -3990,8 +3990,8 @@ THREADED_TEST(ResettingGlobalHandleToEmpty) { v8::HandleScope scope(isolate); global.Reset(isolate, v8_str("str")); } - v8::internal::GlobalHandles* global_handles = - reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles(); + i::GlobalHandles* global_handles = + reinterpret_cast<i::Isolate*>(isolate)->global_handles(); size_t initial_handle_count = global_handles->handles_count(); { v8::HandleScope scope(isolate); @@ -4028,8 +4028,8 @@ THREADED_TEST(Global) { v8::HandleScope scope(isolate); global.Reset(isolate, v8_str("str")); } - v8::internal::GlobalHandles* global_handles = - reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles(); + i::GlobalHandles* global_handles = + reinterpret_cast<i::Isolate*>(isolate)->global_handles(); size_t initial_handle_count = global_handles->handles_count(); { v8::Global<String> unique(isolate, global); @@ -4315,8 +4315,8 @@ void TestGlobalValueMap() { templ.Reset(isolate, t); } Map map(isolate); - v8::internal::GlobalHandles* global_handles = - reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles(); + i::GlobalHandles* global_handles = + reinterpret_cast<i::Isolate*>(isolate)->global_handles(); size_t initial_handle_count = global_handles->handles_count(); CHECK_EQ(0, static_cast<int>(map.Size())); { @@ -4384,8 +4384,8 @@ TEST(GlobalValueMap) { TEST(VectorOfGlobals) { LocalContext env; v8::Isolate* isolate = env->GetIsolate(); - v8::internal::GlobalHandles* global_handles = - reinterpret_cast<v8::internal::Isolate*>(isolate)->global_handles(); + i::GlobalHandles* global_handles = + reinterpret_cast<i::Isolate*>(isolate)->global_handles(); size_t handle_count = global_handles->handles_count(); HandleScope scope(isolate); @@ -7599,7 +7599,7 @@ static void SetFlag(const v8::WeakCallbackInfo<FlagAndPersistent>& data) { static void IndependentWeakHandle(bool global_gc, bool interlinked) { ManualGCScope manual_gc_scope; // Parallel scavenge introduces too much fragmentation. 
- i::FLAG_parallel_scavenge = false; + i::v8_flags.parallel_scavenge = false; v8::Isolate* iso = CcTest::isolate(); v8::HandleScope scope(iso); v8::Local<Context> context = Context::New(iso); @@ -7620,7 +7620,7 @@ static void IndependentWeakHandle(bool global_gc, bool interlinked) { a->Set(context, v8_str("x"), b).FromJust(); b->Set(context, v8_str("x"), a).FromJust(); } - if (v8::internal::FLAG_single_generation || global_gc) { + if (i::v8_flags.single_generation || global_gc) { CcTest::CollectAllGarbage(); } else { CcTest::CollectGarbage(i::NEW_SPACE); @@ -7628,9 +7628,7 @@ static void IndependentWeakHandle(bool global_gc, bool interlinked) { v8::Local<Value> big_array = v8::Array::New(CcTest::isolate(), 5000); // Verify that we created an array where the space was reserved up front. big_array_size = - v8::internal::JSArray::cast(*v8::Utils::OpenHandle(*big_array)) - .elements() - .Size(); + i::JSArray::cast(*v8::Utils::OpenHandle(*big_array)).elements().Size(); CHECK_LE(20000, big_array_size); a->Set(context, v8_str("y"), big_array).FromJust(); big_heap_size = CcTest::heap()->SizeOfObjects(); @@ -7642,7 +7640,7 @@ static void IndependentWeakHandle(bool global_gc, bool interlinked) { v8::WeakCallbackType::kParameter); object_b.handle.SetWeak(&object_b, &SetFlag, v8::WeakCallbackType::kParameter); - if (v8::internal::FLAG_single_generation || global_gc) { + if (i::v8_flags.single_generation || global_gc) { CcTest::CollectAllGarbage(); } else { CcTest::CollectGarbage(i::NEW_SPACE); @@ -7741,7 +7739,7 @@ void InternalFieldCallback(bool global_gc) { handle.SetWeak<v8::Persistent<v8::Object>>( &handle, CheckInternalFields, v8::WeakCallbackType::kInternalFields); } - if (v8::internal::FLAG_single_generation || global_gc) { + if (i::v8_flags.single_generation || global_gc) { CcTest::CollectAllGarbage(); } else { CcTest::CollectGarbage(i::NEW_SPACE); @@ -7767,8 +7765,8 @@ static void ResetUseValueAndSetFlag( data.GetParameter()->flag = true; } -void v8::internal::heap::HeapTester::ResetWeakHandle(bool global_gc) { - if (FLAG_stress_incremental_marking) return; +void i::heap::HeapTester::ResetWeakHandle(bool global_gc) { + if (v8_flags.stress_incremental_marking) return; using v8::Context; using v8::Local; using v8::Object; @@ -7786,7 +7784,7 @@ void v8::internal::heap::HeapTester::ResetWeakHandle(bool global_gc) { Local<Object> b(v8::Object::New(iso)); object_a.handle.Reset(iso, a); object_b.handle.Reset(iso, b); - if (global_gc || FLAG_single_generation) { + if (global_gc || v8_flags.single_generation) { CcTest::PreciseCollectAllGarbage(); } else { CcTest::CollectGarbage(i::NEW_SPACE); @@ -7799,7 +7797,7 @@ void v8::internal::heap::HeapTester::ResetWeakHandle(bool global_gc) { v8::WeakCallbackType::kParameter); object_b.handle.SetWeak(&object_b, &ResetUseValueAndSetFlag, v8::WeakCallbackType::kParameter); - if (global_gc || FLAG_single_generation) { + if (global_gc || v8_flags.single_generation) { CcTest::PreciseCollectAllGarbage(); } else { CcTest::CollectGarbage(i::NEW_SPACE); @@ -7809,8 +7807,8 @@ void v8::internal::heap::HeapTester::ResetWeakHandle(bool global_gc) { } THREADED_HEAP_TEST(ResetWeakHandle) { - v8::internal::heap::HeapTester::ResetWeakHandle(false); - v8::internal::heap::HeapTester::ResetWeakHandle(true); + i::heap::HeapTester::ResetWeakHandle(false); + i::heap::HeapTester::ResetWeakHandle(true); } static void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); } @@ -7848,7 +7846,7 @@ THREADED_TEST(GCFromWeakCallbacks) { v8::Local<Context> context = Context::New(isolate); 
Context::Scope context_scope(context); - if (v8::internal::FLAG_single_generation) { + if (i::v8_flags.single_generation) { FlagAndPersistent object; { v8::HandleScope handle_scope(isolate); @@ -10543,7 +10541,7 @@ THREADED_TEST(ShadowObject) { THREADED_TEST(ShadowObjectAndDataProperty) { // Lite mode doesn't make use of feedback vectors, which is what we // want to ensure has the correct form. - if (i::FLAG_lite_mode) return; + if (i::v8_flags.lite_mode) return; // This test mimics the kind of shadow property the Chromium embedder // uses for undeclared globals. The IC subsystem has special handling // for this case, using a PREMONOMORPHIC state to delay entering @@ -10551,7 +10549,7 @@ THREADED_TEST(ShadowObjectAndDataProperty) { // efficient access and good feedback for optimization. v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope handle_scope(isolate); - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; Local<ObjectTemplate> global_template = v8::ObjectTemplate::New(isolate); LocalContext context(nullptr, global_template); @@ -10597,9 +10595,9 @@ THREADED_TEST(ShadowObjectAndDataProperty) { THREADED_TEST(ShadowObjectAndDataPropertyTurbo) { // This test is the same as the previous one except that it triggers // optimization of {foo} after its first invocation. - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; - if (i::FLAG_lite_mode) return; + if (i::v8_flags.lite_mode) return; v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope handle_scope(isolate); @@ -10736,7 +10734,7 @@ THREADED_TEST(SetPrototype) { // triggers dictionary elements in GetOwnPropertyNames() shouldn't // crash the runtime. THREADED_TEST(Regress91517) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext context; v8::Isolate* isolate = context->GetIsolate(); v8::HandleScope handle_scope(isolate); @@ -12939,8 +12937,8 @@ THREADED_TEST(SubclassGetConstructorName) { UNINITIALIZED_TEST(SharedObjectGetConstructorName) { if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - i::FLAG_shared_string_table = true; - i::FLAG_harmony_struct = true; + i::v8_flags.shared_string_table = true; + i::v8_flags.harmony_struct = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); @@ -13037,7 +13035,7 @@ static unsigned linear_congruential_generator; void ApiTestFuzzer::SetUp(PartOfTest part) { - linear_congruential_generator = i::FLAG_testing_prng_seed; + linear_congruential_generator = i::v8_flags.testing_prng_seed; fuzzing_ = true; int count = RegisterThreadedTest::count(); int start = count * part / (LAST_PART + 1); @@ -13310,7 +13308,7 @@ static void CheckSurvivingGlobalObjectsCount(int expected) { TEST(DontLeakGlobalObjects) { // Regression test for issues 1139850 and 1174891. - i::FLAG_expose_gc = true; + i::v8_flags.expose_gc = true; for (int i = 0; i < 5; i++) { { v8::HandleScope scope(CcTest::isolate()); @@ -13661,14 +13659,14 @@ static void event_handler(const v8::JitCodeEvent* event) { } UNINITIALIZED_TEST(SetJitCodeEventHandler) { - i::FLAG_stress_compaction = true; - i::FLAG_incremental_marking = false; - i::FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace. + i::v8_flags.stress_compaction = true; + i::v8_flags.incremental_marking = false; + i::v8_flags.stress_concurrent_allocation = false; // For SimulateFullSpace. // Batch compilation can cause different owning spaces for foo and bar. 
#if ENABLE_SPARKPLUG - i::FLAG_baseline_batch_compilation = false; + i::v8_flags.baseline_batch_compilation = false; #endif - if (!i::FLAG_compact) return; + if (!i::v8_flags.compact) return; const char* script = "function bar() {" " var sum = 0;" @@ -13875,7 +13873,7 @@ TEST(ExternalAllocatedMemory) { TEST(Regress51719) { - i::FLAG_incremental_marking = false; + i::v8_flags.incremental_marking = false; CcTest::InitializeVM(); const int64_t kTriggerGCSize = @@ -15399,8 +15397,8 @@ static void AsmJsWarningListener(v8::Local<v8::Message> message, } TEST(AsmJsWarning) { - i::FLAG_validate_asm = true; - if (i::FLAG_suppress_asm_messages) return; + i::v8_flags.validate_asm = true; + if (i::v8_flags.suppress_asm_messages) return; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); @@ -16031,7 +16029,7 @@ void PromiseRejectCallbackConstructError( } TEST(PromiseRejectCallbackConstructError) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); @@ -16493,7 +16491,7 @@ static void CreateGarbageInOldSpace() { // Test that idle notification can be handled and eventually collects garbage. TEST(TestIdleNotification) { - if (!i::FLAG_incremental_marking) return; + if (!i::v8_flags.incremental_marking) return; ManualGCScope manual_gc_scope; const intptr_t MB = 1024 * 1024; const double IdlePauseInSeconds = 1.0; @@ -16513,10 +16511,9 @@ TEST(TestIdleNotification) { (v8::base::TimeTicks::Now().ToInternalValue() / static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) + IdlePauseInSeconds); - if (CcTest::heap()->mark_compact_collector()->sweeping_in_progress()) { - CcTest::heap()->mark_compact_collector()->EnsureSweepingCompleted( - v8::internal::MarkCompactCollector::SweepingForcedFinalizationMode:: - kV8Only); + if (CcTest::heap()->sweeping_in_progress()) { + CcTest::heap()->EnsureSweepingCompleted( + i::Heap::SweepingForcedFinalizationMode::kV8Only); } } intptr_t final_size = CcTest::heap()->SizeOfObjects(); @@ -16527,8 +16524,7 @@ TEST(TestIdleNotification) { TEST(TestMemorySavingsMode) { LocalContext context; v8::Isolate* isolate = context->GetIsolate(); - v8::internal::Isolate* i_isolate = - reinterpret_cast<v8::internal::Isolate*>(isolate); + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); CHECK(!i_isolate->IsMemorySavingsModeActive()); isolate->EnableMemorySavingsMode(); CHECK(i_isolate->IsMemorySavingsModeActive()); @@ -16636,7 +16632,7 @@ THREADED_TEST(GetHeapStatistics) { CHECK_EQ(0u, heap_statistics.used_heap_size()); c1->GetIsolate()->GetHeapStatistics(&heap_statistics); CHECK_NE(static_cast<int>(heap_statistics.total_heap_size()), 0); - if (!v8::internal::FLAG_enable_third_party_heap) { + if (!i::v8_flags.enable_third_party_heap) { // TODO(wenyuzhao): Get used size from third_party_heap interface CHECK_NE(static_cast<int>(heap_statistics.used_heap_size()), 0); } @@ -16649,8 +16645,7 @@ TEST(GetHeapSpaceStatistics) { v8::HeapStatistics heap_statistics; // Force allocation in LO_SPACE so that every space has non-zero size. 
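Editor's note: the TestIdleNotification hunk below is more than a rename; querying and force-finishing sweeping now goes through `Heap` directly rather than through the mark-compact collector. The resulting guard pattern, as it appears in the updated test code:

```cpp
// New form: sweeping state lives on the Heap, not on the collector.
if (CcTest::heap()->sweeping_in_progress()) {
  CcTest::heap()->EnsureSweepingCompleted(
      i::Heap::SweepingForcedFinalizationMode::kV8Only);
}
```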
- v8::internal::Isolate* i_isolate = - reinterpret_cast<v8::internal::Isolate*>(isolate); + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); auto unused = i_isolate->factory()->TryNewFixedArray(512 * 1024, i::AllocationType::kOld); USE(unused); @@ -17121,7 +17116,7 @@ THREADED_TEST(SpaghettiStackReThrow) { TEST(Regress528) { ManualGCScope manual_gc_scope; v8::Isolate* isolate = CcTest::isolate(); - i::FLAG_retain_maps_for_n_gc = 0; + i::v8_flags.retain_maps_for_n_gc = 0; v8::HandleScope scope(isolate); v8::Local<Context> other_context; int gc_count; @@ -18234,12 +18229,11 @@ static void BreakArrayGuarantees(const char* script) { v8::Local<v8::Context> context = v8::Local<v8::Context>::New(isolate1, context1); v8::Context::Scope context_scope(context); - v8::internal::Isolate* i_isolate = - reinterpret_cast<v8::internal::Isolate*>(isolate1); - CHECK(v8::internal::Protectors::IsNoElementsIntact(i_isolate)); + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate1); + CHECK(i::Protectors::IsNoElementsIntact(i_isolate)); // Run something in new isolate. CompileRun(script); - CHECK(!v8::internal::Protectors::IsNoElementsIntact(i_isolate)); + CHECK(!i::Protectors::IsNoElementsIntact(i_isolate)); } isolate1->Exit(); isolate1->Dispose(); @@ -20490,8 +20484,8 @@ static void Helper137002(bool do_store, THREADED_TEST(Regress137002a) { - i::FLAG_allow_natives_syntax = true; - i::FLAG_compilation_cache = false; + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.compilation_cache = false; v8::HandleScope scope(CcTest::isolate()); for (int i = 0; i < 16; i++) { Helper137002(i & 8, i & 4, i & 2, i & 1); @@ -20500,7 +20494,7 @@ THREADED_TEST(Regress137002a) { THREADED_TEST(Regress137002b) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext context; v8::Isolate* isolate = context->GetIsolate(); v8::HandleScope scope(isolate); @@ -20590,7 +20584,7 @@ THREADED_TEST(Regress137002b) { THREADED_TEST(Regress142088) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext context; v8::Isolate* isolate = context->GetIsolate(); v8::HandleScope scope(isolate); @@ -20611,7 +20605,7 @@ THREADED_TEST(Regress142088) { THREADED_TEST(Regress137496) { - i::FLAG_expose_gc = true; + i::v8_flags.expose_gc = true; LocalContext context; v8::HandleScope scope(context->GetIsolate()); @@ -20923,7 +20917,7 @@ void CheckCorrectThrow(const char* script) { TEST(AccessCheckThrows) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate* isolate = CcTest::isolate(); isolate->SetFailedAccessCheckCallbackFunction(&FailedAccessCheckThrows); v8::HandleScope scope(isolate); @@ -21154,12 +21148,12 @@ class RegExpInterruptTest { TEST(RegExpInterruptAndCollectAllGarbage) { // Move all movable objects on GC. - i::FLAG_compact_on_every_full_gc = true; + i::v8_flags.compact_on_every_full_gc = true; // We want to be stuck regexp execution, so no fallback to linear-time // engine. // TODO(mbid,v8:10765): Find a way to test interrupt support of the // experimental engine. - i::FLAG_enable_experimental_regexp_engine_on_excessive_backtracks = false; + i::v8_flags.enable_experimental_regexp_engine_on_excessive_backtracks = false; RegExpInterruptTest test; test.RunTest(RegExpInterruptTest::CollectAllGarbage); } @@ -21169,7 +21163,7 @@ TEST(RegExpInterruptAndMakeSubjectOneByteExternal) { // engine. // TODO(mbid,v8:10765): Find a way to test interrupt support of the // experimental engine. 
- i::FLAG_enable_experimental_regexp_engine_on_excessive_backtracks = false; + i::v8_flags.enable_experimental_regexp_engine_on_excessive_backtracks = false; RegExpInterruptTest test; test.RunTest(RegExpInterruptTest::MakeSubjectOneByteExternal); } @@ -21179,7 +21173,7 @@ TEST(RegExpInterruptAndMakeSubjectTwoByteExternal) { // engine. // TODO(mbid,v8:10765): Find a way to test interrupt support of the // experimental engine. - i::FLAG_enable_experimental_regexp_engine_on_excessive_backtracks = false; + i::v8_flags.enable_experimental_regexp_engine_on_excessive_backtracks = false; RegExpInterruptTest test; test.RunTest(RegExpInterruptTest::MakeSubjectTwoByteExternal); } @@ -21401,7 +21395,7 @@ class RequestInterruptTestWithMathAbs .ToLocalChecked()) .FromJust(); - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; CompileRun( "function loopish(o) {" " var pre = 10;" @@ -21420,7 +21414,7 @@ class RequestInterruptTestWithMathAbs "%OptimizeFunctionOnNextCall(loopish);" "loopish(Math);"); - i::FLAG_allow_natives_syntax = false; + i::v8_flags.allow_natives_syntax = false; } private: @@ -21961,14 +21955,14 @@ int ApiCallOptimizationChecker::count = 0; TEST(FunctionCallOptimization) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; ApiCallOptimizationChecker checker; checker.RunAll(); } TEST(FunctionCallOptimizationMultipleArgs) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext context; v8::Isolate* isolate = context->GetIsolate(); v8::HandleScope scope(isolate); @@ -21996,7 +21990,7 @@ static void ReturnsSymbolCallback( TEST(ApiCallbackCanReturnSymbols) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext context; v8::Isolate* isolate = context->GetIsolate(); v8::HandleScope scope(isolate); @@ -22154,16 +22148,16 @@ void StoringEventLoggerCallback(const char* message, int status) { TEST(EventLogging) { v8::Isolate* isolate = CcTest::isolate(); isolate->SetEventLogger(StoringEventLoggerCallback); - v8::internal::NestedTimedHistogram histogram( - "V8.Test", 0, 10000, v8::internal::TimedHistogramResolution::MILLISECOND, - 50, reinterpret_cast<v8::internal::Isolate*>(isolate)->counters()); + i::NestedTimedHistogram histogram( + "V8.Test", 0, 10000, i::TimedHistogramResolution::MILLISECOND, 50, + reinterpret_cast<i::Isolate*>(isolate)->counters()); event_count = 0; int count = 0; { CHECK_EQ(0, event_count); { CHECK_EQ(0, event_count); - v8::internal::NestedTimedHistogramScope scope0(&histogram); + i::NestedTimedHistogramScope scope0(&histogram); CHECK_EQ(0, strcmp("V8.Test", last_event_message)); CHECK_EQ(v8::LogEventStatus::kStart, last_event_status); CHECK_EQ(++count, event_count); @@ -22171,27 +22165,27 @@ TEST(EventLogging) { CHECK_EQ(v8::LogEventStatus::kEnd, last_event_status); CHECK_EQ(++count, event_count); - v8::internal::NestedTimedHistogramScope scope1(&histogram); + i::NestedTimedHistogramScope scope1(&histogram); CHECK_EQ(0, strcmp("V8.Test", last_event_message)); CHECK_EQ(v8::LogEventStatus::kStart, last_event_status); CHECK_EQ(++count, event_count); { CHECK_EQ(count, event_count); - v8::internal::NestedTimedHistogramScope scope2(&histogram); + i::NestedTimedHistogramScope scope2(&histogram); CHECK_EQ(0, strcmp("V8.Test", last_event_message)); CHECK_EQ(v8::LogEventStatus::kStart, last_event_status); CHECK_EQ(++count, event_count); { CHECK_EQ(count, event_count); - v8::internal::NestedTimedHistogramScope scope3(&histogram); + 
i::NestedTimedHistogramScope scope3(&histogram); CHECK_EQ(++count, event_count); - v8::internal::PauseNestedTimedHistogramScope scope4(&histogram); + i::PauseNestedTimedHistogramScope scope4(&histogram); // The outer timer scope is just paused, no event is emited yet. CHECK_EQ(count, event_count); { CHECK_EQ(count, event_count); - v8::internal::NestedTimedHistogramScope scope5(&histogram); - v8::internal::NestedTimedHistogramScope scope5_1(&histogram); + i::NestedTimedHistogramScope scope5(&histogram); + i::NestedTimedHistogramScope scope5_1(&histogram); CHECK_EQ(0, strcmp("V8.Test", last_event_message)); CHECK_EQ(v8::LogEventStatus::kStart, last_event_status); count++; @@ -22205,11 +22199,11 @@ TEST(EventLogging) { CHECK_EQ(0, strcmp("V8.Test", last_event_message)); CHECK_EQ(v8::LogEventStatus::kEnd, last_event_status); CHECK_EQ(++count, event_count); - v8::internal::PauseNestedTimedHistogramScope scope6(&histogram); + i::PauseNestedTimedHistogramScope scope6(&histogram); // The outer timer scope is just paused, no event is emited yet. CHECK_EQ(count, event_count); { - v8::internal::PauseNestedTimedHistogramScope scope7(&histogram); + i::PauseNestedTimedHistogramScope scope7(&histogram); CHECK_EQ(count, event_count); } CHECK_EQ(count, event_count); @@ -22853,7 +22847,7 @@ TEST(ScriptPositionInfo) { i::Handle<i::Script> script1(i::Script::cast(obj->script()), i_isolate); - v8::internal::Script::PositionInfo info; + i::Script::PositionInfo info; for (int i = 0; i < 2; ++i) { // With offset. @@ -23712,7 +23706,7 @@ TEST(StreamingWithHarmonyScopes) { namespace { void StreamingWithIsolateScriptCache(bool run_gc) { - i::FLAG_expose_gc = true; + i::v8_flags.expose_gc = true; const char* chunks[] = {"'use strict'; (function test() { return 13; })", nullptr}; const char* full_source = chunks[0]; @@ -23802,7 +23796,7 @@ TEST(StreamingWithIsolateScriptCache) { TEST(StreamingWithIsolateScriptCacheClearingRootSFI) { // TODO(v8:12808): Remove this check once background compilation is capable of // reusing an existing Script. - if (v8::internal::FLAG_stress_background_compile) { + if (i::v8_flags.stress_background_compile) { return; } @@ -24017,7 +24011,7 @@ TEST(ModuleCodeCache) { { // Disable --always_turbofan, otherwise we try to optimize during module // instantiation, violating the DisallowCompilation scope. - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; v8::Isolate* isolate = v8::Isolate::New(create_params); { v8::Isolate::Scope iscope(isolate); @@ -24088,7 +24082,7 @@ TEST(CreateSyntheticModuleGC) { // happening during its execution. i::HeapAllocator::SetAllocationGcInterval(10); #endif - i::FLAG_inline_new = false; + i::v8_flags.inline_new = false; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); @@ -24674,8 +24668,8 @@ TEST(StringConcatOverflow) { TEST(TurboAsmDisablesDetach) { #ifndef V8_LITE_MODE - i::FLAG_turbofan = true; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.turbofan = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); LocalContext context; const char* load = @@ -24751,7 +24745,7 @@ TEST(StreamingScriptWithSourceMappingURLInTheMiddle) { TEST(NewStringRangeError) { // This test uses a lot of memory and fails with flaky OOM when run // with --stress-incremental-marking on TSAN. 
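Editor's note: the EventLogging hunks above only shorten namespace qualifiers, but the contract being tested is easy to lose in the rename noise: a nested timed-histogram scope logs a `kStart` event on entry and a `kEnd` event on exit, while a pause scope suspends the enclosing timer without logging anything. A simplified, hypothetical RAII illustration of that contract inferred from the test's `CHECK_EQ(count, event_count)` assertions (not V8's implementation):

```cpp
#include <cstdio>

// Hypothetical stand-ins for the histogram scopes used in EventLogging.
struct Histogram { int depth = 0; };

struct TimedScope {  // ~ NestedTimedHistogramScope: logs on entry and exit
  Histogram* h;
  explicit TimedScope(Histogram* h) : h(h) { ++h->depth; std::puts("kStart"); }
  ~TimedScope() { --h->depth; std::puts("kEnd"); }
};

struct PauseScope {  // ~ PauseNestedTimedHistogramScope: logs nothing
  explicit PauseScope(Histogram*) { /* pause the outer timer */ }
  ~PauseScope() { /* resume the outer timer */ }
};
```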
- i::FLAG_stress_incremental_marking = false; + i::v8_flags.stress_incremental_marking = false; v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope handle_scope(isolate); const int length = i::String::kMaxLength + 1; @@ -25084,7 +25078,7 @@ class TerminateExecutionThread : public v8::base::Thread { }; TEST(FutexInterruption) { - i::FLAG_harmony_sharedarraybuffer = true; + i::v8_flags.harmony_sharedarraybuffer = true; v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope scope(isolate); LocalContext env; @@ -25140,7 +25134,7 @@ TEST(AbortOnUncaughtExceptionNoAbort) { v8::ObjectTemplate::New(isolate); LocalContext env(nullptr, global_template); - i::FLAG_abort_on_uncaught_exception = true; + i::v8_flags.abort_on_uncaught_exception = true; isolate->SetAbortOnUncaughtExceptionCallback(NoAbortOnUncaughtException); CompileRun("function boom() { throw new Error(\"boom\") }"); @@ -25593,7 +25587,7 @@ class MemoryPressureThread : public v8::base::Thread { }; TEST(MemoryPressure) { - if (v8::internal::FLAG_optimize_for_size) return; + if (i::v8_flags.optimize_for_size) return; v8::Isolate* isolate = CcTest::isolate(); WeakCallCounter counter(1234); @@ -25897,8 +25891,8 @@ THREADED_TEST(GlobalAccessorInfo) { TEST(DeterministicRandomNumberGeneration) { v8::HandleScope scope(CcTest::isolate()); - int previous_seed = v8::internal::FLAG_random_seed; - v8::internal::FLAG_random_seed = 1234; + int previous_seed = i::v8_flags.random_seed; + i::v8_flags.random_seed = 1234; double first_value; double second_value; @@ -25916,7 +25910,7 @@ TEST(DeterministicRandomNumberGeneration) { } CHECK_EQ(first_value, second_value); - v8::internal::FLAG_random_seed = previous_seed; + i::v8_flags.random_seed = previous_seed; } UNINITIALIZED_TEST(AllowAtomicsWait) { @@ -26278,7 +26272,7 @@ v8::MaybeLocal<v8::Context> HostCreateShadowRealmContextCallbackStatic( } TEST(CreateShadowRealmContextHostNotSupported) { - i::FLAG_harmony_shadow_realm = true; + i::v8_flags.harmony_shadow_realm = true; LocalContext context; v8::Isolate* isolate = context->GetIsolate(); v8::HandleScope scope(isolate); @@ -26304,7 +26298,7 @@ TEST(CreateShadowRealmContextHostNotSupported) { } TEST(CreateShadowRealmContext) { - i::FLAG_harmony_shadow_realm = true; + i::v8_flags.harmony_shadow_realm = true; LocalContext context; v8::Isolate* isolate = context->GetIsolate(); v8::HandleScope scope(isolate); @@ -26333,7 +26327,7 @@ v8::MaybeLocal<v8::Context> HostCreateShadowRealmContextCallbackThrow( } TEST(CreateShadowRealmContextThrow) { - i::FLAG_harmony_shadow_realm = true; + i::v8_flags.harmony_shadow_realm = true; LocalContext context; v8::Isolate* isolate = context->GetIsolate(); v8::HandleScope scope(isolate); @@ -26754,7 +26748,8 @@ namespace v8 { namespace internal { namespace wasm { TEST(WasmI32AtomicWaitCallback) { - FlagScope<bool> wasm_threads_flag(&i::FLAG_experimental_wasm_threads, true); + FlagScope<bool> wasm_threads_flag(&i::v8_flags.experimental_wasm_threads, + true); WasmRunner<int32_t, int32_t, int32_t, double> r(TestExecutionTier::kTurbofan); r.builder().AddMemory(kWasmPageSize, SharedFlag::kShared); r.builder().SetHasSharedMemory(); @@ -26790,7 +26785,8 @@ TEST(WasmI32AtomicWaitCallback) { } TEST(WasmI64AtomicWaitCallback) { - FlagScope<bool> wasm_threads_flag(&i::FLAG_experimental_wasm_threads, true); + FlagScope<bool> wasm_threads_flag(&i::v8_flags.experimental_wasm_threads, + true); WasmRunner<int32_t, int32_t, double, double> r(TestExecutionTier::kTurbofan); r.builder().AddMemory(kWasmPageSize, SharedFlag::kShared); 
r.builder().SetHasSharedMemory(); @@ -27470,14 +27466,14 @@ static void CallIsolate2(const v8::FunctionCallbackInfo<v8::Value>& args) { UNINITIALIZED_TEST(NestedIsolates) { #ifdef VERIFY_HEAP - i::FLAG_verify_heap = true; + i::v8_flags.verify_heap = true; #endif // VERIFY_HEAP // Create two isolates and set up C++ functions via function templates that // call into the other isolate. Recurse a few times, trigger GC along the way, // and finally capture a stack trace. Check that the stack trace only includes // frames from its own isolate. - i::FLAG_stack_trace_limit = 20; - i::FLAG_experimental_stack_trace_frames = true; + i::v8_flags.stack_trace_limit = 20; + i::v8_flags.experimental_stack_trace_frames = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); isolate_1 = v8::Isolate::New(create_params); @@ -28283,15 +28279,15 @@ void CheckDynamicTypeInfo() { TEST(FastApiStackSlot) { #ifndef V8_LITE_MODE - if (i::FLAG_jitless) return; + if (i::v8_flags.jitless) return; - v8::internal::FLAG_turbofan = true; - v8::internal::FLAG_turbo_fast_api_calls = true; - v8::internal::FLAG_allow_natives_syntax = true; + i::v8_flags.turbofan = true; + i::v8_flags.turbo_fast_api_calls = true; + i::v8_flags.allow_natives_syntax = true; // Disable --always_turbofan, otherwise we haven't generated the necessary // feedback to go down the "best optimization" path for the fast call. - v8::internal::FLAG_always_turbofan = false; - v8::internal::FlagList::EnforceFlagImplications(); + i::v8_flags.always_turbofan = false; + i::FlagList::EnforceFlagImplications(); v8::Isolate* isolate = CcTest::isolate(); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); @@ -28335,15 +28331,15 @@ TEST(FastApiStackSlot) { TEST(FastApiCalls) { #ifndef V8_LITE_MODE - if (i::FLAG_jitless) return; + if (i::v8_flags.jitless) return; - v8::internal::FLAG_turbofan = true; - v8::internal::FLAG_turbo_fast_api_calls = true; - v8::internal::FLAG_allow_natives_syntax = true; + i::v8_flags.turbofan = true; + i::v8_flags.turbo_fast_api_calls = true; + i::v8_flags.allow_natives_syntax = true; // Disable --always_turbofan, otherwise we haven't generated the necessary // feedback to go down the "best optimization" path for the fast call. - v8::internal::FLAG_always_turbofan = false; - v8::internal::FlagList::EnforceFlagImplications(); + i::v8_flags.always_turbofan = false; + i::FlagList::EnforceFlagImplications(); CcTest::InitializeVM(); v8::Isolate* isolate = CcTest::isolate(); @@ -28885,15 +28881,15 @@ void SequenceSlowCallback(const v8::FunctionCallbackInfo<v8::Value>& args) { TEST(FastApiSequenceOverloads) { #ifndef V8_LITE_MODE - if (i::FLAG_jitless) return; + if (i::v8_flags.jitless) return; - v8::internal::FLAG_turbofan = true; - v8::internal::FLAG_turbo_fast_api_calls = true; - v8::internal::FLAG_allow_natives_syntax = true; + i::v8_flags.turbofan = true; + i::v8_flags.turbo_fast_api_calls = true; + i::v8_flags.allow_natives_syntax = true; // Disable --always_turbofan, otherwise we haven't generated the necessary // feedback to go down the "best optimization" path for the fast call. 
- v8::internal::FLAG_always_turbofan = false; - v8::internal::FlagList::EnforceFlagImplications(); + i::v8_flags.always_turbofan = false; + i::FlagList::EnforceFlagImplications(); v8::Isolate* isolate = CcTest::isolate(); HandleScope handle_scope(isolate); @@ -28943,15 +28939,15 @@ TEST(FastApiSequenceOverloads) { TEST(FastApiOverloadResolution) { #ifndef V8_LITE_MODE - if (i::FLAG_jitless) return; + if (i::v8_flags.jitless) return; - v8::internal::FLAG_turbofan = true; - v8::internal::FLAG_turbo_fast_api_calls = true; - v8::internal::FLAG_allow_natives_syntax = true; + i::v8_flags.turbofan = true; + i::v8_flags.turbo_fast_api_calls = true; + i::v8_flags.allow_natives_syntax = true; // Disable --always_turbofan, otherwise we haven't generated the necessary // feedback to go down the "best optimization" path for the fast call. - v8::internal::FLAG_always_turbofan = false; - v8::internal::FlagList::EnforceFlagImplications(); + i::v8_flags.always_turbofan = false; + i::FlagList::EnforceFlagImplications(); v8::CFunction typed_array_callback = v8::CFunctionBuilder().Fn(FastCallback1TypedArray).Build(); @@ -29116,7 +29112,7 @@ TEST(TriggerDelayedMainThreadMetricsEvent) { using v8::Context; using v8::Local; using v8::MaybeLocal; - i::FLAG_stress_concurrent_allocation = false; + i::v8_flags.stress_concurrent_allocation = false; // Set up isolate and context. v8::Isolate* iso = CcTest::isolate(); @@ -29142,8 +29138,7 @@ TEST(TriggerDelayedMainThreadMetricsEvent) { CHECK_EQ(recorder->count_, 0); // Unchanged. CHECK_EQ(recorder->time_in_us_, -1); // Unchanged. v8::base::OS::Sleep(v8::base::TimeDelta::FromMilliseconds(1100)); - while (v8::platform::PumpMessageLoop(v8::internal::V8::GetCurrentPlatform(), - iso)) { + while (v8::platform::PumpMessageLoop(i::V8::GetCurrentPlatform(), iso)) { } CHECK_EQ(recorder->count_, 1); // Increased. CHECK_GT(recorder->time_in_us_, 100); @@ -29155,8 +29150,7 @@ TEST(TriggerDelayedMainThreadMetricsEvent) { // invalid. i_iso->metrics_recorder()->DelayMainThreadEvent(event, context_id); v8::base::OS::Sleep(v8::base::TimeDelta::FromMilliseconds(1100)); - while (v8::platform::PumpMessageLoop(v8::internal::V8::GetCurrentPlatform(), - iso)) { + while (v8::platform::PumpMessageLoop(i::V8::GetCurrentPlatform(), iso)) { } CHECK_EQ(recorder->count_, 1); // Unchanged. } @@ -29342,24 +29336,24 @@ TEST(TestSetSabConstructorEnabledCallback) { // {Isolate::IsSharedArrayBufferConstructorEnabled} calls the callback set by // the embedder if such a callback exists. Otherwise it returns - // {FLAG_harmony_sharedarraybuffer}. First we test that the flag is returned - // correctly if no callback is set. Then we test that the flag is ignored if - // the callback is set. + // {v8_flags.harmony_sharedarraybuffer}. First we test that the flag is + // returned correctly if no callback is set. Then we test that the flag is + // ignored if the callback is set. 
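Editor's note: the four Fast API test hunks above share one setup idiom worth calling out: after flipping interdependent flags by hand, the tests re-run the flag-implication solver so derived flag values stay consistent. The pattern, taken directly from the updated code:

```cpp
i::v8_flags.turbofan = true;
i::v8_flags.turbo_fast_api_calls = true;
i::v8_flags.allow_natives_syntax = true;
// Disable --always_turbofan, otherwise we haven't generated the necessary
// feedback to go down the "best optimization" path for the fast call.
i::v8_flags.always_turbofan = false;
i::FlagList::EnforceFlagImplications();
```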
- i::FLAG_harmony_sharedarraybuffer = false; - i::FLAG_enable_sharedarraybuffer_per_context = false; + i::v8_flags.harmony_sharedarraybuffer = false; + i::v8_flags.enable_sharedarraybuffer_per_context = false; CHECK(!i_isolate->IsSharedArrayBufferConstructorEnabled(i_context)); - i::FLAG_harmony_sharedarraybuffer = false; - i::FLAG_enable_sharedarraybuffer_per_context = false; + i::v8_flags.harmony_sharedarraybuffer = false; + i::v8_flags.enable_sharedarraybuffer_per_context = false; CHECK(!i_isolate->IsSharedArrayBufferConstructorEnabled(i_context)); - i::FLAG_harmony_sharedarraybuffer = true; - i::FLAG_enable_sharedarraybuffer_per_context = false; + i::v8_flags.harmony_sharedarraybuffer = true; + i::v8_flags.enable_sharedarraybuffer_per_context = false; CHECK(i_isolate->IsSharedArrayBufferConstructorEnabled(i_context)); - i::FLAG_harmony_sharedarraybuffer = true; - i::FLAG_enable_sharedarraybuffer_per_context = true; + i::v8_flags.harmony_sharedarraybuffer = true; + i::v8_flags.enable_sharedarraybuffer_per_context = true; CHECK(!i_isolate->IsSharedArrayBufferConstructorEnabled(i_context)); isolate->SetSharedArrayBufferConstructorEnabledCallback( @@ -29382,7 +29376,7 @@ TEST(EmbedderInstanceTypes) { LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); - i::FLAG_embedder_instance_types = true; + i::v8_flags.embedder_instance_types = true; Local<FunctionTemplate> node = FunctionTemplate::New(isolate); Local<ObjectTemplate> proto_template = node->PrototypeTemplate(); Local<FunctionTemplate> nodeType = v8::FunctionTemplate::New( diff --git a/deps/v8/test/cctest/test-assembler-arm64.cc b/deps/v8/test/cctest/test-assembler-arm64.cc index 5653da74927d46..efcc5ebad1a121 100644 --- a/deps/v8/test/cctest/test-assembler-arm64.cc +++ b/deps/v8/test/cctest/test-assembler-arm64.cc @@ -132,7 +132,7 @@ static void InitializeVM() { RegisterDump core; \ HandleScope handle_scope(isolate); \ Handle<Code> code; \ - if (i::FLAG_trace_sim) { \ + if (i::v8_flags.trace_sim) { \ pdis.reset(new PrintDisassembler(stdout)); \ decoder->PrependVisitor(pdis.get()); \ } @@ -167,7 +167,7 @@ static void InitializeVM() { CodeDesc desc; \ __ GetCode(masm.isolate(), &desc); \ code = Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build(); \ - if (FLAG_print_code) code->Print(); \ + if (v8_flags.print_code) code->Print(); \ } #else // ifdef USE_SIMULATOR. @@ -215,7 +215,7 @@ static void InitializeVM() { CodeDesc desc; \ __ GetCode(masm.isolate(), &desc); \ code = Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build(); \ - if (FLAG_print_code) code->Print(); \ + if (v8_flags.print_code) code->Print(); \ } #endif // ifdef USE_SIMULATOR. 
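Editor's note: in the TestSetSabConstructorEnabledCallback hunk above, the rewritten comment plus the four CHECK blocks amount to a small truth table for the no-callback case. Summarized here (derived from the CHECKs, not stated explicitly in the source):

```cpp
// With no embedder callback installed:
//   harmony_sharedarraybuffer | enable_sharedarraybuffer_per_context | enabled?
//   --------------------------+--------------------------------------+---------
//   false                     | false                                | no
//   true                      | false                                | yes
//   true                      | true                                 | no
// Once SetSharedArrayBufferConstructorEnabledCallback is installed, both flags
// are ignored and the callback's answer wins.
```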
@@ -11842,7 +11842,7 @@ TEST(system_msr) { } TEST(system_pauth_b) { - i::FLAG_sim_abort_on_bad_auth = false; + i::v8_flags.sim_abort_on_bad_auth = false; SETUP(); START(); diff --git a/deps/v8/test/cctest/test-assembler-ia32.cc b/deps/v8/test/cctest/test-assembler-ia32.cc index 962fd142b4f6bf..644b2f51d63e2d 100644 --- a/deps/v8/test/cctest/test-assembler-ia32.cc +++ b/deps/v8/test/cctest/test-assembler-ia32.cc @@ -138,7 +138,6 @@ TEST(AssemblerIa322) { // some relocated stuff here, not executed __ mov(eax, isolate->factory()->true_value()); - __ jmp(kNullAddress, RelocInfo::RUNTIME_ENTRY); CodeDesc desc; assm.GetCode(isolate, &desc); diff --git a/deps/v8/test/cctest/test-assembler-mips64.cc b/deps/v8/test/cctest/test-assembler-mips64.cc index b2b1d7145b9e41..6e57ca33084ad9 100644 --- a/deps/v8/test/cctest/test-assembler-mips64.cc +++ b/deps/v8/test/cctest/test-assembler-mips64.cc @@ -6216,7 +6216,7 @@ TEST(Call_with_trampoline) { HandleScope scope(isolate); MacroAssembler assm(isolate, v8::internal::CodeObjectRequired::kYes); - int next_buffer_check_ = FLAG_force_long_branches + int next_buffer_check_ = v8_flags.force_long_branches ? kMaxInt : TurboAssembler::kMaxBranchOffset - TurboAssembler::kTrampolineSlotsSize * 16; @@ -6233,7 +6233,7 @@ TEST(Call_with_trampoline) { int pc_offset_before = __ pc_offset(); { // There should be a trampoline after this Call - __ Call(FUNCTION_ADDR(DummyFunction), RelocInfo::RUNTIME_ENTRY); + __ Call(FUNCTION_ADDR(DummyFunction), RelocInfo::EXTERNAL_REFERENCE); } int pc_offset_after = __ pc_offset(); int safepoint_pc_offset = __ pc_offset_for_safepoint(); @@ -9616,7 +9616,7 @@ void run_msa_mi10(InstFunc GenerateVectorInstructionFunc) { T* in_array_middle = in_test_vector + arraysize(in_test_vector) / 2; T* out_array_middle = out_test_vector + arraysize(out_test_vector) / 2; - v8::base::RandomNumberGenerator rand_gen(FLAG_random_seed); + v8::base::RandomNumberGenerator rand_gen(v8_flags.random_seed); for (unsigned int i = 0; i < arraysize(in_test_vector); i++) { in_test_vector[i] = static_cast<T>(rand_gen.NextInt()); out_test_vector[i] = 0; diff --git a/deps/v8/test/cctest/test-assembler-riscv32.cc b/deps/v8/test/cctest/test-assembler-riscv32.cc index 29f157055f877f..a72834e578f4c5 100644 --- a/deps/v8/test/cctest/test-assembler-riscv32.cc +++ b/deps/v8/test/cctest/test-assembler-riscv32.cc @@ -77,7 +77,7 @@ using F5 = void*(void* p0, void* p1, int p2, int p3, int p4); #define UTEST_R1_FORM_WITH_RES_C(instr_name, in_type, out_type, rs1_val, \ expected_res) \ TEST(RISCV_UTEST_##instr_name) { \ - i::FLAG_riscv_c_extension = true; \ + i::v8_flags.riscv_c_extension = true; \ CcTest::InitializeVM(); \ auto fn = [](MacroAssembler& assm) { __ instr_name(a0, a0); }; \ auto res = GenAndRunTest<out_type, in_type>(rs1_val, fn); \ @@ -1070,7 +1070,7 @@ TEST(NAN_BOX) { TEST(RVC_CI) { // Test RV64C extension CI type instructions. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.addi @@ -1116,7 +1116,7 @@ TEST(RVC_CI) { } TEST(RVC_CIW) { - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.addi4spn @@ -1134,7 +1134,7 @@ TEST(RVC_CIW) { TEST(RVC_CR) { // Test RV64C extension CR type instructions. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.add @@ -1150,7 +1150,7 @@ TEST(RVC_CR) { TEST(RVC_CA) { // Test RV64C extension CA type instructions. 
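Editor's note: two hunks above are not flag renames. test-assembler-ia32.cc drops a `jmp(kNullAddress, RelocInfo::RUNTIME_ENTRY)` and the mips64 trampoline test retags its call, which is consistent with the `RUNTIME_ENTRY` reloc mode being retired in this V8 version (an inference from these hunks, not stated in the diff). The mips64 replacement reads:

```cpp
// There should be a trampoline after this Call
__ Call(FUNCTION_ADDR(DummyFunction), RelocInfo::EXTERNAL_REFERENCE);
```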
- i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.sub @@ -1196,7 +1196,7 @@ TEST(RVC_CA) { TEST(RVC_LOAD_STORE_SP) { // Test RV32C extension flwsp/fswsp, lwsp/swsp. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); { @@ -1220,7 +1220,7 @@ TEST(RVC_LOAD_STORE_SP) { TEST(RVC_LOAD_STORE_COMPRESSED) { // Test RV64C extension fld, lw, ld. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); @@ -1252,7 +1252,7 @@ TEST(RVC_LOAD_STORE_COMPRESSED) { } TEST(RVC_JUMP) { - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); Label L, C; @@ -1278,7 +1278,7 @@ TEST(RVC_JUMP) { TEST(RVC_CB) { // Test RV64C extension CI type instructions. - FLAG_riscv_c_extension = true; + v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.srai @@ -1304,7 +1304,7 @@ TEST(RVC_CB) { } TEST(RVC_CB_BRANCH) { - FLAG_riscv_c_extension = true; + v8_flags.riscv_c_extension = true; // Test floating point compare and // branch instructions. CcTest::InitializeVM(); @@ -2145,6 +2145,11 @@ UTEST_RVV_VFW_VF_FORM_WITH_OP(vfwmul_vf, *, false, is_invalid_fmul) for (float rs1_fval : array) { \ for (float rs2_fval : array) { \ for (float rs3_fval : array) { \ + double rs1_dval = base::bit_cast<double>( \ + (uint64_t)base::bit_cast<uint32_t>(rs1_fval) << 32 | \ + base::bit_cast<uint32_t>(rs1_fval)); \ + double rs2_dval = static_cast<double>(rs2_fval); \ + double rs3_dval = static_cast<double>(rs3_fval); \ double res = \ GenAndRunTest<double, float>(rs1_fval, rs2_fval, rs3_fval, fn); \ CHECK_DOUBLE_EQ((expect_res), res); \ @@ -2170,6 +2175,11 @@ UTEST_RVV_VFW_VF_FORM_WITH_OP(vfwmul_vf, *, false, is_invalid_fmul) for (float rs1_fval : array) { \ for (float rs2_fval : array) { \ for (float rs3_fval : array) { \ + double rs1_dval = base::bit_cast<double>( \ + (uint64_t)base::bit_cast<uint32_t>(rs1_fval) << 32 | \ + base::bit_cast<uint32_t>(rs1_fval)); \ + double rs2_dval = static_cast<double>(rs2_fval); \ + double rs3_dval = static_cast<double>(rs3_fval); \ double res = \ GenAndRunTest<double, float>(rs1_fval, rs2_fval, rs3_fval, fn); \ CHECK_DOUBLE_EQ((expect_res), res); \ @@ -2180,21 +2190,21 @@ UTEST_RVV_VFW_VF_FORM_WITH_OP(vfwmul_vf, *, false, is_invalid_fmul) #define ARRAY_FLOAT compiler::ValueHelper::GetVector<float>() UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES(vfwmacc_vv, ARRAY_FLOAT, - std::fma(rs2_fval, rs3_fval, rs1_fval)) + std::fma(rs2_dval, rs3_dval, rs1_dval)) UTEST_RVV_VFW_FMA_VF_FORM_WITH_RES(vfwmacc_vf, ARRAY_FLOAT, - std::fma(rs2_fval, rs3_fval, rs1_fval)) + std::fma(rs2_dval, rs3_dval, rs1_dval)) UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES(vfwnmacc_vv, ARRAY_FLOAT, - std::fma(rs2_fval, -rs3_fval, -rs1_fval)) + std::fma(rs2_dval, -rs3_dval, -rs1_dval)) UTEST_RVV_VFW_FMA_VF_FORM_WITH_RES(vfwnmacc_vf, ARRAY_FLOAT, - std::fma(rs2_fval, -rs3_fval, -rs1_fval)) + std::fma(rs2_dval, -rs3_dval, -rs1_dval)) UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES(vfwmsac_vv, ARRAY_FLOAT, - std::fma(rs2_fval, rs3_fval, -rs1_fval)) + std::fma(rs2_dval, rs3_dval, -rs1_dval)) UTEST_RVV_VFW_FMA_VF_FORM_WITH_RES(vfwmsac_vf, ARRAY_FLOAT, - std::fma(rs2_fval, rs3_fval, -rs1_fval)) + std::fma(rs2_dval, rs3_dval, -rs1_dval)) UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES(vfwnmsac_vv, ARRAY_FLOAT, - std::fma(rs2_fval, -rs3_fval, rs1_fval)) + std::fma(rs2_dval, -rs3_dval, rs1_dval)) UTEST_RVV_VFW_FMA_VF_FORM_WITH_RES(vfwnmsac_vf, ARRAY_FLOAT, 
- std::fma(rs2_fval, -rs3_fval, rs1_fval)) + std::fma(rs2_dval, -rs3_dval, rs1_dval)) #undef ARRAY_FLOAT #undef UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES @@ -2304,7 +2314,9 @@ UTEST_RVV_FMA_VF_FORM_WITH_RES(vfnmsac_vf, ARRAY_FLOAT, for (float rs1_fval : compiler::ValueHelper::GetVector<float>()) { \ std::vector<double> temp_arr(kRvvVLEN / 32, \ static_cast<double>(rs1_fval)); \ - double expect_res = rs1_fval; \ + double expect_res = base::bit_cast<double>( \ + (uint64_t)base::bit_cast<uint32_t>(rs1_fval) << 32 | \ + base::bit_cast<uint32_t>(rs1_fval)); \ for (double val : temp_arr) { \ expect_res += val; \ if (std::isnan(expect_res)) { \ diff --git a/deps/v8/test/cctest/test-assembler-riscv64.cc b/deps/v8/test/cctest/test-assembler-riscv64.cc index 804ca05824650e..898929b16d760d 100644 --- a/deps/v8/test/cctest/test-assembler-riscv64.cc +++ b/deps/v8/test/cctest/test-assembler-riscv64.cc @@ -79,7 +79,7 @@ using F5 = void*(void* p0, void* p1, int p2, int p3, int p4); #define UTEST_R1_FORM_WITH_RES_C(instr_name, in_type, out_type, rs1_val, \ expected_res) \ TEST(RISCV_UTEST_##instr_name) { \ - i::FLAG_riscv_c_extension = true; \ + i::v8_flags.riscv_c_extension = true; \ CcTest::InitializeVM(); \ auto fn = [](MacroAssembler& assm) { __ instr_name(a0, a0); }; \ auto res = GenAndRunTest<out_type, in_type>(rs1_val, fn); \ @@ -1211,7 +1211,7 @@ TEST(NAN_BOX) { TEST(RVC_CI) { // Test RV64C extension CI type instructions. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.addi @@ -1264,7 +1264,7 @@ TEST(RVC_CI) { } TEST(RVC_CIW) { - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.addi4spn @@ -1282,7 +1282,7 @@ TEST(RVC_CIW) { TEST(RVC_CR) { // Test RV64C extension CR type instructions. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.add @@ -1298,7 +1298,7 @@ TEST(RVC_CR) { TEST(RVC_CA) { // Test RV64C extension CA type instructions. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.sub @@ -1364,7 +1364,7 @@ TEST(RVC_CA) { TEST(RVC_LOAD_STORE_SP) { // Test RV64C extension fldsp/fsdsp, lwsp/swsp, ldsp/sdsp. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); { @@ -1397,7 +1397,7 @@ TEST(RVC_LOAD_STORE_SP) { TEST(RVC_LOAD_STORE_COMPRESSED) { // Test RV64C extension fld, lw, ld. - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); @@ -1479,7 +1479,7 @@ TEST(RVC_LOAD_STORE_COMPRESSED) { } TEST(RVC_JUMP) { - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); Label L, C; @@ -1505,7 +1505,7 @@ TEST(RVC_JUMP) { TEST(RVC_CB) { // Test RV64C extension CI type instructions. - FLAG_riscv_c_extension = true; + v8_flags.riscv_c_extension = true; CcTest::InitializeVM(); // Test c.srai @@ -1531,7 +1531,7 @@ TEST(RVC_CB) { } TEST(RVC_CB_BRANCH) { - FLAG_riscv_c_extension = true; + v8_flags.riscv_c_extension = true; // Test floating point compare and // branch instructions. 
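Editor's note: the new `rs1_dval` lines in the riscv32 widening-FMA macros above (the riscv64 file repeats the same change below) build the expected accumulator value by duplicating the float's 32-bit pattern into both halves of a 64-bit word and reinterpreting it as a double, presumably because the double-width destination group was seeded with copies of the narrow input. A standalone sketch of that construction, using standard C++20 `std::bit_cast` in place of V8's `base::bit_cast`:

```cpp
#include <bit>
#include <cstdint>

// Mirrors the macros' rs1_dval computation: replicate the 32-bit float
// pattern into both halves of a 64-bit word, then view it as a double.
double ReplicatedFloatBitsAsDouble(float f) {
  uint32_t bits = std::bit_cast<uint32_t>(f);
  uint64_t wide = (static_cast<uint64_t>(bits) << 32) | bits;
  return std::bit_cast<double>(wide);
}
```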
CcTest::InitializeVM(); @@ -2409,6 +2409,11 @@ UTEST_RVV_VFW_VF_FORM_WITH_OP(vfwmul_vf, *, false, is_invalid_fmul) for (float rs1_fval : array) { \ for (float rs2_fval : array) { \ for (float rs3_fval : array) { \ + double rs1_dval = base::bit_cast<double>( \ + (uint64_t)base::bit_cast<uint32_t>(rs1_fval) << 32 | \ + base::bit_cast<uint32_t>(rs1_fval)); \ + double rs2_dval = static_cast<double>(rs2_fval); \ + double rs3_dval = static_cast<double>(rs3_fval); \ double res = \ GenAndRunTest<double, float>(rs1_fval, rs2_fval, rs3_fval, fn); \ CHECK_DOUBLE_EQ((expect_res), res); \ @@ -2434,6 +2439,11 @@ UTEST_RVV_VFW_VF_FORM_WITH_OP(vfwmul_vf, *, false, is_invalid_fmul) for (float rs1_fval : array) { \ for (float rs2_fval : array) { \ for (float rs3_fval : array) { \ + double rs1_dval = base::bit_cast<double>( \ + (uint64_t)base::bit_cast<uint32_t>(rs1_fval) << 32 | \ + base::bit_cast<uint32_t>(rs1_fval)); \ + double rs2_dval = static_cast<double>(rs2_fval); \ + double rs3_dval = static_cast<double>(rs3_fval); \ double res = \ GenAndRunTest<double, float>(rs1_fval, rs2_fval, rs3_fval, fn); \ CHECK_DOUBLE_EQ((expect_res), res); \ @@ -2444,21 +2454,21 @@ UTEST_RVV_VFW_VF_FORM_WITH_OP(vfwmul_vf, *, false, is_invalid_fmul) #define ARRAY_FLOAT compiler::ValueHelper::GetVector<float>() UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES(vfwmacc_vv, ARRAY_FLOAT, - std::fma(rs2_fval, rs3_fval, rs1_fval)) + std::fma(rs2_dval, rs3_dval, rs1_dval)) UTEST_RVV_VFW_FMA_VF_FORM_WITH_RES(vfwmacc_vf, ARRAY_FLOAT, - std::fma(rs2_fval, rs3_fval, rs1_fval)) + std::fma(rs2_dval, rs3_dval, rs1_dval)) UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES(vfwnmacc_vv, ARRAY_FLOAT, - std::fma(rs2_fval, -rs3_fval, -rs1_fval)) + std::fma(rs2_dval, -rs3_dval, -rs1_dval)) UTEST_RVV_VFW_FMA_VF_FORM_WITH_RES(vfwnmacc_vf, ARRAY_FLOAT, - std::fma(rs2_fval, -rs3_fval, -rs1_fval)) + std::fma(rs2_dval, -rs3_dval, -rs1_dval)) UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES(vfwmsac_vv, ARRAY_FLOAT, - std::fma(rs2_fval, rs3_fval, -rs1_fval)) + std::fma(rs2_dval, rs3_dval, -rs1_dval)) UTEST_RVV_VFW_FMA_VF_FORM_WITH_RES(vfwmsac_vf, ARRAY_FLOAT, - std::fma(rs2_fval, rs3_fval, -rs1_fval)) + std::fma(rs2_dval, rs3_dval, -rs1_dval)) UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES(vfwnmsac_vv, ARRAY_FLOAT, - std::fma(rs2_fval, -rs3_fval, rs1_fval)) + std::fma(rs2_dval, -rs3_dval, rs1_dval)) UTEST_RVV_VFW_FMA_VF_FORM_WITH_RES(vfwnmsac_vf, ARRAY_FLOAT, - std::fma(rs2_fval, -rs3_fval, rs1_fval)) + std::fma(rs2_dval, -rs3_dval, rs1_dval)) #undef ARRAY_FLOAT #undef UTEST_RVV_VFW_FMA_VV_FORM_WITH_RES @@ -2568,7 +2578,9 @@ UTEST_RVV_FMA_VF_FORM_WITH_RES(vfnmsac_vf, ARRAY_FLOAT, for (float rs1_fval : compiler::ValueHelper::GetVector<float>()) { \ std::vector<double> temp_arr(kRvvVLEN / 32, \ static_cast<double>(rs1_fval)); \ - double expect_res = rs1_fval; \ + double expect_res = base::bit_cast<double>( \ + (uint64_t)base::bit_cast<uint32_t>(rs1_fval) << 32 | \ + base::bit_cast<uint32_t>(rs1_fval)); \ for (double val : temp_arr) { \ expect_res += val; \ if (std::isnan(expect_res)) { \ diff --git a/deps/v8/test/cctest/test-code-stub-assembler.cc b/deps/v8/test/cctest/test-code-stub-assembler.cc index e2f26c98092496..53ad0a95e2e63f 100644 --- a/deps/v8/test/cctest/test-code-stub-assembler.cc +++ b/deps/v8/test/cctest/test-code-stub-assembler.cc @@ -493,7 +493,7 @@ TEST(ComputeIntegerHash) { FunctionTester ft(asm_tester.GenerateCode(), kNumParams); - base::RandomNumberGenerator rand_gen(FLAG_random_seed); + base::RandomNumberGenerator rand_gen(v8_flags.random_seed); for (int i = 0; i < 1024; i++) { int k = 
rand_gen.NextInt(Smi::kMaxValue); @@ -976,7 +976,7 @@ TEST(NumberDictionaryLookup) { Handle<Object> fake_value(Smi::FromInt(42), isolate); PropertyDetails fake_details = PropertyDetails::Empty(); - base::RandomNumberGenerator rand_gen(FLAG_random_seed); + base::RandomNumberGenerator rand_gen(v8_flags.random_seed); for (int i = 0; i < kKeysCount; i++) { int random_key = rand_gen.NextInt(Smi::kMaxValue); @@ -1065,7 +1065,7 @@ TEST(TransitionLookup) { Handle<Map> root_map = Map::Create(isolate, 0); Handle<Name> keys[kKeysCount]; - base::RandomNumberGenerator rand_gen(FLAG_random_seed); + base::RandomNumberGenerator rand_gen(v8_flags.random_seed); Factory* factory = isolate->factory(); Handle<FieldType> any = FieldType::Any(isolate); @@ -1457,7 +1457,7 @@ TEST(TryGetOwnProperty) { }; static_assert(arraysize(values) < arraysize(names)); - base::RandomNumberGenerator rand_gen(FLAG_random_seed); + base::RandomNumberGenerator rand_gen(v8_flags.random_seed); std::vector<Handle<JSObject>> objects; @@ -4029,8 +4029,8 @@ TEST(InstructionSchedulingCallerSavedRegisters) { // This is a regression test for v8:9775, where TF's instruction scheduler // incorrectly moved pure operations in between a ArchSaveCallerRegisters and // a ArchRestoreCallerRegisters instruction. - bool old_turbo_instruction_scheduling = FLAG_turbo_instruction_scheduling; - FLAG_turbo_instruction_scheduling = true; + bool old_turbo_instruction_scheduling = v8_flags.turbo_instruction_scheduling; + v8_flags.turbo_instruction_scheduling = true; Isolate* isolate(CcTest::InitIsolateOnce()); const int kNumParams = 1; @@ -4059,7 +4059,7 @@ TEST(InstructionSchedulingCallerSavedRegisters) { CHECK(result.ToHandleChecked()->IsSmi()); CHECK_EQ(result.ToHandleChecked()->Number(), 13); - FLAG_turbo_instruction_scheduling = old_turbo_instruction_scheduling; + v8_flags.turbo_instruction_scheduling = old_turbo_instruction_scheduling; } #if V8_ENABLE_WEBASSEMBLY @@ -4417,6 +4417,143 @@ TEST(CountTrailingZeros) { ft.Call(); } +TEST(IntPtrMulHigh) { + Isolate* isolate(CcTest::InitIsolateOnce()); + + const int kNumParams = 1; + CodeAssemblerTester asm_tester(isolate, kNumParams + 1); // Include receiver. 
+ CodeStubAssembler m(asm_tester.state()); + + TNode<IntPtrT> a = m.IntPtrConstant(std::numeric_limits<intptr_t>::min()); + TNode<IntPtrT> b = m.SmiUntag(m.Parameter<Smi>(1)); + TNode<IntPtrT> res = m.IntPtrMulHigh(a, b); + m.Return(m.SmiTag(res)); + + FunctionTester ft(asm_tester.GenerateCode()); + CHECK_EQ(-147694, + ft.CallChecked<Smi>(handle(Smi::FromInt(295387), isolate))->value()); + CHECK_EQ(-147694, base::bits::SignedMulHigh32( + std::numeric_limits<int32_t>::min(), 295387)); + CHECK_EQ(-147694, base::bits::SignedMulHigh64( + std::numeric_limits<int64_t>::min(), 295387)); +} + +TEST(IntPtrMulHighConstantFoldable) { + Isolate* isolate(CcTest::InitIsolateOnce()); + CodeAssemblerTester asm_tester(isolate); + CodeStubAssembler m(asm_tester.state()); + + TNode<IntPtrT> a = m.IntPtrConstant(std::numeric_limits<intptr_t>::min()); + TNode<IntPtrT> b = m.IntPtrConstant(295387); + TNode<IntPtrT> res = m.IntPtrMulHigh(a, b); + m.Return(m.SmiTag(res)); + + FunctionTester ft(asm_tester.GenerateCode()); + CHECK_EQ(-147694, ft.CallChecked<Smi>()->value()); + CHECK_EQ(-147694, base::bits::SignedMulHigh32( + std::numeric_limits<int32_t>::min(), 295387)); + CHECK_EQ(-147694, base::bits::SignedMulHigh64( + std::numeric_limits<int64_t>::min(), 295387)); +} + +TEST(UintPtrMulHigh) { + Isolate* isolate(CcTest::InitIsolateOnce()); + + const int kNumParams = 1; + CodeAssemblerTester asm_tester(isolate, kNumParams + 1); // Include receiver. + CodeStubAssembler m(asm_tester.state()); + + TNode<IntPtrT> a = m.IntPtrConstant(std::numeric_limits<intptr_t>::min()); + TNode<IntPtrT> b = m.SmiUntag(m.Parameter<Smi>(1)); + TNode<IntPtrT> res = m.Signed(m.UintPtrMulHigh(m.Unsigned(a), m.Unsigned(b))); + m.Return(m.SmiTag(res)); + + FunctionTester ft(asm_tester.GenerateCode()); + CHECK_EQ(147693, + ft.CallChecked<Smi>(handle(Smi::FromInt(295387), isolate))->value()); + CHECK_EQ(147693, base::bits::UnsignedMulHigh32( + std::numeric_limits<int32_t>::min(), 295387)); + CHECK_EQ(147693, base::bits::UnsignedMulHigh64( + std::numeric_limits<int64_t>::min(), 295387)); +} + +TEST(UintPtrMulHighConstantFoldable) { + Isolate* isolate(CcTest::InitIsolateOnce()); + CodeAssemblerTester asm_tester(isolate); + CodeStubAssembler m(asm_tester.state()); + + TNode<IntPtrT> a = m.IntPtrConstant(std::numeric_limits<intptr_t>::min()); + TNode<IntPtrT> b = m.IntPtrConstant(295387); + TNode<IntPtrT> res = m.Signed(m.UintPtrMulHigh(m.Unsigned(a), m.Unsigned(b))); + m.Return(m.SmiTag(res)); + + FunctionTester ft(asm_tester.GenerateCode()); + CHECK_EQ(147693, ft.CallChecked<Smi>()->value()); + CHECK_EQ( + 147693, + base::bits::UnsignedMulHigh32( + static_cast<uint32_t>(std::numeric_limits<int32_t>::min()), 295387)); + CHECK_EQ( + 147693, + base::bits::UnsignedMulHigh64( + static_cast<uint64_t>(std::numeric_limits<int64_t>::min()), 295387)); +} + +TEST(IntPtrMulWithOverflow) { + Isolate* isolate(CcTest::InitIsolateOnce()); + + const int kNumParams = 1; + + { + CodeAssemblerTester asm_tester(isolate, + kNumParams + 1); // Include receiver. 
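Editor's note: the expected constants in the new IntPtrMulHigh tests above are easy to sanity-check by hand. The product INT32_MIN * 295387 equals -(147693.5 * 2^32), so its high 32 bits are -147694 when taken as a signed (flooring) shift, and the unsigned variant gives 147693. A small self-contained check (C++20, where right-shift of a negative signed value is guaranteed arithmetic):

```cpp
#include <cassert>
#include <cstdint>
#include <limits>

// High 32 bits of the full signed 64-bit product.
int32_t SignedMulHigh32(int32_t a, int32_t b) {
  return static_cast<int32_t>((static_cast<int64_t>(a) * b) >> 32);
}

// High 32 bits of the full unsigned 64-bit product.
uint32_t UnsignedMulHigh32(uint32_t a, uint32_t b) {
  return static_cast<uint32_t>((static_cast<uint64_t>(a) * b) >> 32);
}

int main() {
  int32_t min = std::numeric_limits<int32_t>::min();
  // -2^31 * 295387 = -147693.5 * 2^32; flooring shift gives -147694.
  assert(SignedMulHigh32(min, 295387) == -147694);
  // 2^31 * 295387 = 147693.5 * 2^32; truncation gives 147693.
  assert(UnsignedMulHigh32(static_cast<uint32_t>(min), 295387) == 147693);
}
```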
+ CodeStubAssembler m(asm_tester.state()); + + TNode<IntPtrT> a = m.IntPtrConstant(std::numeric_limits<intptr_t>::min()); + TNode<IntPtrT> b = m.SmiUntag(m.Parameter<Smi>(1)); + TNode<PairT<IntPtrT, BoolT>> pair = m.IntPtrMulWithOverflow(a, b); + TNode<BoolT> overflow = m.Projection<1>(pair); + m.Return(m.SelectBooleanConstant(overflow)); + + FunctionTester ft(asm_tester.GenerateCode()); + CHECK(ft.Call(handle(Smi::FromInt(-1), isolate)) + .ToHandleChecked() + ->IsTrue(isolate)); + CHECK(ft.Call(handle(Smi::FromInt(1), isolate)) + .ToHandleChecked() + ->IsFalse(isolate)); + CHECK(ft.Call(handle(Smi::FromInt(2), isolate)) + .ToHandleChecked() + ->IsTrue(isolate)); + CHECK(ft.Call(handle(Smi::FromInt(0), isolate)) + .ToHandleChecked() + ->IsFalse(isolate)); + } + + { + CodeAssemblerTester asm_tester(isolate, + kNumParams + 1); // Include receiver. + CodeStubAssembler m(asm_tester.state()); + + TNode<IntPtrT> a = m.IntPtrConstant(std::numeric_limits<intptr_t>::max()); + TNode<IntPtrT> b = m.SmiUntag(m.Parameter<Smi>(1)); + TNode<PairT<IntPtrT, BoolT>> pair = m.IntPtrMulWithOverflow(a, b); + TNode<BoolT> overflow = m.Projection<1>(pair); + m.Return(m.SelectBooleanConstant(overflow)); + + FunctionTester ft(asm_tester.GenerateCode()); + CHECK(ft.Call(handle(Smi::FromInt(-1), isolate)) + .ToHandleChecked() + ->IsFalse(isolate)); + CHECK(ft.Call(handle(Smi::FromInt(1), isolate)) + .ToHandleChecked() + ->IsFalse(isolate)); + CHECK(ft.Call(handle(Smi::FromInt(2), isolate)) + .ToHandleChecked() + ->IsTrue(isolate)); + } +} + } // namespace compiler } // namespace internal } // namespace v8 diff --git a/deps/v8/test/cctest/test-cpu-profiler.cc b/deps/v8/test/cctest/test-cpu-profiler.cc index cf13e487355425..b1e7fb830e3a08 100644 --- a/deps/v8/test/cctest/test-cpu-profiler.cc +++ b/deps/v8/test/cctest/test-cpu-profiler.cc @@ -125,11 +125,11 @@ namespace { class TestSetup { public: - TestSetup() : old_flag_prof_browser_mode_(i::FLAG_prof_browser_mode) { - i::FLAG_prof_browser_mode = false; + TestSetup() : old_flag_prof_browser_mode_(v8_flags.prof_browser_mode) { + v8_flags.prof_browser_mode = false; } - ~TestSetup() { i::FLAG_prof_browser_mode = old_flag_prof_browser_mode_; } + ~TestSetup() { v8_flags.prof_browser_mode = old_flag_prof_browser_mode_; } private: bool old_flag_prof_browser_mode_; @@ -762,9 +762,9 @@ static const char* cpu_profiler_test_source = TEST(CollectCpuProfile) { // Skip test if concurrent sparkplug is enabled. The test becomes flaky, // since it requires a precise trace. - if (i::FLAG_concurrent_sparkplug) return; + if (v8_flags.concurrent_sparkplug) return; - i::FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -795,9 +795,9 @@ TEST(CollectCpuProfile) { TEST(CollectCpuProfileCallerLineNumbers) { // Skip test if concurrent sparkplug is enabled. The test becomes flaky, // since it requires a precise trace. - if (i::FLAG_concurrent_sparkplug) return; + if (v8_flags.concurrent_sparkplug) return; - i::FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -859,7 +859,7 @@ static const char* hot_deopt_no_frame_entry_test_source = // If 'foo' has no ranges the samples falling into the prologue will miss the // 'start' function on the stack, so 'foo' will be attached to the (root). 
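Editor's note: the IntPtrMulWithOverflow cases above likewise follow from two's-complement limits: INTPTR_MIN * -1 overflows (its positive counterpart is unrepresentable), * 1 and * 0 do not, * 2 does; INTPTR_MAX fits when multiplied by -1 or 1 but not by 2. A sketch verifying this with the GCC/Clang `__builtin_mul_overflow` builtin (a toolchain assumption for illustration; the diff itself does not use it):

```cpp
#include <cassert>
#include <cstdint>

bool MulOverflows(intptr_t a, intptr_t b) {
  intptr_t unused;
  return __builtin_mul_overflow(a, b, &unused);  // GCC/Clang builtin
}

int main() {
  assert(MulOverflows(INTPTR_MIN, -1));   // -INTPTR_MIN is unrepresentable
  assert(!MulOverflows(INTPTR_MIN, 1));
  assert(MulOverflows(INTPTR_MIN, 2));
  assert(!MulOverflows(INTPTR_MIN, 0));
  assert(!MulOverflows(INTPTR_MAX, -1));  // -INTPTR_MAX is representable
  assert(!MulOverflows(INTPTR_MAX, 1));
  assert(MulOverflows(INTPTR_MAX, 2));
}
```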
TEST(HotDeoptNoFrameEntry) { - i::FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -882,7 +882,7 @@ TEST(HotDeoptNoFrameEntry) { } TEST(CollectCpuProfileSamples) { - i::FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -936,7 +936,7 @@ static const char* cpu_profiler_test_source2 = // 16 16 loop [-1] #5 // 14 14 (program) [-1] #2 TEST(SampleWhenFrameIsNotSetup) { - i::FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -1235,15 +1235,15 @@ TEST(BoundFunctionCall) { // This tests checks distribution of the samples through the source lines. static void TickLines(bool optimize) { #ifndef V8_LITE_MODE - FLAG_turbofan = optimize; + v8_flags.turbofan = optimize; #ifdef V8_ENABLE_MAGLEV // TODO(v8:7700): Also test maglev here. - FLAG_maglev = false; + v8_flags.maglev = false; #endif // V8_ENABLE_MAGLEV #endif // V8_LITE_MODE CcTest::InitializeVM(); LocalContext env; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; i::Isolate* isolate = CcTest::i_isolate(); i::Factory* factory = isolate->factory(); i::HandleScope scope(isolate); @@ -1399,9 +1399,9 @@ static const char* call_function_test_source = TEST(FunctionCallSample) { // Skip test if concurrent sparkplug is enabled. The test becomes flaky, // since it requires a precise trace. - if (i::FLAG_concurrent_sparkplug) return; + if (i::v8_flags.concurrent_sparkplug) return; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -1460,9 +1460,9 @@ static const char* function_apply_test_source = TEST(FunctionApplySample) { // Skip test if concurrent sparkplug is enabled. The test becomes flaky, // since it requires a precise trace. 
- if (i::FLAG_concurrent_sparkplug) return; + if (i::v8_flags.concurrent_sparkplug) return; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -1569,7 +1569,7 @@ static void CallJsFunction(const v8::FunctionCallbackInfo<v8::Value>& info) { // 55 1 bar #16 5 // 54 54 foo #16 6 TEST(JsNativeJsSample) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -1622,7 +1622,7 @@ static const char* js_native_js_runtime_js_test_source = // 51 51 foo #16 6 // 2 2 (program) #0 2 TEST(JsNativeJsRuntimeJsSample) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -1679,7 +1679,7 @@ static const char* js_native1_js_native2_js_test_source = // 54 54 foo #16 7 // 2 2 (program) #0 2 TEST(JsNative1JsNative2JsSample) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -1779,7 +1779,7 @@ static const char* js_native_js_runtime_multiple_test_source = // foo #16 6 // (program) #0 2 TEST(JsNativeJsRuntimeJsSampleMultiple) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -1847,7 +1847,7 @@ static const char* inlining_test_source = // action #16 7 // (program) #0 2 TEST(Inlining) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -1945,9 +1945,9 @@ static const char* inlining_test_source2 = R"( TEST(Inlining2) { // Skip test if concurrent sparkplug is enabled. The test becomes flaky, // since it requires a precise trace. - if (FLAG_concurrent_sparkplug) return; + if (v8_flags.concurrent_sparkplug) return; - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; v8::Isolate* isolate = CcTest::isolate(); LocalContext env; v8::CpuProfiler::UseDetailedSourcePositionsForProfiling(isolate); @@ -2037,9 +2037,9 @@ static const char* cross_script_source_b = R"( TEST(CrossScriptInliningCallerLineNumbers) { // Skip test if concurrent sparkplug is enabled. The test becomes flaky, // since it requires a precise trace. - if (i::FLAG_concurrent_sparkplug) return; + if (i::v8_flags.concurrent_sparkplug) return; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate* isolate = CcTest::isolate(); LocalContext env; v8::CpuProfiler::UseDetailedSourcePositionsForProfiling(isolate); @@ -2132,9 +2132,9 @@ static const char* cross_script_source_f = R"( TEST(CrossScriptInliningCallerLineNumbers2) { // Skip test if concurrent sparkplug is enabled. The test becomes flaky, // since it requires a precise trace. 
- if (i::FLAG_concurrent_sparkplug) return; + if (i::v8_flags.concurrent_sparkplug) return; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(CcTest::isolate()); ProfilerHelper helper(env.local()); @@ -2251,7 +2251,7 @@ static void CheckFunctionDetails(v8::Isolate* isolate, } TEST(FunctionDetails) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -2302,8 +2302,9 @@ TEST(FunctionDetails) { } TEST(FunctionDetailsInlining) { - if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; - i::FLAG_allow_natives_syntax = true; + if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan) + return; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -2434,7 +2435,7 @@ static const char* pre_profiling_osr_script = R"( // 0 startProfiling:0 2 0 #4 TEST(StartProfilingAfterOsr) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -2510,8 +2511,9 @@ const char* GetBranchDeoptReason(v8::Local<v8::Context> context, // deopt at top function TEST(CollectDeoptEvents) { - if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; - i::FLAG_allow_natives_syntax = true; + if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan) + return; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -2625,7 +2627,7 @@ TEST(CollectDeoptEvents) { } TEST(SourceLocation) { - i::FLAG_always_turbofan = true; + i::v8_flags.always_turbofan = true; LocalContext env; v8::HandleScope scope(CcTest::isolate()); @@ -2648,8 +2650,9 @@ static const char* inlined_source = // deopt at the first level inlined function TEST(DeoptAtFirstLevelInlinedSource) { - if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; - i::FLAG_allow_natives_syntax = true; + if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan) + return; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -2720,8 +2723,9 @@ TEST(DeoptAtFirstLevelInlinedSource) { // deopt at the second level inlined function TEST(DeoptAtSecondLevelInlinedSource) { - if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; - i::FLAG_allow_natives_syntax = true; + if (!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan) + return; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -2798,8 +2802,9 @@ TEST(DeoptAtSecondLevelInlinedSource) { // deopt in untracked function TEST(DeoptUntrackedFunction) { - if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_turbofan) return; - i::FLAG_allow_natives_syntax = true; + if 
(!CcTest::i_isolate()->use_optimizer() || i::v8_flags.always_turbofan) + return; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); @@ -3017,15 +3022,15 @@ TEST(Issue763073) { class AllowNativesSyntax { public: AllowNativesSyntax() - : allow_natives_syntax_(i::FLAG_allow_natives_syntax), - trace_deopt_(i::FLAG_trace_deopt) { - i::FLAG_allow_natives_syntax = true; - i::FLAG_trace_deopt = true; + : allow_natives_syntax_(i::v8_flags.allow_natives_syntax), + trace_deopt_(i::v8_flags.trace_deopt) { + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.trace_deopt = true; } ~AllowNativesSyntax() { - i::FLAG_allow_natives_syntax = allow_natives_syntax_; - i::FLAG_trace_deopt = trace_deopt_; + i::v8_flags.allow_natives_syntax = allow_natives_syntax_; + i::v8_flags.trace_deopt = trace_deopt_; } private: @@ -3079,7 +3084,7 @@ static void CallStaticCollectSample( } TEST(StaticCollectSampleAPI) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -3431,7 +3436,7 @@ class UnlockingThread : public v8::base::Thread { // Checking for crashes with multiple thread/single Isolate profiling. TEST(MultipleThreadsSingleIsolate) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate* isolate = CcTest::isolate(); v8::Locker locker(isolate); v8::HandleScope scope(isolate); @@ -3891,7 +3896,7 @@ TEST(Bug9151StaleCodeEntries) { // Tests that functions from other contexts aren't recorded when filtering for // another context. TEST(ContextIsolation) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext execution_env; i::HandleScope scope(CcTest::i_isolate()); @@ -3984,7 +3989,7 @@ void ValidateEmbedderState(v8::CpuProfile* profile, // Tests that embedder states from other contexts aren't recorded TEST(EmbedderContextIsolation) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext execution_env; i::HandleScope scope(CcTest::i_isolate()); @@ -4047,7 +4052,7 @@ TEST(EmbedderContextIsolation) { // Tests that embedder states from same context are recorded TEST(EmbedderStatePropagate) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext execution_env; i::HandleScope scope(CcTest::i_isolate()); @@ -4110,12 +4115,13 @@ TEST(EmbedderStatePropagate) { // even after native context move TEST(EmbedderStatePropagateNativeContextMove) { // Reusing context addresses will cause this test to fail. - if (i::FLAG_gc_global || i::FLAG_stress_compaction || - i::FLAG_stress_incremental_marking || i::FLAG_enable_third_party_heap) { + if (i::v8_flags.gc_global || i::v8_flags.stress_compaction || + i::v8_flags.stress_incremental_marking || + i::v8_flags.enable_third_party_heap) { return; } - i::FLAG_allow_natives_syntax = true; - i::FLAG_manual_evacuation_candidates_selection = true; + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.manual_evacuation_candidates_selection = true; LocalContext execution_env; i::HandleScope scope(CcTest::i_isolate()); @@ -4184,9 +4190,9 @@ TEST(EmbedderStatePropagateNativeContextMove) { // Tests that when a native context that's being filtered is moved, we continue // to track its execution. 
TEST(ContextFilterMovedNativeContext) { - if (i::FLAG_enable_third_party_heap) return; - i::FLAG_allow_natives_syntax = true; - i::FLAG_manual_evacuation_candidates_selection = true; + if (i::v8_flags.enable_third_party_heap) return; + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.manual_evacuation_candidates_selection = true; LocalContext env; i::HandleScope scope(CcTest::i_isolate()); @@ -4267,8 +4273,8 @@ int GetSourcePositionEntryCount(i::Isolate* isolate, const char* source, } UNINITIALIZED_TEST(DetailedSourcePositionAPI) { - i::FLAG_detailed_line_info = false; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.detailed_line_info = false; + i::v8_flags.allow_natives_syntax = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate = v8::Isolate::New(create_params); @@ -4308,11 +4314,11 @@ UNINITIALIZED_TEST(DetailedSourcePositionAPI) { } UNINITIALIZED_TEST(DetailedSourcePositionAPI_Inlining) { - i::FLAG_detailed_line_info = false; - i::FLAG_turbo_inlining = true; - i::FLAG_stress_inline = true; - i::FLAG_always_turbofan = false; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.detailed_line_info = false; + i::v8_flags.turbo_inlining = true; + i::v8_flags.stress_inline = true; + i::v8_flags.always_turbofan = false; + i::v8_flags.allow_natives_syntax = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate = v8::Isolate::New(create_params); @@ -4457,7 +4463,7 @@ TEST(CanStartStopProfilerWithTitlesAndIds) { TEST(FastApiCPUProfiler) { #if !defined(V8_LITE_MODE) && !defined(USE_SIMULATOR) // None of the following configurations include JSCallReducer. - if (i::FLAG_jitless) return; + if (i::v8_flags.jitless) return; FLAG_SCOPE(turbofan); FLAG_SCOPE(turbo_fast_api_calls); @@ -4556,15 +4562,15 @@ TEST(FastApiCPUProfiler) { TEST(BytecodeFlushEventsEagerLogging) { #ifndef V8_LITE_MODE - FLAG_turbofan = false; - FLAG_always_turbofan = false; - i::FLAG_optimize_for_size = false; + v8_flags.turbofan = false; + v8_flags.always_turbofan = false; + v8_flags.optimize_for_size = false; #endif // V8_LITE_MODE #if ENABLE_SPARKPLUG - FLAG_always_sparkplug = false; + v8_flags.always_sparkplug = false; #endif // ENABLE_SPARKPLUG - i::FLAG_flush_bytecode = true; - i::FLAG_allow_natives_syntax = true; + v8_flags.flush_bytecode = true; + v8_flags.allow_natives_syntax = true; TestSetup test_setup; ManualGCScope manual_gc_scope; diff --git a/deps/v8/test/cctest/test-debug.cc b/deps/v8/test/cctest/test-debug.cc index 21cece6ef3b3af..c3c8cef4659c65 100644 --- a/deps/v8/test/cctest/test-debug.cc +++ b/deps/v8/test/cctest/test-debug.cc @@ -803,7 +803,7 @@ TEST(BreakPointConstructorBuiltin) { } TEST(BreakPointInlinedBuiltin) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -847,7 +847,7 @@ TEST(BreakPointInlinedBuiltin) { } TEST(BreakPointInlineBoundBuiltin) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -895,7 +895,7 @@ TEST(BreakPointInlineBoundBuiltin) { } TEST(BreakPointInlinedConstructorBuiltin) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -939,7 +939,7 @@ TEST(BreakPointInlinedConstructorBuiltin) { } 
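Aside: the flag hunks above all repeat one idiom, read the old value, override the flag for the test, and restore it on teardown (see AllowNativesSyntax earlier, and TestSetup and DisableLazySourcePositionScope later in this patch). A minimal sketch of that idiom written against the new v8_flags struct follows. ScopedFlagValue is a hypothetical name, the T& binding assumes the flag member behaves like a plain T (the real members are FlagValue<T> wrappers), and V8's own FLAG_SCOPE macro, used by FastApiCPUProfiler above, is the in-tree equivalent.

// Saves a flag's current value, overrides it, and restores it on scope exit.
template <typename T>
class ScopedFlagValue {
 public:
  ScopedFlagValue(T& flag, T value) : flag_(flag), old_value_(flag) {
    flag_ = value;
  }
  ~ScopedFlagValue() { flag_ = old_value_; }
  ScopedFlagValue(const ScopedFlagValue&) = delete;
  ScopedFlagValue& operator=(const ScopedFlagValue&) = delete;

 private:
  T& flag_;
  T old_value_;
};

// Usage mirroring the hunks above (assumes the member binds as bool&):
//   ScopedFlagValue<bool> natives(i::v8_flags.allow_natives_syntax, true);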
TEST(BreakPointBuiltinConcurrentOpt) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -980,7 +980,7 @@ TEST(BreakPointBuiltinConcurrentOpt) { } TEST(BreakPointBuiltinTFOperator) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -1401,7 +1401,7 @@ TEST(Regress1163547) { } TEST(BreakPointInlineApiFunction) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -1447,7 +1447,7 @@ TEST(BreakPointInlineApiFunction) { // Test that a break point can be set at a return store location. TEST(BreakPointConditionBuiltin) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -1576,7 +1576,7 @@ TEST(BreakPointConditionBuiltin) { } TEST(BreakPointInlining) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; break_point_hit_count = 0; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -2922,9 +2922,9 @@ TEST(PauseInScript) { int message_callback_count = 0; TEST(DebugBreak) { - i::FLAG_stress_compaction = false; + i::v8_flags.stress_compaction = false; #ifdef VERIFY_HEAP - i::FLAG_verify_heap = true; + i::v8_flags.verify_heap = true; #endif LocalContext env; v8::Isolate* isolate = env->GetIsolate(); @@ -3012,9 +3012,9 @@ class DebugScopingListener : public v8::debug::DebugDelegate { }; TEST(DebugBreakInWrappedScript) { - i::FLAG_stress_compaction = false; + i::v8_flags.stress_compaction = false; #ifdef VERIFY_HEAP - i::FLAG_verify_heap = true; + i::v8_flags.verify_heap = true; #endif LocalContext env; v8::Isolate* isolate = env->GetIsolate(); @@ -3067,9 +3067,9 @@ TEST(DebugScopeIteratorWithFunctionTemplate) { } TEST(DebugBreakWithoutJS) { - i::FLAG_stress_compaction = false; + i::v8_flags.stress_compaction = false; #ifdef VERIFY_HEAP - i::FLAG_verify_heap = true; + i::v8_flags.verify_heap = true; #endif LocalContext env; v8::Isolate* isolate = env->GetIsolate(); @@ -3543,8 +3543,8 @@ class ExceptionEventCounter : public v8::debug::DebugDelegate { }; UNINITIALIZED_TEST(NoBreakOnStackOverflow) { - // We must set FLAG_stack_size before initializing the isolate. - i::FLAG_stack_size = 100; + // We must set v8_flags.stack_size before initializing the isolate. 
+ i::v8_flags.stack_size = 100; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate = v8::Isolate::New(create_params); @@ -3840,7 +3840,7 @@ class DebugBreakInlineListener : public v8::debug::DebugDelegate { }; TEST(DebugBreakInline) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::HandleScope scope(env->GetIsolate()); v8::Local<v8::Context> context = env.local(); @@ -4289,7 +4289,7 @@ class DebugStepOverFunctionWithCaughtExceptionListener }; TEST(DebugStepOverFunctionWithCaughtException) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); @@ -4317,7 +4317,7 @@ size_t NearHeapLimitCallback(void* data, size_t current_heap_limit, } UNINITIALIZED_TEST(DebugSetOutOfMemoryListener) { - i::FLAG_stress_concurrent_allocation = false; + i::v8_flags.stress_concurrent_allocation = false; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); create_params.constraints.set_max_old_generation_size_in_bytes(10 * i::MB); @@ -4340,7 +4340,7 @@ UNINITIALIZED_TEST(DebugSetOutOfMemoryListener) { } TEST(DebugCoverage) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); @@ -4395,7 +4395,7 @@ v8::debug::Coverage::ScriptData GetScriptDataAndDeleteCoverage( } // namespace TEST(DebugCoverageWithCoverageOutOfScope) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); @@ -4466,7 +4466,7 @@ v8::debug::Coverage::FunctionData GetFunctionDataAndDeleteCoverage( } // namespace TEST(DebugCoverageWithScriptDataOutOfScope) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); v8::HandleScope scope(isolate); @@ -4619,7 +4619,7 @@ i::MaybeHandle<i::Script> FindScript( } // anonymous namespace UNINITIALIZED_TEST(LoadedAtStartupScripts) { - i::FLAG_expose_gc = true; + i::v8_flags.expose_gc = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); diff --git a/deps/v8/test/cctest/test-descriptor-array.cc b/deps/v8/test/cctest/test-descriptor-array.cc index 7bd2640cf626f6..8642174d182d96 100644 --- a/deps/v8/test/cctest/test-descriptor-array.cc +++ b/deps/v8/test/cctest/test-descriptor-array.cc @@ -64,7 +64,7 @@ void CheckDescriptorArrayLookups(Isolate* isolate, Handle<Map> map, } // Test CSA implementation. - if (!FLAG_jitless) { + if (!v8_flags.jitless) { for (size_t i = 0; i < names.size(); ++i) { Handle<Object> name_index = Call(isolate, csa_lookup, map, names[i]).ToHandleChecked(); @@ -97,7 +97,7 @@ void CheckTransitionArrayLookups(Isolate* isolate, } // Test CSA implementation. - if (!FLAG_jitless) { + if (!v8_flags.jitless) { for (size_t i = 0; i < maps.size(); ++i) { Handle<Map> expected_map = maps[i]; Handle<Name> name(expected_map->instance_descriptors(isolate).GetKey( @@ -117,7 +117,7 @@ void CheckTransitionArrayLookups(Isolate* isolate, // or null otherwise. Handle<JSFunction> CreateCsaDescriptorArrayLookup(Isolate* isolate) { // We are not allowed to generate code in jitless mode. 
- if (FLAG_jitless) return Handle<JSFunction>(); + if (v8_flags.jitless) return Handle<JSFunction>(); // Preallocate handle for the result in the current handle scope. Handle<JSFunction> result_function(JSFunction{}, isolate); @@ -163,7 +163,7 @@ Handle<JSFunction> CreateCsaDescriptorArrayLookup(Isolate* isolate) { // map if transition is found or null otherwise. Handle<JSFunction> CreateCsaTransitionArrayLookup(Isolate* isolate) { // We are not allowed to generate code in jitless mode. - if (FLAG_jitless) return Handle<JSFunction>(); + if (v8_flags.jitless) return Handle<JSFunction>(); // Preallocate handle for the result in the current handle scope. Handle<JSFunction> result_function(JSFunction{}, isolate); diff --git a/deps/v8/test/cctest/test-field-type-tracking.cc b/deps/v8/test/cctest/test-field-type-tracking.cc index e5c8102c448449..b87e67d884e2b9 100644 --- a/deps/v8/test/cctest/test-field-type-tracking.cc +++ b/deps/v8/test/cctest/test-field-type-tracking.cc @@ -2276,14 +2276,15 @@ TEST(ElementsKindTransitionFromMapOwningDescriptor) { Factory* factory = isolate->factory(); TestConfig configs[] = { {FROZEN, factory->frozen_symbol(), - FLAG_enable_sealed_frozen_elements_kind ? HOLEY_FROZEN_ELEMENTS - : DICTIONARY_ELEMENTS}, + v8_flags.enable_sealed_frozen_elements_kind ? HOLEY_FROZEN_ELEMENTS + : DICTIONARY_ELEMENTS}, {SEALED, factory->sealed_symbol(), - FLAG_enable_sealed_frozen_elements_kind ? HOLEY_SEALED_ELEMENTS - : DICTIONARY_ELEMENTS}, + v8_flags.enable_sealed_frozen_elements_kind ? HOLEY_SEALED_ELEMENTS + : DICTIONARY_ELEMENTS}, {NONE, factory->nonextensible_symbol(), - FLAG_enable_sealed_frozen_elements_kind ? HOLEY_NONEXTENSIBLE_ELEMENTS - : DICTIONARY_ELEMENTS}}; + v8_flags.enable_sealed_frozen_elements_kind + ? HOLEY_NONEXTENSIBLE_ELEMENTS + : DICTIONARY_ELEMENTS}}; for (size_t i = 0; i < arraysize(configs); i++) { TestGeneralizeFieldWithSpecialTransition( &configs[i], @@ -2344,14 +2345,15 @@ TEST(ElementsKindTransitionFromMapNotOwningDescriptor) { Factory* factory = isolate->factory(); TestConfig configs[] = { {FROZEN, factory->frozen_symbol(), - FLAG_enable_sealed_frozen_elements_kind ? HOLEY_FROZEN_ELEMENTS - : DICTIONARY_ELEMENTS}, + v8_flags.enable_sealed_frozen_elements_kind ? HOLEY_FROZEN_ELEMENTS + : DICTIONARY_ELEMENTS}, {SEALED, factory->sealed_symbol(), - FLAG_enable_sealed_frozen_elements_kind ? HOLEY_SEALED_ELEMENTS - : DICTIONARY_ELEMENTS}, + v8_flags.enable_sealed_frozen_elements_kind ? HOLEY_SEALED_ELEMENTS + : DICTIONARY_ELEMENTS}, {NONE, factory->nonextensible_symbol(), - FLAG_enable_sealed_frozen_elements_kind ? HOLEY_NONEXTENSIBLE_ELEMENTS - : DICTIONARY_ELEMENTS}}; + v8_flags.enable_sealed_frozen_elements_kind + ? 
HOLEY_NONEXTENSIBLE_ELEMENTS + : DICTIONARY_ELEMENTS}}; for (size_t i = 0; i < arraysize(configs); i++) { TestGeneralizeFieldWithSpecialTransition( &configs[i], @@ -2914,7 +2916,7 @@ void TestStoreToConstantField_NaN(const char* store_func_source, } // namespace TEST(StoreToConstantField_PlusMinusZero) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc index 5fbea256c320f3..188c9ef867dd10 100644 --- a/deps/v8/test/cctest/test-heap-profiler.cc +++ b/deps/v8/test/cctest/test-heap-profiler.cc @@ -53,10 +53,10 @@ using i::AllocationTraceNode; using i::AllocationTraceTree; using i::AllocationTracker; using i::SourceLocation; +using i::heap::GrowNewSpaceToMaximumCapacity; using v8::base::ArrayVector; using v8::base::Optional; using v8::base::Vector; -using v8::internal::heap::GrowNewSpaceToMaximumCapacity; namespace { @@ -1297,7 +1297,7 @@ static TestStatsStream GetHeapStatsUpdate( TEST(HeapSnapshotObjectsStats) { // Concurrent allocation might break results - v8::internal::v8_flags.stress_concurrent_allocation = false; + i::v8_flags.stress_concurrent_allocation = false; LocalContext env; v8::HandleScope scope(env->GetIsolate()); @@ -2639,7 +2639,7 @@ TEST(ManyLocalsInSharedContext) { env->GetIsolate(), ok_object, v8::HeapGraphEdge::kInternal, "context"); CHECK(context_object); // Check the objects are not duplicated in the context. - CHECK_EQ(v8::internal::Context::MIN_CONTEXT_EXTENDED_SLOTS + num_objects - 1, + CHECK_EQ(i::Context::MIN_CONTEXT_EXTENDED_SLOTS + num_objects - 1, context_object->GetChildrenCount()); // Check all the objects have got their names. // ... well check just every 15th because otherwise it's too slow in debug. @@ -2695,7 +2695,7 @@ TEST(AllocationSitesAreVisible) { v8::HeapGraphEdge::kInternal, "elements"); CHECK(elements); CHECK_EQ(v8::HeapGraphNode::kCode, elements->GetType()); - CHECK_EQ(v8::internal::FixedArray::SizeFor(3), + CHECK_EQ(i::FixedArray::SizeFor(3), static_cast<int>(elements->GetShallowSize())); v8::Local<v8::Value> array_val = @@ -3704,10 +3704,10 @@ TEST(SamplingHeapProfiler) { // Turn off always_turbofan. Inlining can cause stack traces to be shorter // than what we expect in this test. - v8::internal::v8_flags.always_turbofan = false; + i::v8_flags.always_turbofan = false; // Suppress randomness to avoid flakiness in tests. - v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; + i::v8_flags.sampling_heap_profiler_suppress_randomness = true; // Sample should be empty if requested before sampling has started. { @@ -3788,16 +3788,16 @@ TEST(SamplingHeapProfilerRateAgnosticEstimates) { // Turn off always_turbofan. Inlining can cause stack traces to be shorter // than what we expect in this test. - v8::internal::v8_flags.always_turbofan = false; + i::v8_flags.always_turbofan = false; // Disable compilation cache to force compilation in both cases - v8::internal::v8_flags.compilation_cache = false; + i::v8_flags.compilation_cache = false; // Suppress randomness to avoid flakiness in tests. - v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; + i::v8_flags.sampling_heap_profiler_suppress_randomness = true; // stress_incremental_marking adds randomness to the test. 
- v8::internal::v8_flags.stress_incremental_marking = false; + i::v8_flags.stress_incremental_marking = false; // warmup compilation CompileRun(simple_sampling_heap_profiler_script); @@ -3869,7 +3869,7 @@ TEST(SamplingHeapProfilerApiAllocation) { v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); // Suppress randomness to avoid flakiness in tests. - v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; + i::v8_flags.sampling_heap_profiler_suppress_randomness = true; heap_profiler->StartSamplingHeapProfiler(256); @@ -3892,7 +3892,7 @@ TEST(SamplingHeapProfilerApiSamples) { v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); // Suppress randomness to avoid flakiness in tests. - v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; + i::v8_flags.sampling_heap_profiler_suppress_randomness = true; heap_profiler->StartSamplingHeapProfiler(1024); @@ -3937,7 +3937,7 @@ TEST(SamplingHeapProfilerLeftTrimming) { v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); // Suppress randomness to avoid flakiness in tests. - v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; + i::v8_flags.sampling_heap_profiler_suppress_randomness = true; heap_profiler->StartSamplingHeapProfiler(64); @@ -3950,7 +3950,7 @@ TEST(SamplingHeapProfilerLeftTrimming) { " a.shift();\n" "}\n"); - CcTest::CollectGarbage(v8::internal::NEW_SPACE); + CcTest::CollectGarbage(i::NEW_SPACE); // Should not crash. heap_profiler->StopSamplingHeapProfiler(); @@ -3975,7 +3975,7 @@ TEST(SamplingHeapProfilerPretenuredInlineAllocations) { v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); // Suppress randomness to avoid flakiness in tests. - v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; + i::v8_flags.sampling_heap_profiler_suppress_randomness = true; GrowNewSpaceToMaximumCapacity(CcTest::heap()); @@ -4037,7 +4037,7 @@ TEST(SamplingHeapProfilerLargeInterval) { v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); // Suppress randomness to avoid flakiness in tests. - v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; + i::v8_flags.sampling_heap_profiler_suppress_randomness = true; heap_profiler->StartSamplingHeapProfiler(512 * 1024); @@ -4075,7 +4075,7 @@ TEST(SamplingHeapProfilerSampleDuringDeopt) { v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); // Suppress randomness to avoid flakiness in tests. - v8::internal::v8_flags.sampling_heap_profiler_suppress_randomness = true; + i::v8_flags.sampling_heap_profiler_suppress_randomness = true; // Small sample interval to force each object to be sampled. 
heap_profiler->StartSamplingHeapProfiler(i::kTaggedSize); diff --git a/deps/v8/test/cctest/test-helper-riscv32.cc b/deps/v8/test/cctest/test-helper-riscv32.cc index b46aebceaa51e9..0e9738a7b78a57 100644 --- a/deps/v8/test/cctest/test-helper-riscv32.cc +++ b/deps/v8/test/cctest/test-helper-riscv32.cc @@ -39,7 +39,7 @@ Handle<Code> AssembleCodeImpl(Func assemble) { assm.GetCode(isolate, &desc); Handle<Code> code = Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build(); - if (FLAG_print_code) { + if (v8_flags.print_code) { code->Print(); } return code; diff --git a/deps/v8/test/cctest/test-helper-riscv64.cc b/deps/v8/test/cctest/test-helper-riscv64.cc index 388e0ca0194acc..75263d35f5f9e1 100644 --- a/deps/v8/test/cctest/test-helper-riscv64.cc +++ b/deps/v8/test/cctest/test-helper-riscv64.cc @@ -38,7 +38,7 @@ Handle<Code> AssembleCodeImpl(Func assemble) { assm.GetCode(isolate, &desc); Handle<Code> code = Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build(); - if (FLAG_print_code) { + if (v8_flags.print_code) { code->Print(); } return code; diff --git a/deps/v8/test/cctest/test-ignition-statistics-extension.cc b/deps/v8/test/cctest/test-ignition-statistics-extension.cc index 9a0b1336456ceb..f79ae21f8fbf0c 100644 --- a/deps/v8/test/cctest/test-ignition-statistics-extension.cc +++ b/deps/v8/test/cctest/test-ignition-statistics-extension.cc @@ -43,7 +43,7 @@ class IgnitionStatisticsTester { }; TEST(IgnitionStatisticsExtension) { - FLAG_expose_ignition_statistics = true; + v8_flags.expose_ignition_statistics = true; CcTest::InitializeVM(); v8::Isolate* isolate = CcTest::isolate(); v8::HandleScope scope(isolate); diff --git a/deps/v8/test/cctest/test-inobject-slack-tracking.cc b/deps/v8/test/cctest/test-inobject-slack-tracking.cc index 1ecec21ac34cdc..f7a3a9115f9c05 100644 --- a/deps/v8/test/cctest/test-inobject-slack-tracking.cc +++ b/deps/v8/test/cctest/test-inobject-slack-tracking.cc @@ -110,7 +110,7 @@ bool IsObjectShrinkable(JSObject obj) { TEST(JSObjectBasic) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); const char* source = @@ -161,14 +161,14 @@ TEST(JSObjectBasic) { TEST(JSObjectBasicNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestJSObjectBasic(); } TEST(JSObjectComplex) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); const char* source = @@ -241,14 +241,14 @@ TEST(JSObjectComplex) { TEST(JSObjectComplexNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestJSObjectComplex(); } TEST(JSGeneratorObjectBasic) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); const char* source = @@ -307,14 +307,14 @@ TEST(JSGeneratorObjectBasic) { TEST(JSGeneratorObjectBasicNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestJSGeneratorObjectBasic(); } TEST(SubclassBasicNoBaseClassInstances) { // Avoid eventual completion of in-object slack tracking. 
- FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -399,14 +399,14 @@ TEST(SubclassBasicNoBaseClassInstances) { TEST(SubclassBasicNoBaseClassInstancesNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassBasicNoBaseClassInstances(); } TEST(SubclassBasic) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -502,7 +502,7 @@ TEST(SubclassBasic) { TEST(SubclassBasicNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassBasic(); } @@ -608,7 +608,7 @@ static void TestClassHierarchy(const std::vector<int>& hierarchy_desc, int n) { static void TestSubclassChain(const std::vector<int>& hierarchy_desc) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -654,7 +654,7 @@ TEST(LongSubclassChain3) { TEST(InobjectPropetiesCountOverflowInSubclass) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -837,10 +837,10 @@ TEST(ObjectLiteralPropertyBackingStoreSize) { } TEST(SlowModeSubclass) { - if (FLAG_stress_concurrent_allocation) return; + if (v8_flags.stress_concurrent_allocation) return; // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -977,7 +977,7 @@ static void TestSubclassBuiltin(const char* subclass_name, TEST(SubclassObjectBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -988,14 +988,14 @@ TEST(SubclassObjectBuiltin) { TEST(SubclassObjectBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassObjectBuiltin(); } TEST(SubclassFunctionBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1005,14 +1005,14 @@ TEST(SubclassFunctionBuiltin) { TEST(SubclassFunctionBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassFunctionBuiltin(); } TEST(SubclassBooleanBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1022,14 +1022,14 @@ TEST(SubclassBooleanBuiltin) { TEST(SubclassBooleanBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassBooleanBuiltin(); } TEST(SubclassErrorBuiltin) { // Avoid eventual completion of in-object slack tracking. 
- FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1046,14 +1046,14 @@ TEST(SubclassErrorBuiltin) { TEST(SubclassErrorBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassErrorBuiltin(); } TEST(SubclassNumberBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1063,14 +1063,14 @@ TEST(SubclassNumberBuiltin) { TEST(SubclassNumberBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassNumberBuiltin(); } TEST(SubclassDateBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1079,14 +1079,14 @@ TEST(SubclassDateBuiltin) { TEST(SubclassDateBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassDateBuiltin(); } TEST(SubclassStringBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1097,14 +1097,14 @@ TEST(SubclassStringBuiltin) { TEST(SubclassStringBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassStringBuiltin(); } TEST(SubclassRegExpBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1115,14 +1115,14 @@ TEST(SubclassRegExpBuiltin) { TEST(SubclassRegExpBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassRegExpBuiltin(); } TEST(SubclassArrayBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1131,14 +1131,14 @@ TEST(SubclassArrayBuiltin) { TEST(SubclassArrayBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassArrayBuiltin(); } TEST(SubclassTypedArrayBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1152,14 +1152,14 @@ TEST(SubclassTypedArrayBuiltin) { TEST(SubclassTypedArrayBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassTypedArrayBuiltin(); } TEST(SubclassCollectionBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1171,14 +1171,14 @@ TEST(SubclassCollectionBuiltin) { TEST(SubclassCollectionBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassCollectionBuiltin(); } TEST(SubclassArrayBufferBuiltin) { // Avoid eventual completion of in-object slack tracking. 
- FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1189,14 +1189,14 @@ TEST(SubclassArrayBufferBuiltin) { TEST(SubclassArrayBufferBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassArrayBufferBuiltin(); } TEST(SubclassPromiseBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1206,7 +1206,7 @@ TEST(SubclassPromiseBuiltin) { TEST(SubclassPromiseBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassPromiseBuiltin(); } @@ -1413,7 +1413,7 @@ TEST(InstanceFieldsArePropertiesFieldsAndConstructorLazy) { } TEST(InstanceFieldsArePropertiesDefaultConstructorEager) { - i::FLAG_lazy = false; + i::v8_flags.lazy = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1435,7 +1435,7 @@ TEST(InstanceFieldsArePropertiesDefaultConstructorEager) { } TEST(InstanceFieldsArePropertiesFieldsAndConstructorEager) { - i::FLAG_lazy = false; + i::v8_flags.lazy = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); diff --git a/deps/v8/test/cctest/test-js-weak-refs.cc b/deps/v8/test/cctest/test-js-weak-refs.cc index 1632d4e48ee7e5..2ac7a1dcf9392d 100644 --- a/deps/v8/test/cctest/test-js-weak-refs.cc +++ b/deps/v8/test/cctest/test-js-weak-refs.cc @@ -711,7 +711,7 @@ TEST(TestJSWeakRef) { } TEST(TestJSWeakRefIncrementalMarking) { - if (!FLAG_incremental_marking) { + if (!v8_flags.incremental_marking) { return; } ManualGCScope manual_gc_scope; @@ -783,7 +783,7 @@ TEST(TestJSWeakRefKeepDuringJob) { } TEST(TestJSWeakRefKeepDuringJobIncrementalMarking) { - if (!FLAG_incremental_marking) { + if (!v8_flags.incremental_marking) { return; } ManualGCScope manual_gc_scope; @@ -872,7 +872,7 @@ TEST(TestRemoveUnregisterToken) { } TEST(JSWeakRefScavengedInWorklist) { - if (!FLAG_incremental_marking || FLAG_single_generation) { + if (!v8_flags.incremental_marking || v8_flags.single_generation) { return; } @@ -922,8 +922,8 @@ TEST(JSWeakRefScavengedInWorklist) { } TEST(JSWeakRefTenuredInWorklist) { - if (!FLAG_incremental_marking || FLAG_single_generation || - FLAG_separate_gc_phases) { + if (!v8_flags.incremental_marking || v8_flags.single_generation || + v8_flags.separate_gc_phases) { return; } @@ -975,10 +975,10 @@ TEST(JSWeakRefTenuredInWorklist) { } TEST(UnregisterTokenHeapVerifier) { - if (!FLAG_incremental_marking) return; + if (!v8_flags.incremental_marking) return; ManualGCScope manual_gc_scope; #ifdef VERIFY_HEAP - FLAG_verify_heap = true; + v8_flags.verify_heap = true; #endif CcTest::InitializeVM(); @@ -1023,10 +1023,10 @@ TEST(UnregisterTokenHeapVerifier) { } TEST(UnregisteredAndUnclearedCellHeapVerifier) { - if (!FLAG_incremental_marking) return; + if (!v8_flags.incremental_marking) return; ManualGCScope manual_gc_scope; #ifdef VERIFY_HEAP - FLAG_verify_heap = true; + v8_flags.verify_heap = true; #endif CcTest::InitializeVM(); diff --git a/deps/v8/test/cctest/test-liveedit.cc b/deps/v8/test/cctest/test-liveedit.cc index ec8a874a30e4f8..67775c74aaffc6 100644 --- a/deps/v8/test/cctest/test-liveedit.cc +++ b/deps/v8/test/cctest/test-liveedit.cc @@ -234,10 +234,10 @@ TEST(LiveEditPatchFunctions) { v8::HandleScope scope(env->GetIsolate()); v8::Local<v8::Context> context = env.local(); // Check that function is removed from compilation cache. 
- i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; PatchFunctions(context, "42;", "%AbortJS('')"); PatchFunctions(context, "42;", "239;"); - i::FLAG_allow_natives_syntax = false; + i::v8_flags.allow_natives_syntax = false; // Basic test cases. PatchFunctions(context, "42;", "2;"); @@ -349,7 +349,7 @@ TEST(LiveEditPatchFunctions) { ->Value(), 6); - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; PatchFunctions(context, "function foo(a, b) { return a + b; }; " "%PrepareFunctionForOptimization(foo);" @@ -360,7 +360,7 @@ TEST(LiveEditPatchFunctions) { .ToLocalChecked() ->Value(), 35); - i::FLAG_allow_natives_syntax = false; + i::v8_flags.allow_natives_syntax = false; // Check inner function. PatchFunctions( diff --git a/deps/v8/test/cctest/test-lockers.cc b/deps/v8/test/cctest/test-lockers.cc index 3bcfcd3d83de51..c33c94a8180b66 100644 --- a/deps/v8/test/cctest/test-lockers.cc +++ b/deps/v8/test/cctest/test-lockers.cc @@ -122,7 +122,7 @@ namespace internal { namespace test_lockers { TEST(LazyDeoptimizationMultithread) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate = v8::Isolate::New(create_params); @@ -175,7 +175,7 @@ TEST(LazyDeoptimizationMultithread) { } TEST(LazyDeoptimizationMultithreadWithNatives) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate = v8::Isolate::New(create_params); @@ -231,7 +231,7 @@ TEST(LazyDeoptimizationMultithreadWithNatives) { } TEST(EagerDeoptimizationMultithread) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate = v8::Isolate::New(create_params); @@ -432,7 +432,7 @@ static void StartJoinAndDeleteThreads( // Run many threads all locking on the same isolate TEST(IsolateLockingStress) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; #if V8_TARGET_ARCH_MIPS const int kNThreads = 50; #else @@ -477,7 +477,7 @@ class IsolateNestedLockingThread : public JoinableThread { // Run many threads with nested locks TEST(IsolateNestedLocking) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; #if V8_TARGET_ARCH_MIPS const int kNThreads = 50; #else @@ -523,7 +523,7 @@ class SeparateIsolatesLocksNonexclusiveThread : public JoinableThread { // Run parallel threads that lock and access different isolates in parallel TEST(SeparateIsolatesLocksNonexclusive) { - i::FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; #if V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_S390 const int kNThreads = 50; #else @@ -608,7 +608,7 @@ class LockerUnlockerThread : public JoinableThread { // Use unlocker inside of a Locker, multiple threads. TEST(LockerUnlocker) { - i::FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; #if V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_S390 const int kNThreads = 50; #else @@ -666,7 +666,7 @@ class LockTwiceAndUnlockThread : public JoinableThread { // Use Unlocker inside two Lockers. 
TEST(LockTwiceAndUnlock) { - i::FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; #if V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_S390 const int kNThreads = 50; #else diff --git a/deps/v8/test/cctest/test-log-stack-tracer.cc b/deps/v8/test/cctest/test-log-stack-tracer.cc index 445df286f5dd47..b04f57f7c1066d 100644 --- a/deps/v8/test/cctest/test-log-stack-tracer.cc +++ b/deps/v8/test/cctest/test-log-stack-tracer.cc @@ -142,7 +142,7 @@ static void CreateTraceCallerFunction(v8::Local<v8::Context> context, // walking. TEST(CFromJSStackTrace) { // BUG(1303) Inlining of JSFuncDoTrace() in JSTrace below breaks this test. - i::FLAG_turbo_inlining = false; + i::v8_flags.turbo_inlining = false; TickSample sample; i::TraceExtension::InitTraceEnv(&sample); @@ -192,7 +192,7 @@ TEST(CFromJSStackTrace) { TEST(PureJSStackTrace) { // This test does not pass with inlining enabled since inlined functions // don't appear in the stack trace. - i::FLAG_turbo_inlining = false; + i::v8_flags.turbo_inlining = false; TickSample sample; i::TraceExtension::InitTraceEnv(&sample); diff --git a/deps/v8/test/cctest/test-mementos.cc b/deps/v8/test/cctest/test-mementos.cc index 7bb19545626317..ccaa1c733f682a 100644 --- a/deps/v8/test/cctest/test-mementos.cc +++ b/deps/v8/test/cctest/test-mementos.cc @@ -61,7 +61,8 @@ static void SetUpNewSpaceWithPoisonedMementoAtTop() { TEST(Regress340063) { CcTest::InitializeVM(); - if (!i::FLAG_allocation_site_pretenuring || FLAG_single_generation) return; + if (!i::v8_flags.allocation_site_pretenuring || v8_flags.single_generation) + return; v8::HandleScope scope(CcTest::isolate()); SetUpNewSpaceWithPoisonedMementoAtTop(); @@ -77,11 +78,12 @@ TEST(Regress470390) { // With MinorMC, we may have object allocated after `new_space->top()`. If the // next object after `new_space->top()` is an invalid memento, heap // verification should fail. 
- if (FLAG_minor_mc) return; + if (v8_flags.minor_mc) return; #endif // VERIFY_HEAP CcTest::InitializeVM(); - if (!i::FLAG_allocation_site_pretenuring || FLAG_single_generation) return; + if (!i::v8_flags.allocation_site_pretenuring || v8_flags.single_generation) + return; v8::HandleScope scope(CcTest::isolate()); SetUpNewSpaceWithPoisonedMementoAtTop(); @@ -98,7 +100,8 @@ TEST(Regress470390) { TEST(BadMementoAfterTopForceScavenge) { CcTest::InitializeVM(); - if (!i::FLAG_allocation_site_pretenuring || FLAG_single_generation) return; + if (!i::v8_flags.allocation_site_pretenuring || v8_flags.single_generation) + return; v8::HandleScope scope(CcTest::isolate()); SetUpNewSpaceWithPoisonedMementoAtTop(); diff --git a/deps/v8/test/cctest/test-profile-generator.cc b/deps/v8/test/cctest/test-profile-generator.cc index 6cfb0cd3bb9810..e0164a7d8e005f 100644 --- a/deps/v8/test/cctest/test-profile-generator.cc +++ b/deps/v8/test/cctest/test-profile-generator.cc @@ -383,14 +383,11 @@ namespace { class TestSetup { public: - TestSetup() - : old_flag_prof_browser_mode_(i::FLAG_prof_browser_mode) { - i::FLAG_prof_browser_mode = false; + TestSetup() : old_flag_prof_browser_mode_(i::v8_flags.prof_browser_mode) { + i::v8_flags.prof_browser_mode = false; } - ~TestSetup() { - i::FLAG_prof_browser_mode = old_flag_prof_browser_mode_; - } + ~TestSetup() { i::v8_flags.prof_browser_mode = old_flag_prof_browser_mode_; } private: bool old_flag_prof_browser_mode_; @@ -725,7 +722,7 @@ static const ProfileNode* PickChild(const ProfileNode* parent, TEST(RecordStackTraceAtStartProfiling) { // This test does not pass with inlining enabled since inlined functions // don't appear in the stack trace. - i::FLAG_turbo_inlining = false; + i::v8_flags.turbo_inlining = false; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); @@ -804,7 +801,7 @@ static const v8::CpuProfileNode* PickChild(const v8::CpuProfileNode* parent, TEST(ProfileNodeScriptId) { // This test does not pass with inlining enabled since inlined functions // don't appear in the stack trace. - i::FLAG_turbo_inlining = false; + i::v8_flags.turbo_inlining = false; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); @@ -889,7 +886,7 @@ TEST(LineNumber) { profiler.processor()->StopSynchronously(); - bool is_lazy = i::FLAG_lazy; + bool is_lazy = i::v8_flags.lazy; CHECK_EQ(1, GetFunctionLineNumber(&profiler, &env, isolate, "foo_at_the_first_line")); CHECK_EQ(is_lazy ? 
0 : 4, GetFunctionLineNumber(&profiler, &env, isolate, @@ -904,9 +901,9 @@ TEST(LineNumber) { TEST(BailoutReason) { #ifndef V8_LITE_MODE - i::FLAG_allow_natives_syntax = true; - i::FLAG_always_turbofan = false; - i::FLAG_turbofan = true; + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.always_turbofan = false; + i::v8_flags.turbofan = true; v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Context> env = CcTest::NewContext({PROFILER_EXTENSION_ID}); v8::Context::Scope context_scope(env); diff --git a/deps/v8/test/cctest/test-random-number-generator.cc b/deps/v8/test/cctest/test-random-number-generator.cc index c05c83db1fad11..4217a5f5936a80 100644 --- a/deps/v8/test/cctest/test-random-number-generator.cc +++ b/deps/v8/test/cctest/test-random-number-generator.cc @@ -38,7 +38,7 @@ static const int64_t kRandomSeeds[] = {-1, 1, 42, 100, 1234567890, 987654321}; TEST(RandomSeedFlagIsUsed) { for (unsigned n = 0; n < arraysize(kRandomSeeds); ++n) { - FLAG_random_seed = static_cast<int>(kRandomSeeds[n]); + v8_flags.random_seed = static_cast<int>(kRandomSeeds[n]); v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* i = v8::Isolate::New(create_params); @@ -63,7 +63,7 @@ double ChiSquared(int m, int n) { // Test for correlations between recent bits from the PRNG, or bits that are // biased. void RandomBitCorrelation(int random_bit) { - FLAG_random_seed = 31415926; + v8_flags.random_seed = 31415926; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate = v8::Isolate::New(create_params); diff --git a/deps/v8/test/cctest/test-sampler-api.cc b/deps/v8/test/cctest/test-sampler-api.cc index 530da7faa9b54c..9ecb7e2e671933 100644 --- a/deps/v8/test/cctest/test-sampler-api.cc +++ b/deps/v8/test/cctest/test-sampler-api.cc @@ -201,7 +201,7 @@ TEST(BuiltinsInSamples) { // ^ ^ ^ // sample.stack indices 2 1 0 TEST(StackFramesConsistent) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; const char* test_script = "function test_sampler_api_inner() {" " CollectSample();" diff --git a/deps/v8/test/cctest/test-serialize.cc b/deps/v8/test/cctest/test-serialize.cc index 9b417b5d698f77..d9df36b85bfc0c 100644 --- a/deps/v8/test/cctest/test-serialize.cc +++ b/deps/v8/test/cctest/test-serialize.cc @@ -69,7 +69,7 @@ enum CodeCacheType { kLazy, kEager, kAfterExecute }; void DisableAlwaysOpt() { // Isolates prepared for serialization do not optimize. The only exception is // with the flag --always-turbofan. 
- FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; } // A convenience struct to simplify management of the blobs required to @@ -962,7 +962,7 @@ void TypedArrayTestHelper( const Int32Expectations& after_restore_expectations = Int32Expectations(), v8::ArrayBuffer::Allocator* allocator = nullptr) { DisableAlwaysOpt(); - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -1133,7 +1133,7 @@ UNINITIALIZED_TEST(CustomSnapshotDataBlobDetachedArrayBuffer) { std::make_tuple("x.length", 0)}; DisableAlwaysOpt(); - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -1203,7 +1203,7 @@ UNINITIALIZED_TEST(CustomSnapshotDataBlobOnOrOffHeapTypedArray) { std::make_tuple("y[2]", 48), std::make_tuple("z[0]", 96)}; DisableAlwaysOpt(); - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -1263,7 +1263,7 @@ UNINITIALIZED_TEST(CustomSnapshotDataBlobOnOrOffHeapTypedArray) { UNINITIALIZED_TEST(CustomSnapshotDataBlobTypedArrayNoEmbedderFieldCallback) { const char* code = "var x = new Uint8Array(8);"; DisableAlwaysOpt(); - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -1671,8 +1671,8 @@ static Handle<SharedFunctionInfo> CompileScriptAndProduceCache( } TEST(CodeSerializerWithProfiler) { - FLAG_enable_lazy_source_positions = true; - FLAG_stress_lazy_source_positions = false; + v8_flags.enable_lazy_source_positions = true; + v8_flags.stress_lazy_source_positions = false; LocalContext context; Isolate* isolate = CcTest::i_isolate(); @@ -1773,7 +1773,7 @@ TEST(CodeSerializerOnePlusOne) { TestCodeSerializerOnePlusOneImpl(); } // See bug v8:9122 TEST(CodeSerializerOnePlusOneWithInterpretedFramesNativeStack) { - FLAG_interpreted_frames_native_stack = true; + v8_flags.interpreted_frames_native_stack = true; // We pass false because this test will create IET copies (which are // builtins). TestCodeSerializerOnePlusOneImpl(false); @@ -2019,7 +2019,7 @@ TEST(CodeSerializerLargeCodeObject) { // The serializer only tests the shared code, which is always the unoptimized // code. Don't even bother generating optimized code to avoid timeouts. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; base::Vector<const char> source = ConstructSource( base::StaticCharVector("var j=1; if (j == 0) {"), @@ -2064,13 +2064,13 @@ TEST(CodeSerializerLargeCodeObject) { } TEST(CodeSerializerLargeCodeObjectWithIncrementalMarking) { - if (!FLAG_incremental_marking) return; - if (!FLAG_compact) return; + if (!v8_flags.incremental_marking) return; + if (!v8_flags.compact) return; ManualGCScope manual_gc_scope; - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; const char* filter_flag = "--turbo-filter=NOTHING"; FlagList::SetFlagsFromString(filter_flag, strlen(filter_flag)); - FLAG_manual_evacuation_candidates_selection = true; + v8_flags.manual_evacuation_candidates_selection = true; LocalContext context; Isolate* isolate = CcTest::i_isolate(); @@ -2665,8 +2665,8 @@ TEST(CodeSerializerIsolatesEager) { TEST(CodeSerializerAfterExecute) { // We test that no compilations happen when running this code. Forcing // to always optimize breaks this test. 
- bool prev_always_turbofan_value = FLAG_always_turbofan; - FLAG_always_turbofan = false; + bool prev_always_turbofan_value = v8_flags.always_turbofan; + v8_flags.always_turbofan = false; const char* js_source = "function f() { return 'abc'; }; f() + 'def'"; v8::ScriptCompiler::CachedData* cache = CompileRunAndProduceCache(js_source, CodeCacheType::kAfterExecute); @@ -2712,7 +2712,7 @@ TEST(CodeSerializerAfterExecute) { isolate2->Dispose(); // Restore the flags. - FLAG_always_turbofan = prev_always_turbofan_value; + v8_flags.always_turbofan = prev_always_turbofan_value; } TEST(CodeSerializerFlagChange) { @@ -2723,7 +2723,8 @@ TEST(CodeSerializerFlagChange) { create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); v8::Isolate* isolate2 = v8::Isolate::New(create_params); - FLAG_allow_natives_syntax = true; // Flag change should trigger cache reject. + v8_flags.allow_natives_syntax = + true; // Flag change should trigger cache reject. FlagList::EnforceFlagImplications(); { v8::Isolate::Scope iscope(isolate2); @@ -2743,7 +2744,7 @@ TEST(CodeSerializerFlagChange) { } TEST(CodeSerializerBitFlip) { - i::FLAG_verify_snapshot_checksum = true; + i::v8_flags.verify_snapshot_checksum = true; const char* js_source = "function f() { return 'abc'; }; f() + 'def'"; v8::ScriptCompiler::CachedData* cache = CompileRunAndProduceCache(js_source); @@ -2844,7 +2845,7 @@ TEST(CodeSerializerWithHarmonyScoping) { } TEST(Regress503552) { - if (!FLAG_incremental_marking) return; + if (!v8_flags.incremental_marking) return; // Test that the code serializer can deal with weak cells that form a linked // list during incremental marking. CcTest::InitializeVM(); @@ -4111,9 +4112,9 @@ UNINITIALIZED_TEST(SnapshotCreatorIncludeGlobalProxy) { UNINITIALIZED_TEST(ReinitializeHashSeedJSCollectionRehashable) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4141,7 +4142,7 @@ UNINITIALIZED_TEST(ReinitializeHashSeedJSCollectionRehashable) { CHECK(blob.CanBeRehashed()); } - i::FLAG_hash_seed = 1337; + i::v8_flags.hash_seed = 1337; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); create_params.snapshot_blob = &blob; @@ -4166,9 +4167,9 @@ UNINITIALIZED_TEST(ReinitializeHashSeedJSCollectionRehashable) { UNINITIALIZED_TEST(ReinitializeHashSeedRehashable) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4209,7 +4210,7 @@ UNINITIALIZED_TEST(ReinitializeHashSeedRehashable) { CHECK(blob.CanBeRehashed()); } - i::FLAG_hash_seed = 1337; + i::v8_flags.hash_seed = 1337; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); create_params.snapshot_blob = &blob; @@ -4239,9 +4240,9 @@ UNINITIALIZED_TEST(ReinitializeHashSeedRehashable) { UNINITIALIZED_TEST(ClassFields) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); 
v8::StartupData blob; { @@ -4320,9 +4321,9 @@ UNINITIALIZED_TEST(ClassFields) { UNINITIALIZED_TEST(ClassFieldsReferencePrivateInInitializer) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4375,9 +4376,9 @@ UNINITIALIZED_TEST(ClassFieldsReferencePrivateInInitializer) { UNINITIALIZED_TEST(ClassFieldsReferenceClassVariable) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4426,9 +4427,9 @@ UNINITIALIZED_TEST(ClassFieldsReferenceClassVariable) { UNINITIALIZED_TEST(ClassFieldsNested) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4487,9 +4488,9 @@ UNINITIALIZED_TEST(ClassFieldsNested) { UNINITIALIZED_TEST(ClassPrivateMethods) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4561,9 +4562,9 @@ UNINITIALIZED_TEST(ClassPrivateMethods) { UNINITIALIZED_TEST(ClassFieldsWithInheritance) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4656,9 +4657,9 @@ UNINITIALIZED_TEST(ClassFieldsWithInheritance) { UNINITIALIZED_TEST(ClassFieldsRecalcPrivateNames) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4718,9 +4719,9 @@ UNINITIALIZED_TEST(ClassFieldsRecalcPrivateNames) { UNINITIALIZED_TEST(ClassFieldsWithBindings) { DisableAlwaysOpt(); - i::FLAG_rehash_snapshot = true; - i::FLAG_hash_seed = 42; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.rehash_snapshot = true; + i::v8_flags.hash_seed = 42; + i::v8_flags.allow_natives_syntax = true; DisableEmbeddedBlobRefcounting(); v8::StartupData blob; { @@ -4842,7 +4843,7 @@ UNINITIALIZED_TEST(WeakArraySerializationInSnapshot) { DisableAlwaysOpt(); DisableEmbeddedBlobRefcounting(); - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::StartupData blob; { v8::SnapshotCreator creator; @@ -5023,11 +5024,11 @@ UNINITIALIZED_TEST(SnapshotCreatorDontDeferByteArrayForTypedArray) { class V8_NODISCARD DisableLazySourcePositionScope { public: DisableLazySourcePositionScope() - : backup_value_(FLAG_enable_lazy_source_positions) { - FLAG_enable_lazy_source_positions = false; + : backup_value_(v8_flags.enable_lazy_source_positions) { + v8_flags.enable_lazy_source_positions = false; } 
~DisableLazySourcePositionScope() { - FLAG_enable_lazy_source_positions = backup_value_; + v8_flags.enable_lazy_source_positions = backup_value_; } private: @@ -5093,16 +5094,19 @@ UNINITIALIZED_TEST(SharedStrings) { // Make all the flags that require a shared heap false before creating the // isolate to serialize. - FLAG_shared_string_table = false; - FLAG_harmony_struct = false; + v8_flags.shared_string_table = false; + v8_flags.harmony_struct = false; v8::Isolate* isolate_to_serialize = TestSerializer::NewIsolateInitialized(); StartupBlobs blobs = Serialize(isolate_to_serialize); isolate_to_serialize->Dispose(); - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; + + if (!v8_flags.shared_space) { + TestSerializer::InitializeProcessWideSharedIsolateFromBlob(blobs); + } - TestSerializer::InitializeProcessWideSharedIsolateFromBlob(blobs); v8::Isolate* isolate1 = TestSerializer::NewIsolateFromBlob(blobs); v8::Isolate* isolate2 = TestSerializer::NewIsolateFromBlob(blobs); Isolate* i_isolate1 = reinterpret_cast<Isolate*>(isolate1); @@ -5116,11 +5120,14 @@ UNINITIALIZED_TEST(SharedStrings) { // Because both isolate1 and isolate2 are considered running on the main // thread, one must be parked to avoid deadlock in the shared heap // verification that may happen on client heap disposal. - ParkedScope parked(i_isolate2->main_thread_local_isolate()); - isolate1->Dispose(); + ParkedScope parked(i_isolate1->main_thread_local_isolate()); + isolate2->Dispose(); + } + isolate1->Dispose(); + + if (!v8_flags.shared_space) { + TestSerializer::DeleteProcessWideSharedIsolate(); } - isolate2->Dispose(); - TestSerializer::DeleteProcessWideSharedIsolate(); blobs.Dispose(); FreeCurrentEmbeddedBlob(); diff --git a/deps/v8/test/cctest/test-shared-strings.cc b/deps/v8/test/cctest/test-shared-strings.cc index 29a9818fa4a5fa..41ff4ef054226c 100644 --- a/deps/v8/test/cctest/test-shared-strings.cc +++ b/deps/v8/test/cctest/test-shared-strings.cc @@ -81,10 +81,10 @@ class MultiClientIsolateTest { }; UNINITIALIZED_TEST(InPlaceInternalizableStringsAreShared) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; MultiClientIsolateTest test; Isolate* i_isolate1 = test.i_main_isolate(); @@ -130,7 +130,7 @@ UNINITIALIZED_TEST(InPlaceInternalizableStringsAreShared) { UNINITIALIZED_TEST(InPlaceInternalization) { if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; MultiClientIsolateTest test; IsolateParkOnDisposeWrapper isolate_wrapper(test.NewClientIsolate(), @@ -192,10 +192,10 @@ UNINITIALIZED_TEST(InPlaceInternalization) { } UNINITIALIZED_TEST(YoungInternalization) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; MultiClientIsolateTest test; IsolateParkOnDisposeWrapper isolate_wrapper(test.NewClientIsolate(), @@ -378,7 +378,7 @@ Handle<FixedArray> CreateSharedOneByteStrings(Isolate* isolate, void TestConcurrentInternalization(TestHitOrMiss hit_or_miss) { if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; constexpr int kThreads = 4; constexpr int kStrings = 4096; @@ -460,7 +460,7 @@ class ConcurrentStringTableLookupThread final UNINITIALIZED_TEST(ConcurrentStringTableLookup) { 
if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; constexpr int kTotalThreads = 4; constexpr int kInternalizationThreads = 1; @@ -610,7 +610,7 @@ class ExternalResourceFactory { UNINITIALIZED_TEST(StringShare) { if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -674,8 +674,9 @@ UNINITIALIZED_TEST(StringShare) { TwoByteResource* two_byte_res = resource_factory.CreateTwoByte(two_byte); CHECK(one_byte_ext->MakeExternal(one_byte_res)); CHECK(two_byte_ext->MakeExternal(two_byte_res)); - if (FLAG_always_use_string_forwarding_table) { - i_isolate->heap()->CollectSharedGarbage( + if (v8_flags.always_use_string_forwarding_table) { + i_isolate->heap()->CollectGarbageShared( + i_isolate->main_thread_local_heap(), GarbageCollectionReason::kTesting); } CHECK(one_byte_ext->IsExternalString()); @@ -691,7 +692,7 @@ UNINITIALIZED_TEST(StringShare) { // All other strings are flattened then copied if the flatten didn't already // create a new copy. - if (!FLAG_single_generation) { + if (!v8_flags.single_generation) { // Young strings Handle<String> young_one_byte_seq = factory->NewStringFromAsciiChecked( raw_one_byte, AllocationType::kYoung); @@ -710,7 +711,7 @@ UNINITIALIZED_TEST(StringShare) { CheckSharedStringIsEqualCopy(shared_two_byte, young_two_byte_seq); } - if (!FLAG_always_use_string_forwarding_table) { + if (!v8_flags.always_use_string_forwarding_table) { // Thin strings Handle<String> one_byte_seq1 = factory->NewStringFromAsciiChecked(raw_one_byte); @@ -756,12 +757,12 @@ UNINITIALIZED_TEST(StringShare) { } UNINITIALIZED_TEST(PromotionMarkCompact) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects. - FLAG_shared_string_table = true; - FLAG_manual_evacuation_candidates_selection = true; + v8_flags.stress_concurrent_allocation = false; // For SealCurrentObjects. + v8_flags.shared_string_table = true; + v8_flags.manual_evacuation_candidates_selection = true; MultiClientIsolateTest test; Isolate* i_isolate = test.i_main_isolate(); @@ -791,18 +792,18 @@ UNINITIALIZED_TEST(PromotionMarkCompact) { // In-place-internalizable strings are promoted into the shared heap when // sharing. - CHECK(!heap->Contains(*one_byte_seq)); + CHECK_IMPLIES(!v8_flags.shared_space, !heap->Contains(*one_byte_seq)); CHECK(heap->SharedHeapContains(*one_byte_seq)); } } UNINITIALIZED_TEST(PromotionScavenge) { - if (FLAG_minor_mc) return; - if (FLAG_single_generation) return; + if (v8_flags.minor_mc) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects. - FLAG_shared_string_table = true; + v8_flags.stress_concurrent_allocation = false; // For SealCurrentObjects. + v8_flags.shared_string_table = true; MultiClientIsolateTest test; Isolate* i_isolate = test.i_main_isolate(); @@ -830,22 +831,21 @@ UNINITIALIZED_TEST(PromotionScavenge) { // In-place-internalizable strings are promoted into the shared heap when // sharing. 
-    CHECK(!heap->Contains(*one_byte_seq));
     CHECK(heap->SharedHeapContains(*one_byte_seq));
   }
 }
 
 UNINITIALIZED_TEST(PromotionScavengeOldToShared) {
-  if (FLAG_minor_mc) {
+  if (v8_flags.minor_mc) {
     // Promoting from new space directly to shared heap is not implemented in
     // MinorMC.
     return;
   }
-  if (FLAG_single_generation) return;
+  if (v8_flags.single_generation) return;
   if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return;
-  if (FLAG_stress_concurrent_allocation) return;
+  if (v8_flags.stress_concurrent_allocation) return;
 
-  FLAG_shared_string_table = true;
+  v8_flags.shared_string_table = true;
 
   MultiClientIsolateTest test;
   Isolate* i_isolate = test.i_main_isolate();
@@ -879,7 +879,6 @@ UNINITIALIZED_TEST(PromotionScavengeOldToShared) {
 
     // In-place-internalizable strings are promoted into the shared heap when
     // sharing.
-    CHECK(!heap->Contains(*one_byte_seq));
     CHECK(heap->SharedHeapContains(*one_byte_seq));
 
     // Since the GC promoted that string into shared heap, it also needs to
@@ -890,13 +889,13 @@
 }
 
 UNINITIALIZED_TEST(PromotionMarkCompactNewToShared) {
-  if (FLAG_single_generation) return;
+  if (v8_flags.single_generation) return;
   if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return;
-  if (FLAG_stress_concurrent_allocation) return;
+  if (v8_flags.stress_concurrent_allocation) return;
 
-  FLAG_shared_string_table = true;
-  FLAG_manual_evacuation_candidates_selection = true;
-  FLAG_page_promotion = false;
+  v8_flags.shared_string_table = true;
+  v8_flags.manual_evacuation_candidates_selection = true;
+  v8_flags.page_promotion = false;
 
   MultiClientIsolateTest test;
   Isolate* i_isolate = test.i_main_isolate();
@@ -928,7 +927,6 @@ UNINITIALIZED_TEST(PromotionMarkCompactNewToShared) {
 
     // In-place-internalizable strings are promoted into the shared heap when
     // sharing.
-    CHECK(!heap->Contains(*one_byte_seq));
     CHECK(heap->SharedHeapContains(*one_byte_seq));
 
     // Since the GC promoted that string into shared heap, it also needs to
@@ -940,11 +938,15 @@ UNINITIALIZED_TEST(PromotionMarkCompactNewToShared) {
 
 UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) {
   if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return;
-  if (FLAG_stress_concurrent_allocation) return;
-  if (!FLAG_page_promotion) return;
+  if (v8_flags.stress_concurrent_allocation) return;
+  if (!v8_flags.page_promotion) return;
+  if (v8_flags.single_generation) {
+    // Strings allocated in old space may be "pretenured" to the shared heap.
+    return;
+  }
 
-  FLAG_shared_string_table = true;
-  FLAG_manual_evacuation_candidates_selection = true;
+  v8_flags.shared_string_table = true;
+  v8_flags.manual_evacuation_candidates_selection = true;
 
   MultiClientIsolateTest test;
   Isolate* i_isolate = test.i_main_isolate();
@@ -986,7 +988,6 @@ UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) {
 
     // In-place-internalizable strings are promoted into the shared heap when
     // sharing.
- CHECK(!heap->Contains(*one_byte_seq)); CHECK(heap->SharedHeapContains(*one_byte_seq)); // Since the GC promoted that string into shared heap, it also needs to @@ -997,12 +998,12 @@ UNINITIALIZED_TEST(PromotionMarkCompactOldToShared) { } UNINITIALIZED_TEST(PagePromotionRecordingOldToShared) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - if (FLAG_stress_concurrent_allocation) return; + if (v8_flags.stress_concurrent_allocation) return; - FLAG_shared_string_table = true; - FLAG_manual_evacuation_candidates_selection = true; + v8_flags.shared_string_table = true; + v8_flags.manual_evacuation_candidates_selection = true; MultiClientIsolateTest test; Isolate* i_isolate = test.i_main_isolate(); @@ -1050,7 +1051,7 @@ UNINITIALIZED_TEST(PagePromotionRecordingOldToShared) { UNINITIALIZED_TEST(InternalizedSharedStringsTransitionDuringGC) { if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; constexpr int kStrings = 4096; @@ -1092,10 +1093,10 @@ UNINITIALIZED_TEST(InternalizedSharedStringsTransitionDuringGC) { } UNINITIALIZED_TEST(ShareExternalString) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1113,8 +1114,10 @@ UNINITIALIZED_TEST(ShareExternalString) { OneByteResource* resource = resource_factory.CreateOneByte(raw_one_byte); one_byte->MakeExternal(resource); - if (FLAG_always_use_string_forwarding_table) { - i_isolate1->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting); + if (v8_flags.always_use_string_forwarding_table) { + i_isolate1->heap()->CollectGarbageShared( + i_isolate1->main_thread_local_heap(), + GarbageCollectionReason::kTesting); } CHECK(one_byte->IsExternalString()); Handle<ExternalOneByteString> one_byte_external = @@ -1145,10 +1148,10 @@ void CheckExternalStringResource( } // namespace UNINITIALIZED_TEST(ExternalizeSharedString) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1191,7 +1194,7 @@ UNINITIALIZED_TEST(ExternalizeSharedString) { UNINITIALIZED_TEST(ExternalizedSharedStringsTransitionDuringGC) { if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1225,7 +1228,8 @@ UNINITIALIZED_TEST(ExternalizedSharedStringsTransitionDuringGC) { } // Trigger garbage collection on the shared isolate. - i_isolate->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting); + i_isolate->heap()->CollectGarbageShared(i_isolate->main_thread_local_heap(), + GarbageCollectionReason::kTesting); // Check that GC cleared the forwarding table. 
CHECK_EQ(i_isolate->string_forwarding_table()->size(), 0); @@ -1240,10 +1244,10 @@ UNINITIALIZED_TEST(ExternalizedSharedStringsTransitionDuringGC) { } UNINITIALIZED_TEST(ExternalizeInternalizedString) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1269,8 +1273,10 @@ UNINITIALIZED_TEST(ExternalizeInternalizedString) { factory1->NewStringFromTwoByte(two_byte_vec).ToHandleChecked()); Handle<String> one_byte_intern = factory1->InternalizeString(one_byte); Handle<String> two_byte_intern = factory1->InternalizeString(two_byte); - if (FLAG_always_use_string_forwarding_table) { - i_isolate1->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting); + if (v8_flags.always_use_string_forwarding_table) { + i_isolate1->heap()->CollectGarbageShared( + i_isolate1->main_thread_local_heap(), + GarbageCollectionReason::kTesting); } CHECK(one_byte->IsThinString()); CHECK(two_byte->IsThinString()); @@ -1304,10 +1310,10 @@ UNINITIALIZED_TEST(ExternalizeInternalizedString) { } UNINITIALIZED_TEST(InternalizeSharedExternalString) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1337,7 +1343,8 @@ UNINITIALIZED_TEST(InternalizeSharedExternalString) { CHECK(shared_two_byte->HasExternalForwardingIndex(kAcquireLoad)); // Trigger GC to externalize the shared string. - i_isolate1->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting); + i_isolate1->heap()->CollectGarbageShared(i_isolate1->main_thread_local_heap(), + GarbageCollectionReason::kTesting); CHECK(shared_one_byte->IsShared()); CHECK(shared_one_byte->IsExternalString()); CHECK(shared_two_byte->IsShared()); @@ -1372,7 +1379,8 @@ UNINITIALIZED_TEST(InternalizeSharedExternalString) { // Another GC should create an externalized internalized string of the cached // (one byte) string and turn the uncached (two byte) string into a // ThinString, disposing the external resource. 
- i_isolate1->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting); + i_isolate1->heap()->CollectGarbageShared(i_isolate1->main_thread_local_heap(), + GarbageCollectionReason::kTesting); CHECK_EQ(shared_one_byte->map().instance_type(), InstanceType::EXTERNAL_ONE_BYTE_INTERNALIZED_STRING_TYPE); if (is_uncached) { @@ -1385,10 +1393,10 @@ UNINITIALIZED_TEST(InternalizeSharedExternalString) { } UNINITIALIZED_TEST(ExternalizeAndInternalizeMissSharedString) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1420,10 +1428,10 @@ UNINITIALIZED_TEST(ExternalizeAndInternalizeMissSharedString) { } UNINITIALIZED_TEST(InternalizeHitAndExternalizeSharedString) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1473,10 +1481,10 @@ UNINITIALIZED_TEST(InternalizeHitAndExternalizeSharedString) { } UNINITIALIZED_TEST(InternalizeMissAndExternalizeSharedString) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1571,7 +1579,7 @@ void CreateExternalResources(Isolate* i_isolate, Handle<FixedArray> strings, void TestConcurrentExternalization(bool share_resources) { if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1624,7 +1632,8 @@ void TestConcurrentExternalization(bool share_resources) { sema_execute_complete.ParkedWait(local_isolate); } - i_isolate->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting); + i_isolate->heap()->CollectGarbageShared(i_isolate->main_thread_local_heap(), + GarbageCollectionReason::kTesting); for (int i = 0; i < shared_strings->length(); i++) { Handle<String> input_string(String::cast(shared_strings->get(i)), @@ -1667,7 +1676,7 @@ void TestConcurrentExternalizationAndInternalization( TestHitOrMiss hit_or_miss) { if (!V8_CAN_CREATE_SHARED_HEAP_BOOL) return; - FLAG_shared_string_table = true; + v8_flags.shared_string_table = true; ExternalResourceFactory resource_factory; MultiClientIsolateTest test; @@ -1720,7 +1729,8 @@ void TestConcurrentExternalizationAndInternalization( sema_execute_complete.ParkedWait(local_isolate); } - i_isolate->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting); + i_isolate->heap()->CollectGarbageShared(i_isolate->main_thread_local_heap(), + GarbageCollectionReason::kTesting); for (int i = 0; i < shared_strings->length(); i++) { Handle<String> input_string(String::cast(shared_strings->get(i)), diff --git a/deps/v8/test/cctest/test-stack-unwinding-win64.cc b/deps/v8/test/cctest/test-stack-unwinding-win64.cc index a7b19af6c0cd42..61c9dc1bf11c22 100644 --- a/deps/v8/test/cctest/test-stack-unwinding-win64.cc +++ b/deps/v8/test/cctest/test-stack-unwinding-win64.cc @@ -79,8 +79,8 @@ UNINITIALIZED_TEST(StackUnwindingWin64) { return; } - i::FLAG_allow_natives_syntax = true; - i::FLAG_win64_unwinding_info = true; + 
i::v8_flags.allow_natives_syntax = true; + i::v8_flags.win64_unwinding_info = true; v8::Isolate::CreateParams create_params; create_params.array_buffer_allocator = CcTest::array_buffer_allocator(); diff --git a/deps/v8/test/cctest/test-strings.cc b/deps/v8/test/cctest/test-strings.cc index bfabd3ae386d4b..837d42669a3bf3 100644 --- a/deps/v8/test/cctest/test-strings.cc +++ b/deps/v8/test/cctest/test-strings.cc @@ -649,7 +649,7 @@ static inline void PrintStats(const ConsStringGenerationData& data) { template <typename BuildString> void TestStringCharacterStream(BuildString build, int test_cases) { - FLAG_gc_global = true; + v8_flags.gc_global = true; CcTest::InitializeVM(); Isolate* isolate = CcTest::i_isolate(); HandleScope outer_scope(isolate); @@ -1154,7 +1154,7 @@ TEST(ReplaceInvalidUtf8) { } TEST(JSONStringifySliceMadeExternal) { - if (!FLAG_string_slices) return; + if (!v8_flags.string_slices) return; CcTest::InitializeVM(); // Create a sliced string from a one-byte string. The latter is turned // into a two-byte external string. Check that JSON.stringify works. @@ -1330,7 +1330,7 @@ TEST(CachedHashOverflow) { } TEST(SliceFromCons) { - if (!FLAG_string_slices) return; + if (!v8_flags.string_slices) return; CcTest::InitializeVM(); Factory* factory = CcTest::i_isolate()->factory(); v8::HandleScope scope(CcTest::isolate()); @@ -1365,7 +1365,7 @@ class OneByteVectorResource : public v8::String::ExternalOneByteStringResource { }; TEST(InternalizeExternal) { - FLAG_stress_incremental_marking = false; + v8_flags.stress_incremental_marking = false; CcTest::InitializeVM(); i::Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); @@ -1394,7 +1394,7 @@ TEST(InternalizeExternal) { } TEST(SliceFromExternal) { - if (!FLAG_string_slices) return; + if (!v8_flags.string_slices) return; CcTest::InitializeVM(); Factory* factory = CcTest::i_isolate()->factory(); v8::HandleScope scope(CcTest::isolate()); @@ -1417,7 +1417,7 @@ TEST(SliceFromExternal) { TEST(TrivialSlice) { // This tests whether a slice that contains the entire parent string // actually creates a new string (it should not). - if (!FLAG_string_slices) return; + if (!v8_flags.string_slices) return; CcTest::InitializeVM(); Factory* factory = CcTest::i_isolate()->factory(); v8::HandleScope scope(CcTest::isolate()); @@ -1446,7 +1446,7 @@ TEST(TrivialSlice) { TEST(SliceFromSlice) { // This tests whether a slice that contains the entire parent string // actually creates a new string (it should not). - if (!FLAG_string_slices) return; + if (!v8_flags.string_slices) return; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); v8::Local<v8::Value> result; @@ -1854,9 +1854,9 @@ class OneByteStringResource : public v8::String::ExternalOneByteStringResource { TEST(Regress876759) { // Thin strings are used in conjunction with young gen - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; // We don't create ThinStrings immediately when using the forwarding table. 
- if (FLAG_always_use_string_forwarding_table) return; + if (v8_flags.always_use_string_forwarding_table) return; Isolate* isolate = CcTest::i_isolate(); Factory* factory = isolate->factory(); diff --git a/deps/v8/test/cctest/test-unwinder-code-pages.cc b/deps/v8/test/cctest/test-unwinder-code-pages.cc index 16573ae6b3fc06..86479697a6d01e 100644 --- a/deps/v8/test/cctest/test-unwinder-code-pages.cc +++ b/deps/v8/test/cctest/test-unwinder-code-pages.cc @@ -265,7 +265,7 @@ bool PagesContainsAddress(size_t length, MemoryRange* pages, // Check that we can unwind when the pc is within an optimized code object on // the V8 heap. TEST(Unwind_CodeObjectPCInMiddle_Success_CodePagesAPI) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate); @@ -653,7 +653,7 @@ TEST(PCIsInV8_InJSEntryRange_CodePagesAPI) { // Large code objects can be allocated in large object space. Check that this is // inside the CodeRange. TEST(PCIsInV8_LargeCodeObject_CodePagesAPI) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; LocalContext env; v8::Isolate* isolate = env->GetIsolate(); Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate); @@ -746,7 +746,7 @@ class UnwinderTestHelper { UnwinderTestHelper* UnwinderTestHelper::instance_; TEST(Unwind_TwoNestedFunctions_CodePagesAPI) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; const char* test_script = "function test_unwinder_api_inner() {" " TryUnwind();" diff --git a/deps/v8/test/cctest/wasm/test-gc.cc b/deps/v8/test/cctest/wasm/test-gc.cc index 4a579dbf31ac81..945f100dab31a1 100644 --- a/deps/v8/test/cctest/wasm/test-gc.cc +++ b/deps/v8/test/cctest/wasm/test-gc.cc @@ -40,10 +40,6 @@ class WasmGCTester { execution_tier == TestExecutionTier::kLiftoff), flag_wasm_dynamic_tiering(&v8::internal::v8_flags.wasm_dynamic_tiering, v8::internal::v8_flags.liftoff_only != true), - // Test both setups with canonicalization and without. - flag_canonicalization( - &v8::internal::v8_flags.wasm_type_canonicalization, - execution_tier == TestExecutionTier::kTurbofan), flag_tierup(&v8::internal::v8_flags.wasm_tier_up, false), zone_(&allocator, ZONE_NAME), builder_(&zone_), @@ -200,7 +196,6 @@ class WasmGCTester { const FlagScope<bool> flag_liftoff; const FlagScope<bool> flag_liftoff_only; const FlagScope<bool> flag_wasm_dynamic_tiering; - const FlagScope<bool> flag_canonicalization; const FlagScope<bool> flag_tierup; byte DefineFunctionImpl(WasmFunctionBuilder* fun, @@ -1190,27 +1185,31 @@ WASM_COMPILED_EXEC_TEST(RefTrivialCastsStatic) { FunctionSig sig(1, 2, sig_types); byte sig_index = tester.DefineSignature(&sig); - const byte kRefTestNull = tester.DefineFunction( + const byte kRefTestNullDeprecated = tester.DefineFunction( tester.sigs.i_v(), {}, - {WASM_REF_TEST(WASM_REF_NULL(type_index), subtype_index), kExprEnd}); + {WASM_REF_TEST_DEPRECATED(WASM_REF_NULL(type_index), subtype_index), + kExprEnd}); // Upcasts should be optimized away for nominal types. 
- const byte kRefTestUpcast = tester.DefineFunction( + const byte kRefTestUpcastDeprecated = tester.DefineFunction( tester.sigs.i_v(), {}, - {WASM_REF_TEST(WASM_STRUCT_NEW_DEFAULT(subtype_index), type_index), + {WASM_REF_TEST_DEPRECATED(WASM_STRUCT_NEW_DEFAULT(subtype_index), + type_index), kExprEnd}); - const byte kRefTestUpcastNull = tester.DefineFunction( + const byte kRefTestUpcastNullDeprecated = tester.DefineFunction( tester.sigs.i_v(), {}, - {WASM_REF_TEST(WASM_REF_NULL(subtype_index), type_index), kExprEnd}); - const byte kRefTestUnrelatedNullable = tester.DefineFunction( + {WASM_REF_TEST_DEPRECATED(WASM_REF_NULL(subtype_index), type_index), + kExprEnd}); + const byte kRefTestUnrelatedNullableDeprecated = tester.DefineFunction( tester.sigs.i_v(), {refNull(subtype_index)}, {WASM_LOCAL_SET(0, WASM_STRUCT_NEW_DEFAULT(subtype_index)), - WASM_REF_TEST(WASM_LOCAL_GET(0), sig_index), kExprEnd}); - const byte kRefTestUnrelatedNull = tester.DefineFunction( + WASM_REF_TEST_DEPRECATED(WASM_LOCAL_GET(0), sig_index), kExprEnd}); + const byte kRefTestUnrelatedNullDeprecated = tester.DefineFunction( tester.sigs.i_v(), {}, - {WASM_REF_TEST(WASM_REF_NULL(subtype_index), sig_index), kExprEnd}); - const byte kRefTestUnrelatedNonNullable = tester.DefineFunction( + {WASM_REF_TEST_DEPRECATED(WASM_REF_NULL(subtype_index), sig_index), + kExprEnd}); + const byte kRefTestUnrelatedNonNullableDeprecated = tester.DefineFunction( tester.sigs.i_v(), {}, - {WASM_REF_TEST(WASM_STRUCT_NEW_DEFAULT(type_index), sig_index), + {WASM_REF_TEST_DEPRECATED(WASM_STRUCT_NEW_DEFAULT(type_index), sig_index), kExprEnd}); const byte kRefCastNull = @@ -1333,12 +1332,12 @@ WASM_COMPILED_EXEC_TEST(RefTrivialCastsStatic) { tester.CompileModule(); - tester.CheckResult(kRefTestNull, 0); - tester.CheckResult(kRefTestUpcast, 1); - tester.CheckResult(kRefTestUpcastNull, 0); - tester.CheckResult(kRefTestUnrelatedNullable, 0); - tester.CheckResult(kRefTestUnrelatedNull, 0); - tester.CheckResult(kRefTestUnrelatedNonNullable, 0); + tester.CheckResult(kRefTestNullDeprecated, 0); + tester.CheckResult(kRefTestUpcastDeprecated, 1); + tester.CheckResult(kRefTestUpcastNullDeprecated, 0); + tester.CheckResult(kRefTestUnrelatedNullableDeprecated, 0); + tester.CheckResult(kRefTestUnrelatedNullDeprecated, 0); + tester.CheckResult(kRefTestUnrelatedNonNullableDeprecated, 0); tester.CheckResult(kRefCastNull, 1); tester.CheckResult(kRefCastUpcast, 0); @@ -1485,6 +1484,16 @@ WASM_COMPILED_EXEC_TEST(FunctionRefs) { const byte cast_reference = tester.DefineFunction( &sig_func, {}, {WASM_REF_FUNC(sig_index), kExprEnd}); + const byte test_deprecated = tester.DefineFunction( + tester.sigs.i_v(), {kWasmFuncRef}, + {WASM_LOCAL_SET(0, WASM_REF_FUNC(func_index)), + WASM_REF_TEST_DEPRECATED(WASM_LOCAL_GET(0), sig_index), kExprEnd}); + + const byte test_fail_deprecated = tester.DefineFunction( + tester.sigs.i_v(), {kWasmFuncRef}, + {WASM_LOCAL_SET(0, WASM_REF_FUNC(func_index)), + WASM_REF_TEST_DEPRECATED(WASM_LOCAL_GET(0), other_sig_index), kExprEnd}); + const byte test = tester.DefineFunction( tester.sigs.i_v(), {kWasmFuncRef}, {WASM_LOCAL_SET(0, WASM_REF_FUNC(func_index)), @@ -1513,6 +1522,8 @@ WASM_COMPILED_EXEC_TEST(FunctionRefs) { CHECK_EQ(cast_function->code().raw_instruction_start(), cast_function_reference->code().raw_instruction_start()); + tester.CheckResult(test_deprecated, 1); + tester.CheckResult(test_fail_deprecated, 0); tester.CheckResult(test, 1); tester.CheckResult(test_fail, 0); } diff --git a/deps/v8/test/cctest/wasm/test-run-wasm-bulk-memory.cc 
b/deps/v8/test/cctest/wasm/test-run-wasm-bulk-memory.cc index c38d81a049eb17..f8523b114ce47e 100644 --- a/deps/v8/test/cctest/wasm/test-run-wasm-bulk-memory.cc +++ b/deps/v8/test/cctest/wasm/test-run-wasm-bulk-memory.cc @@ -565,7 +565,7 @@ void TestTableCopyElems(TestExecutionTier execution_tier, int table_dst, WASM_LOCAL_GET(1), WASM_LOCAL_GET(2)), kExprI32Const, 0); - r.builder().FreezeSignatureMapAndInitializeWrapperCache(); + r.builder().InitializeWrapperCache(); auto table = handle(WasmTableObject::cast( @@ -714,7 +714,7 @@ void TestTableCopyOobWrites(TestExecutionTier execution_tier, int table_dst, WASM_LOCAL_GET(1), WASM_LOCAL_GET(2)), kExprI32Const, 0); - r.builder().FreezeSignatureMapAndInitializeWrapperCache(); + r.builder().InitializeWrapperCache(); auto table = handle(WasmTableObject::cast( diff --git a/deps/v8/test/cctest/wasm/test-run-wasm-exceptions.cc b/deps/v8/test/cctest/wasm/test-run-wasm-exceptions.cc index 282d5832398254..c125a427f82351 100644 --- a/deps/v8/test/cctest/wasm/test-run-wasm-exceptions.cc +++ b/deps/v8/test/cctest/wasm/test-run-wasm-exceptions.cc @@ -598,7 +598,7 @@ UNINITIALIZED_WASM_EXEC_TEST(TestStackOverflowNotCaught) { TestSignatures sigs; EXPERIMENTAL_FLAG_SCOPE(eh); // v8_flags.stack_size must be set before isolate initialization. - FlagScope<int32_t> stack_size(&v8::internal::v8_flags.stack_size, 8); + FlagScope<int32_t> stack_size(&v8_flags.stack_size, 8); IsolateScope isolate_scope; LocalContext context(isolate_scope.isolate()); diff --git a/deps/v8/test/cctest/wasm/test-run-wasm-interpreter.cc b/deps/v8/test/cctest/wasm/test-run-wasm-interpreter.cc index 4023f94b07267b..d1526ac2457676 100644 --- a/deps/v8/test/cctest/wasm/test-run-wasm-interpreter.cc +++ b/deps/v8/test/cctest/wasm/test-run-wasm-interpreter.cc @@ -157,7 +157,7 @@ static T factorial(T v) { TEST(Run_Wasm_returnCallFactorial) { EXPERIMENTAL_FLAG_SCOPE(return_call); // Run in bounded amount of stack - 8kb. 
- FlagScope<int32_t> stack_size(&v8::internal::v8_flags.stack_size, 8); + FlagScope<int32_t> stack_size(&v8_flags.stack_size, 8); WasmRunner<uint32_t, int32_t> r(TestExecutionTier::kInterpreter); diff --git a/deps/v8/test/cctest/wasm/test-run-wasm-js.cc b/deps/v8/test/cctest/wasm/test-run-wasm-js.cc index 253dc56397d906..2d51f0fe9dd87c 100644 --- a/deps/v8/test/cctest/wasm/test-run-wasm-js.cc +++ b/deps/v8/test/cctest/wasm/test-run-wasm-js.cc @@ -116,42 +116,6 @@ WASM_COMPILED_EXEC_TEST(Run_CallJS_Add_jswrapped) { r.CheckCallViaJS(-666666801, -666666900); } -WASM_COMPILED_EXEC_TEST(Run_IndirectCallJSFunction) { - Isolate* isolate = CcTest::InitIsolateOnce(); - HandleScope scope(isolate); - TestSignatures sigs; - - const char* source = "(function(a, b, c) { if(c) return a; return b; })"; - Handle<JSFunction> js_function = - Handle<JSFunction>::cast(v8::Utils::OpenHandle( - *v8::Local<v8::Function>::Cast(CompileRun(source)))); - - ManuallyImportedJSFunction import = {sigs.i_iii(), js_function}; - - WasmRunner<int32_t, int32_t> r(execution_tier, &import); - - const uint32_t js_index = 0; - const int32_t left = -2; - const int32_t right = 3; - - WasmFunctionCompiler& rc_fn = r.NewFunction(sigs.i_i(), "rc"); - - byte sig_index = r.builder().AddSignature(sigs.i_iii()); - uint16_t indirect_function_table[] = {static_cast<uint16_t>(js_index)}; - - r.builder().AddIndirectFunctionTable(indirect_function_table, - arraysize(indirect_function_table)); - - BUILD(rc_fn, WASM_CALL_INDIRECT(sig_index, WASM_I32V(left), WASM_I32V(right), - WASM_LOCAL_GET(0), WASM_I32V(js_index))); - - Handle<Object> args_left[] = {isolate->factory()->NewNumber(1)}; - r.CheckCallApplyViaJS(left, rc_fn.function_index(), args_left, 1); - - Handle<Object> args_right[] = {isolate->factory()->NewNumber(0)}; - r.CheckCallApplyViaJS(right, rc_fn.function_index(), args_right, 1); -} - void RunJSSelectTest(TestExecutionTier tier, int which) { const int kMaxParams = 8; PredictableInputValues inputs(0x100); @@ -561,10 +525,6 @@ WASM_COMPILED_EXEC_TEST(Run_ReturnCallImportedFunction) { RunPickerTest(execution_tier, false); } -WASM_COMPILED_EXEC_TEST(Run_ReturnCallIndirectImportedFunction) { - RunPickerTest(execution_tier, true); -} - } // namespace wasm } // namespace internal } // namespace v8 diff --git a/deps/v8/test/cctest/wasm/test-run-wasm.cc b/deps/v8/test/cctest/wasm/test-run-wasm.cc index 4173532584b4b6..9b967e239f1070 100644 --- a/deps/v8/test/cctest/wasm/test-run-wasm.cc +++ b/deps/v8/test/cctest/wasm/test-run-wasm.cc @@ -2514,7 +2514,7 @@ class IsolateScope { UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_Factorial) { EXPERIMENTAL_FLAG_SCOPE(return_call); // Run in bounded amount of stack - 8kb. - FlagScope<int32_t> stack_size(&v8::internal::v8_flags.stack_size, 8); + FlagScope<int32_t> stack_size(&v8_flags.stack_size, 8); IsolateScope isolate_scope; LocalContext current(isolate_scope.isolate()); @@ -2552,7 +2552,7 @@ UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_Factorial) { UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_MutualFactorial) { EXPERIMENTAL_FLAG_SCOPE(return_call); // Run in bounded amount of stack - 8kb. - FlagScope<int32_t> stack_size(&v8::internal::v8_flags.stack_size, 8); + FlagScope<int32_t> stack_size(&v8_flags.stack_size, 8); IsolateScope isolate_scope; LocalContext current(isolate_scope.isolate()); @@ -2599,7 +2599,7 @@ UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_MutualFactorial) { UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_IndirectFactorial) { EXPERIMENTAL_FLAG_SCOPE(return_call); // Run in bounded amount of stack - 8kb. 
- FlagScope<int32_t> stack_size(&v8::internal::v8_flags.stack_size, 8); + FlagScope<int32_t> stack_size(&v8_flags.stack_size, 8); IsolateScope isolate_scope; LocalContext current(isolate_scope.isolate()); @@ -2648,7 +2648,7 @@ UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_IndirectFactorial) { UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_Sum) { EXPERIMENTAL_FLAG_SCOPE(return_call); // Run in bounded amount of stack - 8kb. - FlagScope<int32_t> stack_size(&v8::internal::v8_flags.stack_size, 8); + FlagScope<int32_t> stack_size(&v8_flags.stack_size, 8); IsolateScope isolate_scope; LocalContext current(isolate_scope.isolate()); @@ -2690,7 +2690,7 @@ UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_Sum) { UNINITIALIZED_WASM_EXEC_TEST(ReturnCall_Bounce_Sum) { EXPERIMENTAL_FLAG_SCOPE(return_call); // Run in bounded amount of stack - 8kb. - FlagScope<int32_t> stack_size(&v8::internal::v8_flags.stack_size, 8); + FlagScope<int32_t> stack_size(&v8_flags.stack_size, 8); IsolateScope isolate_scope; LocalContext current(isolate_scope.isolate()); @@ -2755,7 +2755,7 @@ static void Run_WasmMixedCall_N(TestExecutionTier execution_tier, int start) { int num_params = static_cast<int>(arraysize(mixed)) - start; for (int which = 0; which < num_params; ++which) { - v8::internal::AccountingAllocator allocator; + AccountingAllocator allocator; Zone zone(&allocator, ZONE_NAME); WasmRunner<int32_t> r(execution_tier); r.builder().AddMemory(kWasmPageSize); diff --git a/deps/v8/test/cctest/wasm/test-streaming-compilation.cc b/deps/v8/test/cctest/wasm/test-streaming-compilation.cc index 32566c7fca580a..393a088adffcae 100644 --- a/deps/v8/test/cctest/wasm/test-streaming-compilation.cc +++ b/deps/v8/test/cctest/wasm/test-streaming-compilation.cc @@ -1253,9 +1253,8 @@ STREAM_TEST(TestIncrementalCaching) { Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); Handle<Script> script = GetWasmEngine()->GetOrCreateScript( i_isolate, tester.shared_native_module(), kNoSourceUrl); - Handle<FixedArray> export_wrappers = i_isolate->factory()->NewFixedArray(3); - Handle<WasmModuleObject> module_object = WasmModuleObject::New( - i_isolate, tester.shared_native_module(), script, export_wrappers); + Handle<WasmModuleObject> module_object = + WasmModuleObject::New(i_isolate, tester.shared_native_module(), script); ErrorThrower thrower(i_isolate, "Instantiation"); // We instantiated before, so the second instantiation must also succeed: Handle<WasmInstanceObject> instance = diff --git a/deps/v8/test/cctest/wasm/test-wasm-import-wrapper-cache.cc b/deps/v8/test/cctest/wasm/test-wasm-import-wrapper-cache.cc index b8a22392a8468a..9e11ab13e02ef7 100644 --- a/deps/v8/test/cctest/wasm/test-wasm-import-wrapper-cache.cc +++ b/deps/v8/test/cctest/wasm/test-wasm-import-wrapper-cache.cc @@ -37,16 +37,19 @@ TEST(CacheHit) { auto kind = compiler::WasmImportCallKind::kJSFunctionArityMatch; auto sig = sigs.i_i(); + uint32_t canonical_type_index = + GetTypeCanonicalizer()->AddRecursiveGroup(sig); int expected_arity = static_cast<int>(sig->parameter_count()); - WasmCode* c1 = - CompileImportWrapper(module.get(), isolate->counters(), kind, sig, - expected_arity, kNoSuspend, &cache_scope); + WasmCode* c1 = CompileImportWrapper(module.get(), isolate->counters(), kind, + sig, canonical_type_index, expected_arity, + kNoSuspend, &cache_scope); CHECK_NOT_NULL(c1); CHECK_EQ(WasmCode::Kind::kWasmToJsWrapper, c1->kind()); - WasmCode* c2 = cache_scope[{kind, sig, expected_arity, kNoSuspend}]; + WasmCode* c2 = + cache_scope[{kind, canonical_type_index, expected_arity, kNoSuspend}]; 
CHECK_NOT_NULL(c2); CHECK_EQ(c1, c2); @@ -63,17 +66,22 @@ TEST(CacheMissSig) { auto kind = compiler::WasmImportCallKind::kJSFunctionArityMatch; auto sig1 = sigs.i_i(); int expected_arity1 = static_cast<int>(sig1->parameter_count()); + uint32_t canonical_type_index1 = + GetTypeCanonicalizer()->AddRecursiveGroup(sig1); auto sig2 = sigs.i_ii(); int expected_arity2 = static_cast<int>(sig2->parameter_count()); + uint32_t canonical_type_index2 = + GetTypeCanonicalizer()->AddRecursiveGroup(sig2); - WasmCode* c1 = - CompileImportWrapper(module.get(), isolate->counters(), kind, sig1, - expected_arity1, kNoSuspend, &cache_scope); + WasmCode* c1 = CompileImportWrapper( + module.get(), isolate->counters(), kind, sig1, canonical_type_index1, + expected_arity1, kNoSuspend, &cache_scope); CHECK_NOT_NULL(c1); CHECK_EQ(WasmCode::Kind::kWasmToJsWrapper, c1->kind()); - WasmCode* c2 = cache_scope[{kind, sig2, expected_arity2, kNoSuspend}]; + WasmCode* c2 = + cache_scope[{kind, canonical_type_index2, expected_arity2, kNoSuspend}]; CHECK_NULL(c2); } @@ -90,15 +98,18 @@ TEST(CacheMissKind) { auto kind2 = compiler::WasmImportCallKind::kJSFunctionArityMismatch; auto sig = sigs.i_i(); int expected_arity = static_cast<int>(sig->parameter_count()); + uint32_t canonical_type_index = + GetTypeCanonicalizer()->AddRecursiveGroup(sig); - WasmCode* c1 = - CompileImportWrapper(module.get(), isolate->counters(), kind1, sig, - expected_arity, kNoSuspend, &cache_scope); + WasmCode* c1 = CompileImportWrapper(module.get(), isolate->counters(), kind1, + sig, canonical_type_index, expected_arity, + kNoSuspend, &cache_scope); CHECK_NOT_NULL(c1); CHECK_EQ(WasmCode::Kind::kWasmToJsWrapper, c1->kind()); - WasmCode* c2 = cache_scope[{kind2, sig, expected_arity, kNoSuspend}]; + WasmCode* c2 = + cache_scope[{kind2, canonical_type_index, expected_arity, kNoSuspend}]; CHECK_NULL(c2); } @@ -114,31 +125,39 @@ TEST(CacheHitMissSig) { auto kind = compiler::WasmImportCallKind::kJSFunctionArityMatch; auto sig1 = sigs.i_i(); int expected_arity1 = static_cast<int>(sig1->parameter_count()); + uint32_t canonical_type_index1 = + GetTypeCanonicalizer()->AddRecursiveGroup(sig1); auto sig2 = sigs.i_ii(); int expected_arity2 = static_cast<int>(sig2->parameter_count()); + uint32_t canonical_type_index2 = + GetTypeCanonicalizer()->AddRecursiveGroup(sig2); - WasmCode* c1 = - CompileImportWrapper(module.get(), isolate->counters(), kind, sig1, - expected_arity1, kNoSuspend, &cache_scope); + WasmCode* c1 = CompileImportWrapper( + module.get(), isolate->counters(), kind, sig1, canonical_type_index1, + expected_arity1, kNoSuspend, &cache_scope); CHECK_NOT_NULL(c1); CHECK_EQ(WasmCode::Kind::kWasmToJsWrapper, c1->kind()); - WasmCode* c2 = cache_scope[{kind, sig2, expected_arity2, kNoSuspend}]; + WasmCode* c2 = + cache_scope[{kind, canonical_type_index2, expected_arity2, kNoSuspend}]; CHECK_NULL(c2); c2 = CompileImportWrapper(module.get(), isolate->counters(), kind, sig2, - expected_arity2, kNoSuspend, &cache_scope); + canonical_type_index2, expected_arity2, kNoSuspend, + &cache_scope); CHECK_NE(c1, c2); - WasmCode* c3 = cache_scope[{kind, sig1, expected_arity1, kNoSuspend}]; + WasmCode* c3 = + cache_scope[{kind, canonical_type_index1, expected_arity1, kNoSuspend}]; CHECK_NOT_NULL(c3); CHECK_EQ(c1, c3); - WasmCode* c4 = cache_scope[{kind, sig2, expected_arity2, kNoSuspend}]; + WasmCode* c4 = + cache_scope[{kind, canonical_type_index2, expected_arity2, kNoSuspend}]; CHECK_NOT_NULL(c4); CHECK_EQ(c2, c4); diff --git a/deps/v8/test/cctest/wasm/wasm-run-utils.cc 
b/deps/v8/test/cctest/wasm/wasm-run-utils.cc index d7e05141b1fe65..a2044abbe7a12c 100644 --- a/deps/v8/test/cctest/wasm/wasm-run-utils.cc +++ b/deps/v8/test/cctest/wasm/wasm-run-utils.cc @@ -82,14 +82,17 @@ TestingModuleBuilder::TestingModuleBuilder( Handle<JSReceiver> callable = resolved.callable; WasmImportWrapperCache::ModificationScope cache_scope( native_module_->import_wrapper_cache()); + uint32_t canonical_type_index = + GetTypeCanonicalizer()->AddRecursiveGroup(maybe_import->sig); WasmImportWrapperCache::CacheKey key( - kind, maybe_import->sig, + kind, canonical_type_index, static_cast<int>(maybe_import->sig->parameter_count()), kNoSuspend); auto import_wrapper = cache_scope[key]; if (import_wrapper == nullptr) { CodeSpaceWriteScope write_scope(native_module_); import_wrapper = CompileImportWrapper( native_module_, isolate_->counters(), kind, maybe_import->sig, + canonical_type_index, static_cast<int>(maybe_import->sig->parameter_count()), kNoSuspend, &cache_scope); } @@ -155,7 +158,6 @@ uint32_t TestingModuleBuilder::AddFunction(const FunctionSig* sig, index, // func_index 0, // sig_index {0, 0}, // code - 0, // feedback slots false, // imported false, // exported false}); // declared @@ -182,18 +184,14 @@ uint32_t TestingModuleBuilder::AddFunction(const FunctionSig* sig, return index; } -void TestingModuleBuilder::FreezeSignatureMapAndInitializeWrapperCache() { - if (test_module_->signature_map.is_frozen()) return; - test_module_->signature_map.Freeze(); - size_t max_num_sigs = MaxNumExportWrappers(test_module_.get()); - Handle<FixedArray> export_wrappers = - isolate_->factory()->NewFixedArray(static_cast<int>(max_num_sigs)); - instance_object_->module_object().set_export_wrappers(*export_wrappers); +void TestingModuleBuilder::InitializeWrapperCache() { + isolate_->heap()->EnsureWasmCanonicalRttsSize( + test_module_->MaxCanonicalTypeIndex() + 1); } Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) { CHECK(!interpreter_); - FreezeSignatureMapAndInitializeWrapperCache(); + InitializeWrapperCache(); return handle( JSFunction::cast(WasmInstanceObject::GetOrCreateWasmInternalFunction( isolate_, instance_object(), index) @@ -241,10 +239,7 @@ void TestingModuleBuilder::AddIndirectFunctionTable( for (uint32_t i = 0; i < table_size; ++i) { WasmFunction& function = test_module_->functions[function_indexes[i]]; int sig_id = - v8_flags.wasm_type_canonicalization - ? 
test_module_ - ->isorecursive_canonical_type_ids[function.sig_index] - : test_module_->signature_map.Find(*function.sig); + test_module_->isorecursive_canonical_type_ids[function.sig_index]; FunctionTargetAndRef entry(instance, function.func_index); instance->GetIndirectFunctionTable(isolate_, table_index) ->Set(i, sig_id, entry.call_target(), *entry.ref()); diff --git a/deps/v8/test/cctest/wasm/wasm-run-utils.h b/deps/v8/test/cctest/wasm/wasm-run-utils.h index 0ce398339c6b1b..fc968701578b7b 100644 --- a/deps/v8/test/cctest/wasm/wasm-run-utils.h +++ b/deps/v8/test/cctest/wasm/wasm-run-utils.h @@ -135,14 +135,10 @@ class TestingModuleBuilder { } byte AddSignature(const FunctionSig* sig) { - DCHECK_EQ(test_module_->types.size(), - test_module_->per_module_canonical_type_ids.size()); test_module_->add_signature(sig, kNoSuperType); - if (v8_flags.wasm_type_canonicalization) { - GetTypeCanonicalizer()->AddRecursiveGroup(test_module_.get(), 1); - instance_object_->set_isorecursive_canonical_types( - test_module_->isorecursive_canonical_type_ids.data()); - } + GetTypeCanonicalizer()->AddRecursiveGroup(test_module_.get(), 1); + instance_object_->set_isorecursive_canonical_types( + test_module_->isorecursive_canonical_type_ids.data()); size_t size = test_module_->types.size(); CHECK_GT(127, size); return static_cast<byte>(size - 1); @@ -213,7 +209,7 @@ class TestingModuleBuilder { // Freezes the signature map of the module and allocates the storage for // export wrappers. - void FreezeSignatureMapAndInitializeWrapperCache(); + void InitializeWrapperCache(); // Wrap the code so it can be called as a JS function. Handle<JSFunction> WrapCode(uint32_t index); diff --git a/deps/v8/test/common/wasm/wasm-interpreter.cc b/deps/v8/test/common/wasm/wasm-interpreter.cc index 468bfe1e4f1cf1..bb34b53b43d3f5 100644 --- a/deps/v8/test/common/wasm/wasm-interpreter.cc +++ b/deps/v8/test/common/wasm/wasm-interpreter.cc @@ -20,7 +20,6 @@ #include "src/utils/utils.h" #include "src/wasm/decoder.h" #include "src/wasm/function-body-decoder-impl.h" -#include "src/wasm/function-body-decoder.h" #include "src/wasm/memory-tracing.h" #include "src/wasm/module-compiler.h" #include "src/wasm/wasm-arguments.h" @@ -750,7 +749,7 @@ class SideTable : public ZoneObject { max_exception_arity, static_cast<int>(tag.sig->parameter_count())); } } - for (BytecodeIterator i(code->start, code->end, &code->locals); + for (BytecodeIterator i(code->start, code->end, &code->locals, zone); i.has_next(); i.next()) { WasmOpcode opcode = i.current(); int32_t exceptional_stack_height = 0; @@ -1120,8 +1119,8 @@ class CodeMap { void AddFunction(const WasmFunction* function, const byte* code_start, const byte* code_end) { - InterpreterCode code = {function, BodyLocalDecls(zone_), code_start, - code_end, nullptr}; + InterpreterCode code = {function, BodyLocalDecls{}, code_start, code_end, + nullptr}; DCHECK_EQ(interpreter_code_.size(), function->func_index); interpreter_code_.push_back(code); @@ -1335,7 +1334,7 @@ class WasmInterpreterInternals { // Limit of parameters. sp_t plimit() { return sp + code->function->sig->parameter_count(); } // Limit of locals. - sp_t llimit() { return plimit() + code->locals.type_list.size(); } + sp_t llimit() { return plimit() + code->locals.num_locals; } Handle<FixedArray> caught_exception_stack; }; @@ -1408,7 +1407,7 @@ class WasmInterpreterInternals { // Check if there is room for a function's activation. 
void EnsureStackSpaceForCall(InterpreterCode* code) { EnsureStackSpace(code->side_table->max_stack_height_ + - code->locals.type_list.size()); + code->locals.num_locals); DCHECK_GE(StackHeight(), code->function->sig->parameter_count()); } @@ -1431,7 +1430,8 @@ class WasmInterpreterInternals { } pc_t InitLocals(InterpreterCode* code) { - for (ValueType p : code->locals.type_list) { + for (ValueType p : + base::VectorOf(code->locals.local_types, code->locals.num_locals)) { WasmValue val; switch (p.kind()) { #define CASE_TYPE(valuetype, ctype) \ @@ -3314,8 +3314,7 @@ class WasmInterpreterInternals { DCHECK(!frames_.empty()); // There must be enough space on the stack to hold the arguments, locals, // and the value stack. - DCHECK_LE(code->function->sig->parameter_count() + - code->locals.type_list.size() + + DCHECK_LE(code->function->sig->parameter_count() + code->locals.num_locals + code->side_table->max_stack_height_, stack_limit_ - stack_.get() - frames_.back().sp); // Seal the surrounding {HandleScope} to ensure that all cases within the @@ -3668,8 +3667,7 @@ class WasmInterpreterInternals { FOREACH_WASMVALUE_CTYPES(CASE_TYPE) #undef CASE_TYPE case kRef: - case kRefNull: - case kRtt: { + case kRefNull: { // TODO(7748): Type checks or DCHECKs for ref types? HandleScope handle_scope(isolate_); // Avoid leaking handles. Handle<FixedArray> global_buffer; // The buffer of the global. @@ -3681,6 +3679,7 @@ class WasmInterpreterInternals { global_buffer->set(global_index, *ref); break; } + case kRtt: case kI8: case kI16: case kVoid: @@ -4103,13 +4102,7 @@ class WasmInterpreterInternals { uint32_t sig_index) { HandleScope handle_scope(isolate_); // Avoid leaking handles. uint32_t expected_sig_id; - if (v8_flags.wasm_type_canonicalization) { - expected_sig_id = module()->isorecursive_canonical_type_ids[sig_index]; - } else { - expected_sig_id = module()->per_module_canonical_type_ids[sig_index]; - DCHECK_EQ(static_cast<int>(expected_sig_id), - module()->signature_map.Find(*module()->signature(sig_index))); - } + expected_sig_id = module()->isorecursive_canonical_type_ids[sig_index]; Handle<WasmIndirectFunctionTable> table = instance_object_->GetIndirectFunctionTable(isolate_, table_index); @@ -4246,11 +4239,10 @@ ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting( 0, // func_index 0, // sig_index {0, 0}, // code - 0, // feedback slots false, // imported false, // exported false}; // declared - InterpreterCode code{&function, BodyLocalDecls(zone), start, end, nullptr}; + InterpreterCode code{&function, BodyLocalDecls{}, start, end, nullptr}; // Now compute and return the control transfers. 
SideTable side_table(zone, module, &code); diff --git a/deps/v8/test/common/wasm/wasm-macro-gen.h b/deps/v8/test/common/wasm/wasm-macro-gen.h index f2ed1fd4f9030b..d7f825d11e5760 100644 --- a/deps/v8/test/common/wasm/wasm-macro-gen.h +++ b/deps/v8/test/common/wasm/wasm-macro-gen.h @@ -521,6 +521,8 @@ inline uint16_t ExtractPrefixedOpcodeBytes(WasmOpcode opcode) { #define WASM_REF_IS_NULL(val) val, kExprRefIsNull #define WASM_REF_AS_NON_NULL(val) val, kExprRefAsNonNull #define WASM_REF_EQ(lhs, rhs) lhs, rhs, kExprRefEq +#define WASM_REF_TEST_DEPRECATED(ref, typeidx) \ + ref, WASM_GC_OP(kExprRefTestDeprecated), static_cast<byte>(typeidx) #define WASM_REF_TEST(ref, typeidx) \ ref, WASM_GC_OP(kExprRefTest), static_cast<byte>(typeidx) #define WASM_REF_CAST(ref, typeidx) \ diff --git a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-2.js b/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-2.js deleted file mode 100644 index 5edd03ca582eeb..00000000000000 --- a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-2.js +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2016 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - - -// Test that debug-evaluate correctly collects free outer variables -// and does not get confused by variables in nested scopes. - -Debug = debug.Debug - -var exception = null; -function listener(event, exec_state, event_data, data) { - if (event != Debug.DebugEvent.Break) return; - try { - assertThrows(() => exec_state.frame(0).evaluate("x").value()); - } catch (e) { - exception = e; - print(e + e.stack); - } -} - -Debug.setListener(listener); - -(function() { - var x = 1; // context allocate x - (() => x); - (function() { - var x = 2; // stack allocate shadowing x - (function() { - { // context allocate x in a nested scope - let x = 3; - (() => x); - } - debugger; - })(); - })(); -})(); - -Debug.setListener(null); -assertNull(exception); diff --git a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-3.js b/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-3.js deleted file mode 100644 index 2a41109565cae7..00000000000000 --- a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-3.js +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2019 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// Test that debug-evaluate properly shadows stack-allocated variables. - -Debug = debug.Debug - -let exception = null; -function listener(event, exec_state, event_data, data) { - if (event != Debug.DebugEvent.Break) return; - try { - assertEquals(2, exec_state.frame(0).evaluate("b").value()); - assertEquals(3, exec_state.frame(0).evaluate("c").value()) - assertThrows(() => exec_state.frame(0).evaluate("a").value()); - } catch (e) { - exception = e; - print(e + e.stack); - } -} - -Debug.setListener(listener); - -(function f() { - let a = 1; - let b = 2; - let c = 3; - () => a + c; // a and c are context-allocated - return function g() { - let a = 2; // a is stack-allocated - return function h() { - b; // b is allocated onto f's context. 
-        debugger;
-      }
-    }
-  }
-})()()();
-
-Debug.setListener(null);
-assertNull(exception);
diff --git a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-4.js b/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-4.js
deleted file mode 100644
index 52f0ac5c456ef5..00000000000000
--- a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-4.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2022 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Test that debug-evaluate properly shadows stack-allocated variables.
-
-Debug = debug.Debug
-
-let exception = null;
-function listener(event, exec_state, event_data, data) {
-  if (event != Debug.DebugEvent.Break) return;
-  try {
-    assertThrows(() => exec_state.frame(0).evaluate("a").value());
-  } catch (e) {
-    exception = e;
-    print(e + e.stack);
-  }
-}
-
-Debug.setListener(listener);
-
-(function f() {
-  let a = 1;
-  () => a; // a is context-allocated
-  return function g() {
-    let a = 2; // a is stack-allocated
-    {
-      let b = 3;
-      return function h() {
-        debugger;
-      }
-    }
-  }
-})()()();
-
-Debug.setListener(null);
-assertNull(exception);
diff --git a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-reuse.js b/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-reuse.js
new file mode 100644
index 00000000000000..afd74a520e0070
--- /dev/null
+++ b/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context-reuse.js
@@ -0,0 +1,248 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --no-analyze-environment-liveness --experimental-reuse-locals-blocklists
+
+// Test that debug-evaluate only resolves variables that are used by
+// the function inside which we debug-evaluate. This is to avoid
+// incorrect variable resolution when a context-allocated variable is
+// shadowed by a stack-allocated variable.
+//
+// This test is an exact copy of `debug-evaluate-shadowed-context` modulo an
+// experimental flag. While the feature is in development, we want to test both
+// configurations without having to introduce a separate bot.
+
+Debug = debug.Debug
+
+let test_name;
+let listener_delegate;
+let listener_called;
+let exception;
+let begin_test_count = 0;
+let end_test_count = 0;
+let break_count = 0;
+
+// Debug event listener which delegates.
+function listener(event, exec_state, event_data, data) {
+  try {
+    if (event == Debug.DebugEvent.Break) {
+      break_count++;
+      listener_called = true;
+      listener_delegate(exec_state);
+    }
+  } catch (e) {
+    exception = e;
+    print(e, e.stack);
+  }
+}
+Debug.setListener(listener);
+
+function BeginTest(name) {
+  test_name = name;
+  listener_called = false;
+  exception = null;
+  begin_test_count++;
+}
+
+function EndTest() {
+  assertTrue(listener_called, "listener not called for " + test_name);
+  assertNull(exception, test_name + " / " + exception);
+  end_test_count++;
+}
+
+BeginTest("Check that 'x' resolves correctly and 'a' is written correctly");
+var a = "a";
+function f1() {
+  var x = 1; // context allocate x
+  (() => x);
+  var y = "y";
+  var z = "z";
+  (function () {
+    var x = 2; // stack allocate shadowing x
+    (function () {
+      y; // access y
+      debugger; // ReferenceError
+    })(); // 2
+  })(); // 1
+  return y;
+}
+
+listener_delegate = function(exec_state) {
+  for (var i = 0; i < exec_state.frameCount() - 1; i++) {
+    var frame = exec_state.frame(i);
+    var value;
+    try {
+      value = frame.evaluate("x").value();
+    } catch (e) {
+      value = e.name;
+    }
+    print(frame.sourceLineText());
+    var expected = frame.sourceLineText().match(/\/\/ (.*$)/)[1];
+    assertEquals(String(expected), String(value));
+  }
+  assertEquals("[object global]",
+               String(exec_state.frame(0).evaluate("this").value()));
+  assertEquals("y", exec_state.frame(0).evaluate("y").value());
+  assertEquals("a", exec_state.frame(0).evaluate("a").value());
+  exec_state.frame(0).evaluate("a = 'A'");
+  assertThrows(() => exec_state.frame(0).evaluate("z"), ReferenceError);
+}
+f1();
+assertEquals("A", a);
+a = "a";
+EndTest();
+
+BeginTest("Check that a context-allocated 'this' works")
+function f2() {
+  var x = 1; // context allocate x
+  (() => x);
+  var y = "y";
+  var z = "z";
+  (function() {
+    var x = 2; // stack allocate shadowing x
+    (() => {
+      y;
+      a;
+      this; // context allocate receiver
+      debugger; // ReferenceError
+    })(); // 2
+  })(); // 1
+  return y;
+};
+
+// Uses the same listener delegate as for `f1`.
+f2();
+assertEquals("A", a);
+EndTest();
+
+BeginTest("Check that we don't get confused with nested scopes");
+function f3() {
+  var x = 1; // context allocate x
+  (() => x);
+  (function() {
+    var x = 2; // stack allocate shadowing x
+    (function() {
+      { // context allocate x in a nested scope
+        let x = 3;
+        (() => x);
+      }
+      debugger;
+    })();
+  })();
+}
+
+listener_delegate = function(exec_state) {
+  assertThrows(() => exec_state.frame(0).evaluate("x").value());
+}
+f3();
+EndTest();
+
+BeginTest("Check that stack-allocated variable is unavailable");
+function f4() {
+  let a = 1;
+  let b = 2;
+  let c = 3;
+  () => a + c; // a and c are context-allocated
+  return function g() {
+    let a = 2; // a is stack-allocated
+    return function h() {
+      b; // b is allocated onto f's context.
+      debugger;
+    }
+  }
+}
+
+listener_delegate = function(exec_state) {
+  assertEquals(2, exec_state.frame(0).evaluate("b").value());
+  assertEquals(3, exec_state.frame(0).evaluate("c").value())
+  assertThrows(() => exec_state.frame(0).evaluate("a").value());
+};
+(f4())()();
+EndTest();
+
+BeginTest("Check that block lists on the closure boundary work as expected");
+function f5() {
+  let a = 1;
+  () => a; // a is context-allocated
+  return function g() {
+    let a = 2; // a is stack-allocated
+    {
+      let b = 3;
+      return function h() {
+        debugger;
+      }
+    }
+  }
+}
+
+listener_delegate = function(exec_state) {
+  assertThrows(() => exec_state.frame(0).evaluate("a").value());
+};
+(f5())()();
+EndTest();
+
+BeginTest("Check that outer functions also get the correct block list calculated");
+// This test is important once we re-use block list info. The block list for `g`
+// needs to be correctly calculated already when we stop on break_position 1.
+
+let break_position;
+function f6() {
+  let a = 1; // stack-allocated
+  return function g() { // g itself doesn't require a context.
+    if (break_position === 2) debugger;
+    let a = 2; (() => a); // context-allocated
+    return function h() {
+      if (break_position === 1) debugger;
+    }
+  }
+}
+
+listener_delegate = function (exec_state) {
+  assertEquals(2, exec_state.frame(0).evaluate("a").value());
+}
+break_position = 1;
+(f6())()();
+EndTest();
+
+BeginTest("Check that outer functions also get the correct block list calculated (continued)");
+listener_delegate = function (exec_state) {
+  assertThrows(() => exec_state.frame(0).evaluate("a").value());
+}
+break_position = 2;
+(f6())()();
+EndTest();
+
+BeginTest("Check that 'inner' block lists, calculated on a previous pause, don't block the lookup");
+
+function f7(o) {
+  let a = 1; // stack-allocated.
+  with (o) { // create a with-scope whose block-list has 'a' in it.
+    if (break_position === 2) debugger;
+    (function g() {
+      if (break_position === 1) debugger; // Trigger block-list calculation for the with-scope.
+    })();
+  }
+}
+
+listener_delegate = function (exec_state) {
+  assertThrows(() => exec_state.frame(0).evaluate("a").value());
+}
+break_position = 1;
+f7({});
+EndTest();
+
+BeginTest("Check that 'inner' block lists, calculated on a previous pause, don't block the lookup (continued)");
+// The second time we pause the with-scope already has a block-list, but 'a' should be accessible as a
+// materialized stack-local.
+listener_delegate = function (exec_state) {
+  assertEquals(1, exec_state.frame(0).evaluate("a").value());
+}
+break_position = 2;
+f7({});
+EndTest();
+
+assertEquals(begin_test_count, break_count,
+             'one or more tests did not enter the debugger');
+assertEquals(begin_test_count, end_test_count,
+             'one or more tests did not have its result checked');
diff --git a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context.js b/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context.js
index 564bdc6fc34a0a..b91174b0688eb5 100644
--- a/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context.js
+++ b/deps/v8/test/debugger/debug/debug-evaluate-shadowed-context.js
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-// Flags: --no-analyze-environment-liveness
+// Flags: --no-analyze-environment-liveness --no-experimental-reuse-locals-blocklists
 
 // Test that debug-evaluate only resolves variables that are used by
 // the function inside which we debug-evaluate. This is to avoid
@@ -11,56 +11,86 @@ Debug = debug.Debug
 
-var exception = null;
+let test_name;
+let listener_delegate;
+let listener_called;
+let exception;
+let begin_test_count = 0;
+let end_test_count = 0;
+let break_count = 0;
+
+// Debug event listener which delegates.
 function listener(event, exec_state, event_data, data) {
-  if (event != Debug.DebugEvent.Break) return;
   try {
-    for (var i = 0; i < exec_state.frameCount() - 1; i++) {
-      var frame = exec_state.frame(i);
-      var value;
-      try {
-        value = frame.evaluate("x").value();
-      } catch (e) {
-        value = e.name;
-      }
-      print(frame.sourceLineText());
-      var expected = frame.sourceLineText().match(/\/\/ (.*$)/)[1];
-      assertEquals(String(expected), String(value));
+    if (event == Debug.DebugEvent.Break) {
+      break_count++;
+      listener_called = true;
+      listener_delegate(exec_state);
     }
-    assertEquals("[object global]",
-                 String(exec_state.frame(0).evaluate("this").value()));
-    assertEquals("y", exec_state.frame(0).evaluate("y").value());
-    assertEquals("a", exec_state.frame(0).evaluate("a").value());
-    exec_state.frame(0).evaluate("a = 'A'");
-    assertThrows(() => exec_state.frame(0).evaluate("z"), ReferenceError);
   } catch (e) {
     exception = e;
-    print(e + e.stack);
+    print(e, e.stack);
  }
 }
-
 Debug.setListener(listener);
 
+function BeginTest(name) {
+  test_name = name;
+  listener_called = false;
+  exception = null;
+  begin_test_count++;
+}
+
+function EndTest() {
+  assertTrue(listener_called, "listener not called for " + test_name);
+  assertNull(exception, test_name + " / " + exception);
+  end_test_count++;
+}
+
+BeginTest("Check that 'x' resolves correctly and 'a' is written correctly");
 var a = "a";
-(function() {
+function f1() {
   var x = 1;  // context allocate x
   (() => x);
   var y = "y";
   var z = "z";
-  (function() {
+  (function () {
     var x = 2;  // stack allocate shadowing x
-    (function() {
+    (function () {
       y;  // access y
       debugger;  // ReferenceError
     })();  // 2
   })();  // 1
   return y;
-})();
+}
+listener_delegate = function(exec_state) {
+  for (var i = 0; i < exec_state.frameCount() - 1; i++) {
+    var frame = exec_state.frame(i);
+    var value;
+    try {
+      value = frame.evaluate("x").value();
+    } catch (e) {
+      value = e.name;
+    }
+    print(frame.sourceLineText());
+    var expected = frame.sourceLineText().match(/\/\/ (.*$)/)[1];
+    assertEquals(String(expected), String(value));
+  }
+  assertEquals("[object global]",
+               String(exec_state.frame(0).evaluate("this").value()));
+  assertEquals("y", exec_state.frame(0).evaluate("y").value());
+  assertEquals("a", exec_state.frame(0).evaluate("a").value());
+  exec_state.frame(0).evaluate("a = 'A'");
+  assertThrows(() => exec_state.frame(0).evaluate("z"), ReferenceError);
+}
+f1();
 assertEquals("A", a);
 a = "a";
+EndTest();
 
-(function() {
+BeginTest("Check that a context-allocated 'this' works");
+function f2() {
   var x = 1;  // context allocate x
   (() => x);
   var y = "y";
@@ -75,9 +105,111 @@ a = "a";
     })();  // 2
   })();  // 1
   return y;
-})();
+};
+// Uses the same listener delegate as for `f1`.
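+// (That delegate checks how 'x' resolves in every frame, reads 'y' and 'a',
+// and writes a = 'A', which the assertEquals("A", a) below depends on.)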
+f2(); assertEquals("A", a); +EndTest(); + +BeginTest("Check that we don't get confused with nested scopes"); +function f3() { + var x = 1; // context allocate x + (() => x); + (function() { + var x = 2; // stack allocate shadowing x + (function() { + { // context allocate x in a nested scope + let x = 3; + (() => x); + } + debugger; + })(); + })(); +} + +listener_delegate = function(exec_state) { + assertThrows(() => exec_state.frame(0).evaluate("x").value()); +} +f3(); +EndTest(); + +BeginTest("Check that stack-allocated variable is unavailable"); +function f4() { + let a = 1; + let b = 2; + let c = 3; + () => a + c; // a and c are context-allocated + return function g() { + let a = 2; // a is stack-allocated + return function h() { + b; // b is allocated onto f's context. + debugger; + } + } +} + +listener_delegate = function(exec_state) { + assertEquals(2, exec_state.frame(0).evaluate("b").value()); + assertEquals(3, exec_state.frame(0).evaluate("c").value()) + assertThrows(() => exec_state.frame(0).evaluate("a").value()); +}; +(f4())()(); +EndTest(); + +BeginTest("Check that block lists on the closure boundary work as expected"); +function f5() { + let a = 1; + () => a; // a is context-allocated + return function g() { + let a = 2; // a is stack-allocated + { + let b = 3; + return function h() { + debugger; + } + } + } +} + +listener_delegate = function(exec_state) { + assertThrows(() => exec_state.frame(0).evaluate("a").value()); +}; +(f5())()(); +EndTest(); + +BeginTest("Check that outer functions also get the correct block list calculated"); +// This test is important once we re-use block list info. The block list for `g` +// needs to be correctly calculated already when we stop on break_position 1. + +let break_position; +function f6() { + let a = 1; // stack-allocated + return function g() { // g itself doesn't require a context. + if (break_position === 2) debugger; + let a = 2; (() => a); // context-allocated + return function h() { + if (break_position === 1) debugger; + } + } +} + +listener_delegate = function (exec_state) { + assertEquals(2, exec_state.frame(0).evaluate("a").value()); +} +break_position = 1; +(f6())()(); +EndTest(); + +BeginTest("Check that outer functions also get the correct block list calculated (continued)"); +listener_delegate = function (exec_state) { + assertThrows(() => exec_state.frame(0).evaluate("a").value()); +} +break_position = 2; +(f6())()(); +EndTest(); -Debug.setListener(null); -assertNull(exception); +assertEquals(begin_test_count, break_count, + 'one or more tests did not enter the debugger'); +assertEquals(begin_test_count, end_test_count, + 'one or more tests did not have its result checked'); diff --git a/deps/v8/test/debugger/debug/es6/debug-step-destructuring-assignment.js b/deps/v8/test/debugger/debug/es6/debug-step-destructuring-assignment.js deleted file mode 100644 index e6412abbf7303c..00000000000000 --- a/deps/v8/test/debugger/debug/es6/debug-step-destructuring-assignment.js +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2015 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- - -var exception = null; -var Debug = debug.Debug; -var break_count = 0; - -function listener(event, exec_state, event_data, data) { - if (event != Debug.DebugEvent.Break) return; - try { - var source = exec_state.frame(0).sourceLineText(); - print(source); - assertTrue(source.indexOf(`// B${break_count++}`) > 0); - if (source.indexOf("assertEquals") > 0) { - exec_state.prepareStep(Debug.StepAction.StepOver); - } else { - exec_state.prepareStep(Debug.StepAction.StepInto); - } - } catch (e) { - exception = e; - print(e); - } -}; - -Debug.setListener(listener); - -function f() { - var a, b, c, d; - debugger; // B0 - [ // B1 - a, // B2 - b, // B3 - c = 3 // B4 - ] = [1, 2]; - assertEquals({a:1,b:2,c:3}, {a, b, c}); // B5 - - [ // B6 - a, // B7 - [ - b, // B8 - c // B9 - ], - d // B10 - ] = [5, [6, 7], 8]; - assertEquals({a:5,b:6,c:7,d:8}, {a, b, c, d}); // B11 - - [ // B12 - a, // B13 - b, // B14 - ...c // B15 - ] = [1, 2, 3, 4]; - assertEquals({a:1,b:2,c:[3,4]}, {a, b, c}); // B16 - - ({ // B17 - a, // B18 - b, // B19 - c = 7 // B20 - } = {a: 5, b: 6}); - assertEquals({a:5,b:6,c:7}, {a, b, c}); // B21 - - ({ // B22 - a, // B23 - b = return1(), // B24 - c = return1() // B25 - } = {a: 5, b: 6}); - assertEquals({a:5,b:6,c:1}, {a, b, c}); // B28 - - ({ // B29 - x : a, // B30 - y : b, // B31 - z : c = 3 // B32 - } = {x: 1, y: 2}); - assertEquals({a:1,b:2,c:3}, {a, b, c}); // B33 -} // B34 - -function return1() { - return 1 // B26 - ; // B27 -} - -f(); -Debug.setListener(null); // B35 -assertNull(exception); diff --git a/deps/v8/test/debugger/debug/es6/debug-step-destructuring-bind.js b/deps/v8/test/debugger/debug/es6/debug-step-destructuring-bind.js deleted file mode 100644 index a610eef60426ab..00000000000000 --- a/deps/v8/test/debugger/debug/es6/debug-step-destructuring-bind.js +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2015 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- - -var exception = null; -var Debug = debug.Debug; -var break_count = 0; - -function listener(event, exec_state, event_data, data) { - if (event != Debug.DebugEvent.Break) return; - try { - var source = exec_state.frame(0).sourceLineText(); - print(source, break_count); - assertTrue(source.indexOf(`B${break_count++}`) > 0); - if (source.indexOf("assertEquals") > 0) { - exec_state.prepareStep(Debug.StepAction.StepOver); - } else { - exec_state.prepareStep(Debug.StepAction.StepInto); - } - } catch (e) { - exception = e; - print(e); - } // B34 -}; - -Debug.setListener(listener); - -var id = x => x; // B9 B10 B36 B37 - -function test() { - debugger; // B0 - function fx1([ - a, // B2 - b // B3 - ]) { - assertEquals([1, 2], [a, b]); // B4 - } // B5 - fx1([1, 2, 3]); // B1 - - function f2([ - a, // B7 - b = id(3) // B8 - ]) { - assertEquals([4, 3], [a, b]); // B11 - } // B12 - f2([4]); // B6 - - function f3({ - x: a, // B14 - y: b // B15 - }) { - assertEquals([5, 6], [a, b]); // B16 - } // B17 - f3({y: 6, x: 5}); // B13 - - function f4([ - a, // B19 - { - b, // B20 - c, // B21 - } - ]) { // B19 - assertEquals([2, 4, 6], [a, b, c]); // B22 - } // B23 - f4([2, {c: 6, b: 4}]); // B18 - - function f5([ - { - a, // B25 - b = 7 // B26 - }, - c = 3 // B27 - ] = [{a:1}]) { - assertEquals([1, 7, 3], [a, b, c]); // B28 - } // B29 - f5(); // B24 - - var name = "x"; // B30 - function f6({ - [id(name)]: a, // B34 B35 - b = a // B38 - }) { - assertEquals([9, 9], [a, b]); // B39 - } // B40 - var o6 = {}; // B31 - o6[name] = 9; // B32 - f6(o6); // B33 - - try { - throw [3, 4]; // B41 - } catch ([ - a, // B42 - b, // B43 - c = 6 // B44 - ]) { - assertEquals([3, 4, 6], [a, b, c]); // B45 - } - - var { - x: a, // B47 - y: b = 9 // B48 - } = { x: 4 }; // B46 - assertEquals([4, 9], [a, b]); // B49 -} // B50 - -test(); -Debug.setListener(null); // B51 -assertNull(exception); diff --git a/deps/v8/test/debugger/debug/es6/debug-stepnext-for.js b/deps/v8/test/debugger/debug/es6/debug-stepnext-for.js index c3c68423cd5084..b6c93fc974af01 100644 --- a/deps/v8/test/debugger/debug/es6/debug-stepnext-for.js +++ b/deps/v8/test/debugger/debug/es6/debug-stepnext-for.js @@ -98,35 +98,109 @@ print("log:\n"+ JSON.stringify(log)); // based on other values. var expected = [ // Entry - "a2", + 'a2', // Empty for-in-var: get enumerable - "c16", + 'c16', // Empty for-in: get enumerable - "d12", + 'd12', // For-in-var: get enumerable, assign, body, assign, body, ... - "e16","e11","E4","e11","E4","e11","E4","e11", + 'e16', + 'e11', + 'E4', + 'e11', + 'E4', + 'e11', + 'E4', + 'e11', // For-in: get enumerable, assign, body, assign, body, ... - "f12","f7","F4","f7","F4","f7","F4","f7", + 'f12', + 'f7', + 'F4', + 'f7', + 'F4', + 'f7', + 'F4', + 'f7', // For-in-let: get enumerable, next, body, next, ... - "g16","g11","G4","g11","G4","g11","G4","g11", + 'g16', + 'g11', + 'G4', + 'g11', + 'G4', + 'g11', + 'G4', + 'g11', // For-of-var: [Symbol.iterator](), next(), body, next(), body, ... - "h16","h11","H4","h11","H4","h11","H4","h11", + 'h16', + 'h11', + 'H4', + 'h11', + 'H4', + 'h11', + 'H4', + 'h11', // For-of: [Symbol.iterator](), next(), body, next(), body, ... - "i12","i7","I4","i7","I4","i7","I4","i7", + 'i12', + 'i7', + 'I4', + 'i7', + 'I4', + 'i7', + 'I4', + 'i7', // For-of-let: [Symbol.iterator](), next(), body, next(), ... - "j18","j11","J4","j11","J4","j11","J4","j11", + 'j18', + 'j11', + 'J4', + 'j11', + 'J4', + 'j11', + 'J4', + 'j11', // For-var: init, condition, body, next, condition, body, ... 
- "k15","k20","K4","k26","k20","K4","k26","k20","K4","k26","k20", + 'k15', + 'k20', + 'K4', + 'k26', + 'k20', + 'K4', + 'k26', + 'k20', + 'K4', + 'k26', + 'k20', // For: init, condition, body, next, condition, body, ... - "l7","l16","L4","l22","l16","L4","l22","l16","L4","l22","l16", + 'l7', + 'l16', + 'L4', + 'l22', + 'l16', + 'L4', + 'l22', + 'l16', + 'L4', + 'l22', + 'l16', // For-let: init, condition, body, next, condition, body, ... - "m15","m20","M4","m26","m20","M4","m26","m20","M4","m26","m20", + 'm15', + 'm20', + 'M4', + 'm26', + 'm20', + 'M4', + 'm26', + 'm20', + 'M4', + 'm26', + 'm20', // For-of, empty: [Symbol.iterator](), next() once - "n16", "n11", - // Spread: expression statement, spread - "o2", "o9", + 'n16', + 'n11', + // Spread: expression statement + 'o2', // Exit. - "y0","z0", + 'y0', + 'z0', ] print("expected:\n"+ JSON.stringify(expected)); diff --git a/deps/v8/test/debugger/debug/side-effect/debug-evaluate-no-side-effect-builtins.js b/deps/v8/test/debugger/debug/side-effect/debug-evaluate-no-side-effect-builtins.js index e9c996aff68410..138b3e7b9268c2 100644 --- a/deps/v8/test/debugger/debug/side-effect/debug-evaluate-no-side-effect-builtins.js +++ b/deps/v8/test/debugger/debug/side-effect/debug-evaluate-no-side-effect-builtins.js @@ -77,7 +77,8 @@ function listener(event, exec_state, event_data, data) { fail(`Array.of(1, 2, 3)`); var function_param = [ "flatMap", "forEach", "every", "some", "reduce", "reduceRight", "find", - "filter", "map", "findIndex", "findLast", "findLastIndex" + "filter", "map", "findIndex", "findLast", "findLastIndex", "group", + "groupToMap" ]; var fails = ["pop", "push", "reverse", "shift", "unshift", "splice", "sort", "copyWithin", "fill"]; diff --git a/deps/v8/test/fuzzer/fuzzer-support.cc b/deps/v8/test/fuzzer/fuzzer-support.cc index 7f80d83c937d2d..2116039a381cd3 100644 --- a/deps/v8/test/fuzzer/fuzzer-support.cc +++ b/deps/v8/test/fuzzer/fuzzer-support.cc @@ -17,12 +17,25 @@ namespace v8_fuzzer { FuzzerSupport::FuzzerSupport(int* argc, char*** argv) { + // Disable hard abort, which generates a trap instead of a proper abortion. + // Traps by default do not cause libfuzzer to generate a crash file. + i::FLAG_hard_abort = false; + i::FLAG_expose_gc = true; // Allow changing flags in fuzzers. // TODO(12887): Refactor fuzzers to not change flags after initialization. 
i::FLAG_freeze_flags_after_init = false; +#if V8_ENABLE_WEBASSEMBLY + if (V8_TRAP_HANDLER_SUPPORTED) { + constexpr bool kUseDefaultTrapHandler = true; + if (!v8::V8::EnableWebAssemblyTrapHandler(kUseDefaultTrapHandler)) { + FATAL("Could not register trap handler"); + } + } +#endif // V8_ENABLE_WEBASSEMBLY + v8::V8::SetFlagsFromCommandLine(argc, *argv, true); v8::V8::InitializeICUDefaultLocation((*argv)[0]); v8::V8::InitializeExternalStartupData((*argv)[0]); @@ -69,14 +82,6 @@ std::unique_ptr<FuzzerSupport> FuzzerSupport::fuzzer_support_; // static void FuzzerSupport::InitializeFuzzerSupport(int* argc, char*** argv) { -#if V8_ENABLE_WEBASSEMBLY - if (V8_TRAP_HANDLER_SUPPORTED) { - constexpr bool kUseDefaultTrapHandler = true; - if (!v8::V8::EnableWebAssemblyTrapHandler(kUseDefaultTrapHandler)) { - FATAL("Could not register trap handler"); - } - } -#endif // V8_ENABLE_WEBASSEMBLY DCHECK_NULL(FuzzerSupport::fuzzer_support_); FuzzerSupport::fuzzer_support_ = std::make_unique<v8_fuzzer::FuzzerSupport>(argc, argv); diff --git a/deps/v8/test/fuzzer/wasm-compile.cc b/deps/v8/test/fuzzer/wasm-compile.cc index a10e16235d48dd..0a8adf24cfa963 100644 --- a/deps/v8/test/fuzzer/wasm-compile.cc +++ b/deps/v8/test/fuzzer/wasm-compile.cc @@ -842,7 +842,7 @@ class WasmGenerator { Var local = GetRandomLocal(data); // TODO(manoskouk): Ideally we would check for subtyping here over type // equality, but we don't have a module. - // TODO(7748): Remove this condition if non-nullable locals are allowed. + // TODO(7748): Allow initialized non-nullable locals. if (nullable == kNullable && local.is_valid() && local.type.is_object_reference() && type == local.type.heap_type()) { builder_->EmitWithU32V(kExprLocalGet, local.index); @@ -2522,8 +2522,8 @@ class WasmCompileFuzzer : public WasmExecutionFuzzer { // performed by adding a function by {FunctionSig}, because we emit // everything in one recursive group which blocks signature // canonicalization. - // TODO(7748): Relax this when we implement type canonicalization and - // proper recursive-group support. + // TODO(7748): Relax this when we implement proper recursive-group + // support. functions.push_back(liftoff_as_reference ? builder.AddFunction(function_signatures[i]) : builder.AddFunction(sig)); diff --git a/deps/v8/test/fuzzer/wasm-fuzzer-common.cc b/deps/v8/test/fuzzer/wasm-fuzzer-common.cc index c9756947c89891..7193ffc3ef595f 100644 --- a/deps/v8/test/fuzzer/wasm-fuzzer-common.cc +++ b/deps/v8/test/fuzzer/wasm-fuzzer-common.cc @@ -81,10 +81,9 @@ Handle<WasmModuleObject> CompileReferenceModule(Zone* zone, Isolate* isolate, constexpr base::Vector<const char> kNoSourceUrl; Handle<Script> script = GetWasmEngine()->GetOrCreateScript(isolate, native_module, kNoSourceUrl); - Handle<FixedArray> export_wrappers = isolate->factory()->NewFixedArray( - static_cast<int>(module->functions.size())); - return WasmModuleObject::New(isolate, std::move(native_module), script, - export_wrappers); + isolate->heap()->EnsureWasmCanonicalRttsSize(module->MaxCanonicalTypeIndex() + + 1); + return WasmModuleObject::New(isolate, std::move(native_module), script); } void InterpretAndExecuteModule(i::Isolate* isolate, @@ -682,15 +681,15 @@ void GenerateTestCase(Isolate* isolate, ModuleWireBytes wire_bytes, << " /* sig */)\n"; // Add locals. 
- BodyLocalDecls decls(&tmp_zone); + BodyLocalDecls decls; DecodeLocalDecls(enabled_features, &decls, module, func_code.begin(), - func_code.end()); - if (!decls.type_list.empty()) { + func_code.end(), &tmp_zone); + if (decls.num_locals) { os << " "; - for (size_t pos = 0, count = 1, locals = decls.type_list.size(); - pos < locals; pos += count, count = 1) { - ValueType type = decls.type_list[pos]; - while (pos + count < locals && decls.type_list[pos + count] == type) { + for (size_t pos = 0, count = 1, locals = decls.num_locals; pos < locals; + pos += count, count = 1) { + ValueType type = decls.local_types[pos]; + while (pos + count < locals && decls.local_types[pos + count] == type) { ++count; } os << ".addLocals(" << ValueTypeToConstantName(type) << ", " << count diff --git a/deps/v8/test/inspector/debugger/array-destructuring-expected.txt b/deps/v8/test/inspector/debugger/array-destructuring-expected.txt new file mode 100644 index 00000000000000..f33e2d5c9d86db --- /dev/null +++ b/deps/v8/test/inspector/debugger/array-destructuring-expected.txt @@ -0,0 +1,172 @@ +Tests breakable locations in array destructuring. + +Running test: testBreakLocations + +function testFunction() { + var [a, b, c = 4] = |C|generator1(); + |_|[[a, b], c] = |C|generator2(); + |_|[a, ...b] = |C|generator1(); +|R|} + +function generator1() { + |_|return { + [Symbol.iterator]() { + const it = |_|[1, 2].|C|values(); + |_|return {next() { |_|return it.|C|next();|R| }};|R| + } + };|R| +} + +function generator2() { + |_|return { + [Symbol.iterator]() { + const it = |_|[|C|generator1(), 3].|C|values(); + |_|return {next() { |_|return it.|C|next();|R| }};|R| + } + };|R| +} +|R| + + +Running test: testStepping +Execution paused in testFunction: +function testFunction() { + var [a, b, c = 4] = #generator1(); + [[a, b], c] = generator2(); + +Execution paused in generator1: +function generator1() { + #return { + [Symbol.iterator]() { + +Called from testFunction: +function testFunction() { + var [a, b, c = 4] = #generator1(); + [[a, b], c] = generator2(); + +Execution paused in [Symbol.iterator]: + [Symbol.iterator]() { + const it = #[1, 2].values(); + return {next() { return it.next(); }}; + +Called from testFunction: +function testFunction() { + var [a, b, c = 4] = #generator1(); + [[a, b], c] = generator2(); + +Execution paused in next: + const it = [1, 2].values(); + return {next() { #return it.next(); }}; + } + +Called from testFunction: +function testFunction() { + var [#a, b, c = 4] = generator1(); + [[a, b], c] = generator2(); + +Execution paused in testFunction: + var [a, b, c = 4] = generator1(); + #[[a, b], c] = generator2(); + [a, ...b] = generator1(); + +Execution paused in generator2: +function generator2() { + #return { + [Symbol.iterator]() { + +Called from testFunction: + var [a, b, c = 4] = generator1(); + [[a, b], c] = #generator2(); + [a, ...b] = generator1(); + +Execution paused in [Symbol.iterator]: + [Symbol.iterator]() { + const it = #[generator1(), 3].values(); + return {next() { return it.next(); }}; + +Called from testFunction: + var [a, b, c = 4] = generator1(); + [[a, b], c] #= generator2(); + [a, ...b] = generator1(); + +Execution paused in next: + const it = [generator1(), 3].values(); + return {next() { #return it.next(); }}; + } + +Called from testFunction: + var [a, b, c = 4] = generator1(); + [#[a, b], c] = generator2(); + [a, ...b] = generator1(); + +Execution paused in [Symbol.iterator]: + [Symbol.iterator]() { + const it = #[1, 2].values(); + return {next() { return it.next(); }}; + 
+Called from testFunction: + var [a, b, c = 4] = generator1(); + [#[a, b], c] = generator2(); + [a, ...b] = generator1(); + +Execution paused in next: + const it = [1, 2].values(); + return {next() { #return it.next(); }}; + } + +Called from testFunction: + var [a, b, c = 4] = generator1(); + [[#a, b], c] = generator2(); + [a, ...b] = generator1(); + +Execution paused in next: + const it = [generator1(), 3].values(); + return {next() { #return it.next(); }}; + } + +Called from testFunction: + var [a, b, c = 4] = generator1(); + [[a, b], #c] = generator2(); + [a, ...b] = generator1(); + +Execution paused in testFunction: + [[a, b], c] = generator2(); + #[a, ...b] = generator1(); +} + +Execution paused in generator1: +function generator1() { + #return { + [Symbol.iterator]() { + +Called from testFunction: + [[a, b], c] = generator2(); + [a, ...b] = #generator1(); +} + +Execution paused in [Symbol.iterator]: + [Symbol.iterator]() { + const it = #[1, 2].values(); + return {next() { return it.next(); }}; + +Called from testFunction: + [[a, b], c] = generator2(); + [a, ...b] #= generator1(); +} + +Execution paused in next: + const it = [1, 2].values(); + return {next() { #return it.next(); }}; + } + +Called from testFunction: + [[a, b], c] = generator2(); + [#a, ...b] = generator1(); +} + +Execution paused in testFunction: + [a, ...b] = generator1(); +#} + + +Resuming and finishing... diff --git a/deps/v8/test/inspector/debugger/array-destructuring.js b/deps/v8/test/inspector/debugger/array-destructuring.js new file mode 100644 index 00000000000000..6997c755b22c74 --- /dev/null +++ b/deps/v8/test/inspector/debugger/array-destructuring.js @@ -0,0 +1,102 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
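+//
+// A note on the "-expected.txt" companions: session.logBreakLocations marks a
+// plain break location with |_|, a call with |C| and a return with |R|, while
+// session.logSourceLocation marks the current pause position with '#'.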
+ +let {session, contextGroup, Protocol} = + InspectorTest.start('Tests breakable locations in array destructuring.'); + +const source = ` +function testFunction() { + var [a, b, c = 4] = generator1(); + [[a, b], c] = generator2(); + [a, ...b] = generator1(); +} + +function generator1() { + return { + [Symbol.iterator]() { + const it = [1, 2].values(); + return {next() { return it.next(); }}; + } + }; +} + +function generator2() { + return { + [Symbol.iterator]() { + const it = [generator1(), 3].values(); + return {next() { return it.next(); }}; + } + }; +} +`; + +const url = 'test.js'; +contextGroup.addScript(source, 0, 0, url); +session.setupScriptMap(); + +InspectorTest.runAsyncTestSuite([ + async function testBreakLocations() { + let [, , {params: {scriptId}}] = await Promise.all([ + Protocol.Runtime.enable(), + Protocol.Debugger.enable(), + Protocol.Debugger.onceScriptParsed(), + ]); + let {result: {locations}} = await Protocol.Debugger.getPossibleBreakpoints( + {start: {lineNumber: 0, columnNumber: 0, scriptId}}); + await session.logBreakLocations(locations); + await Promise.all([ + Protocol.Debugger.disable(), + Protocol.Runtime.disable(), + ]); + }, + + async function testStepping() { + let [, , {params: {scriptId}}] = await Promise.all([ + Protocol.Runtime.enable(), + Protocol.Debugger.enable(), + Protocol.Debugger.onceScriptParsed(), + ]); + const {breakpointId} = await Protocol.Debugger.setBreakpoint({ + location: { + scriptId, + lineNumber: 2, + } + }); + const evalPromise = + Protocol.Runtime.evaluate({expression: 'testFunction()'}); + for (;;) { + const {method, params} = await Promise.race([ + evalPromise, + Protocol.Debugger.oncePaused(), + ]); + if (method !== 'Debugger.paused') { + break; + } + const callFrames = params.callFrames.filter( + callFrame => callFrame.location.scriptId === scriptId); + if (callFrames.length === 0) { + InspectorTest.log('Resuming and finishing...'); + await Protocol.Debugger.resume(); + } else { + const [{functionName, location}, ...callerFrames] = callFrames; + InspectorTest.log(`Execution paused in ${functionName}:`); + await session.logSourceLocation(location); + for (const {location, functionName} of callerFrames) { + InspectorTest.log(`Called from ${functionName}:`); + await session.logSourceLocation(location); + } + if (functionName === 'testFunction') { + await Protocol.Debugger.stepInto(); + } else { + await Protocol.Debugger.stepOut(); + } + } + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + Protocol.Runtime.disable(), + ]); + } +]); diff --git a/deps/v8/test/inspector/debugger/array-spread-expected.txt b/deps/v8/test/inspector/debugger/array-spread-expected.txt new file mode 100644 index 00000000000000..948974885c9602 --- /dev/null +++ b/deps/v8/test/inspector/debugger/array-spread-expected.txt @@ -0,0 +1,75 @@ +Tests breakable locations in array spread. 
+ +Running test: testBreakLocations + +function testFunction() { + var a = |_|[...iterable]; + var b = [...|_|a, ...iterable, ...a]; +|R|} + +const iterable = |_|{ + [Symbol.iterator]() { + const it = |_|[1, 2].|C|values(); + |_|return {next() { |_|return it.|C|next();|R| }};|R| + } +}; +|R| + + +Running test: testStepping +Execution paused in testFunction: +function testFunction() { + var a = #[...iterable]; + var b = [...a, ...iterable, ...a]; + +Execution paused in [Symbol.iterator]: + [Symbol.iterator]() { + const it = #[1, 2].values(); + return {next() { return it.next(); }}; + +Called from testFunction: +function testFunction() { + var a = [...#iterable]; + var b = [...a, ...iterable, ...a]; + +Execution paused in next: + const it = [1, 2].values(); + return {next() { #return it.next(); }}; + } + +Called from testFunction: +function testFunction() { + var a = [...#iterable]; + var b = [...a, ...iterable, ...a]; + +Execution paused in testFunction: + var a = [...iterable]; + var b = [...#a, ...iterable, ...a]; +} + +Execution paused in [Symbol.iterator]: + [Symbol.iterator]() { + const it = #[1, 2].values(); + return {next() { return it.next(); }}; + +Called from testFunction: + var a = [...iterable]; + var b = [...a, ...#iterable, ...a]; +} + +Execution paused in next: + const it = [1, 2].values(); + return {next() { #return it.next(); }}; + } + +Called from testFunction: + var a = [...iterable]; + var b = [...a, ...#iterable, ...a]; +} + +Execution paused in testFunction: + var b = [...a, ...iterable, ...a]; +#} + + +Resuming and finishing... diff --git a/deps/v8/test/inspector/debugger/array-spread.js b/deps/v8/test/inspector/debugger/array-spread.js new file mode 100644 index 00000000000000..ce4d5d35892635 --- /dev/null +++ b/deps/v8/test/inspector/debugger/array-spread.js @@ -0,0 +1,90 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
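+//
+// The testStepping driver below races Runtime.evaluate against Debugger.paused
+// events: pauses are attributed to this script via scriptId, testFunction
+// frames step into their callees, and helper frames step back out until the
+// evaluation completes.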
+ +let {session, contextGroup, Protocol} = + InspectorTest.start('Tests breakable locations in array spread.'); + +const source = ` +function testFunction() { + var a = [...iterable]; + var b = [...a, ...iterable, ...a]; +} + +const iterable = { + [Symbol.iterator]() { + const it = [1, 2].values(); + return {next() { return it.next(); }}; + } +}; +`; + +const url = 'test.js'; +contextGroup.addScript(source, 0, 0, url); +session.setupScriptMap(); + +InspectorTest.runAsyncTestSuite([ + async function testBreakLocations() { + let [, , {params: {scriptId}}] = await Promise.all([ + Protocol.Runtime.enable(), + Protocol.Debugger.enable(), + Protocol.Debugger.onceScriptParsed(), + ]); + let {result: {locations}} = await Protocol.Debugger.getPossibleBreakpoints( + {start: {lineNumber: 0, columnNumber: 0, scriptId}}); + await session.logBreakLocations(locations); + await Promise.all([ + Protocol.Debugger.disable(), + Protocol.Runtime.disable(), + ]); + }, + + async function testStepping() { + let [, , {params: {scriptId}}] = await Promise.all([ + Protocol.Runtime.enable(), + Protocol.Debugger.enable(), + Protocol.Debugger.onceScriptParsed(), + ]); + const {breakpointId} = await Protocol.Debugger.setBreakpoint({ + location: { + scriptId, + lineNumber: 2, + } + }); + const evalPromise = + Protocol.Runtime.evaluate({expression: 'testFunction()'}); + for (;;) { + const {method, params} = await Promise.race([ + evalPromise, + Protocol.Debugger.oncePaused(), + ]); + if (method !== 'Debugger.paused') { + break; + } + const callFrames = params.callFrames.filter( + callFrame => callFrame.location.scriptId === scriptId); + if (callFrames.length === 0) { + InspectorTest.log('Resuming and finishing...'); + await Protocol.Debugger.resume(); + } else { + const [{functionName, location}, ...callerFrames] = callFrames; + InspectorTest.log(`Execution paused in ${functionName}:`); + await session.logSourceLocation(location); + for (const {location, functionName} of callerFrames) { + InspectorTest.log(`Called from ${functionName}:`); + await session.logSourceLocation(location); + } + if (functionName === 'testFunction') { + await Protocol.Debugger.stepInto(); + } else { + await Protocol.Debugger.stepOut(); + } + } + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + Protocol.Runtime.disable(), + ]); + } +]); diff --git a/deps/v8/test/inspector/debugger/break-locations-await-expected.txt b/deps/v8/test/inspector/debugger/break-locations-await-expected.txt index ac194fdd1062fb..4f9c1ad008450c 100644 --- a/deps/v8/test/inspector/debugger/break-locations-await-expected.txt +++ b/deps/v8/test/inspector/debugger/break-locations-await-expected.txt @@ -5,11 +5,11 @@ Running test: testBreakLocations function testFunction() { async function f1() { for (let x = |_|0; x |_|< 1; ++|_|x) |_|await x; - |_|return |_|await Promise.|C|resolve(2);|R| + |_|return await Promise.|C|resolve(2);|R| } async function f2() { - let r = |_|await |C|f1() + |_|await |C|f1(); + let r = |_|await |C|f1() + await |C|f1(); |_|await |C|f1(); |_|await |C|f1().|C|then(x => x |_|* 2|R|); |_|await [1].|C|map(x => Promise.|C|resolve(x)|R|)[0]; diff --git a/deps/v8/test/inspector/debugger/break-locations-var-init-expected.txt b/deps/v8/test/inspector/debugger/break-locations-var-init-expected.txt index 4af1b05d66160f..46089a2a56832b 100644 --- a/deps/v8/test/inspector/debugger/break-locations-var-init-expected.txt +++ b/deps/v8/test/inspector/debugger/break-locations-var-init-expected.txt @@ -15,7 
+15,7 @@ function testFunction() { var y = |_|(a = 100); var z = |_|x + (a = 1) + (a = 2) + (a = 3) + |C|f(); function f() { - for (let { |_|x, |_|y } = |_|{ x: 0, y: 1 }; y |_|> 0; --|_|y) { let z = |_|x + y; } + for (let { x, y } = |_|{ x: 0, y: 1 }; y |_|> 0; --|_|y) { let z = |_|x + y; } |R|} var b = obj1.|_|a; |_|(async function asyncF() { @@ -89,20 +89,6 @@ testFunction (test.js:10:44) for (let { x, y } = #{ x: 0, y: 1 }; y > 0; --y) { let z = x + y; } } -f (test.js:12:15) -testFunction (test.js:10:44) -(anonymous) (expr.js:0:0) - function f() { - for (let { #x, y } = { x: 0, y: 1 }; y > 0; --y) { let z = x + y; } - } - -f (test.js:12:18) -testFunction (test.js:10:44) -(anonymous) (expr.js:0:0) - function f() { - for (let { x, #y } = { x: 0, y: 1 }; y > 0; --y) { let z = x + y; } - } - f (test.js:12:42) testFunction (test.js:10:44) (anonymous) (expr.js:0:0) diff --git a/deps/v8/test/inspector/debugger/comma-expression-expected.txt b/deps/v8/test/inspector/debugger/comma-expression-expected.txt new file mode 100644 index 00000000000000..eabf6f681dbdd9 --- /dev/null +++ b/deps/v8/test/inspector/debugger/comma-expression-expected.txt @@ -0,0 +1,225 @@ +Tests breakable locations in comma expressions. + +Running test: testBreakLocations + +function testFunction() { + |C|foo(), |C|foo(), |C|foo(); + |_|return |C|bar(), |C|bar();|R| +} + +function foo() { + |_|return |C|baz(), |C|baz();|R| +} + +function bar() { + |_|return |C|foo(), + 1 |_|+ |C|foo(), + 2 |_|+ |C|foo();|R| +} + +const baz = |_|() => |_|0|R|; +|R| + + +Running test: testStepping +Execution paused in testFunction: +function testFunction() { + #foo(), foo(), foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + #return baz(), baz(); +} + +Called from testFunction: +function testFunction() { + #foo(), foo(), foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + return baz(), #baz(); +} + +Called from testFunction: +function testFunction() { + #foo(), foo(), foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + return baz(), baz();# +} + +Called from testFunction: +function testFunction() { + #foo(), foo(), foo(); + return bar(), bar(); + +Execution paused in testFunction: +function testFunction() { + foo(), #foo(), foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + #return baz(), baz(); +} + +Called from testFunction: +function testFunction() { + foo(), #foo(), foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + return baz(), #baz(); +} + +Called from testFunction: +function testFunction() { + foo(), #foo(), foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + return baz(), baz();# +} + +Called from testFunction: +function testFunction() { + foo(), #foo(), foo(); + return bar(), bar(); + +Execution paused in testFunction: +function testFunction() { + foo(), foo(), #foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + #return baz(), baz(); +} + +Called from testFunction: +function testFunction() { + foo(), foo(), #foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + return baz(), #baz(); +} + +Called from testFunction: +function testFunction() { + foo(), foo(), #foo(); + return bar(), bar(); + +Execution paused in foo: +function foo() { + return baz(), baz();# +} + +Called from testFunction: +function testFunction() { + foo(), foo(), #foo(); + return bar(), bar(); + +Execution paused in testFunction: + foo(), foo(), foo(); + 
#return bar(), bar(); +} + +Execution paused in bar: +function bar() { + #return foo(), + 1 + foo(), + +Called from testFunction: + foo(), foo(), foo(); + return #bar(), bar(); +} + +Execution paused in bar: + return foo(), + 1 #+ foo(), + 2 + foo(); + +Called from testFunction: + foo(), foo(), foo(); + return #bar(), bar(); +} + +Execution paused in bar: + 1 + foo(), + 2 #+ foo(); +} + +Called from testFunction: + foo(), foo(), foo(); + return #bar(), bar(); +} + +Execution paused in bar: + 1 + foo(), + 2 + foo();# +} + +Called from testFunction: + foo(), foo(), foo(); + return #bar(), bar(); +} + +Execution paused in testFunction: + foo(), foo(), foo(); + return bar(), #bar(); +} + +Execution paused in bar: +function bar() { + #return foo(), + 1 + foo(), + +Called from testFunction: + foo(), foo(), foo(); + return bar(), #bar(); +} + +Execution paused in bar: + return foo(), + 1 #+ foo(), + 2 + foo(); + +Called from testFunction: + foo(), foo(), foo(); + return bar(), #bar(); +} + +Execution paused in bar: + 1 + foo(), + 2 #+ foo(); +} + +Called from testFunction: + foo(), foo(), foo(); + return bar(), #bar(); +} + +Execution paused in bar: + 1 + foo(), + 2 + foo();# +} + +Called from testFunction: + foo(), foo(), foo(); + return bar(), #bar(); +} + +Execution paused in testFunction: + foo(), foo(), foo(); + return bar(), bar();# +} + +Resuming and finishing... diff --git a/deps/v8/test/inspector/debugger/comma-expression.js b/deps/v8/test/inspector/debugger/comma-expression.js new file mode 100644 index 00000000000000..4e3b26fcf6af4f --- /dev/null +++ b/deps/v8/test/inspector/debugger/comma-expression.js @@ -0,0 +1,96 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
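+//
+// Same driver shape as the destructuring tests, but helper frames are advanced
+// with Debugger.stepOver instead of stepOut, so every comma operand evaluated
+// inside foo and bar shows up as its own pause in the log.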
+ +let {session, contextGroup, Protocol} = + InspectorTest.start('Tests breakable locations in comma expressions.'); + +const source = ` +function testFunction() { + foo(), foo(), foo(); + return bar(), bar(); +} + +function foo() { + return baz(), baz(); +} + +function bar() { + return foo(), + 1 + foo(), + 2 + foo(); +} + +const baz = () => 0; +`; + +const url = 'test.js'; +contextGroup.addScript(source, 0, 0, url); +session.setupScriptMap(); + +InspectorTest.runAsyncTestSuite([ + async function testBreakLocations() { + const [, , {params: {scriptId}}] = await Promise.all([ + Protocol.Runtime.enable(), + Protocol.Debugger.enable(), + Protocol.Debugger.onceScriptParsed(), + ]); + const {result: {locations}} = + await Protocol.Debugger.getPossibleBreakpoints( + {start: {lineNumber: 0, columnNumber: 0, scriptId}}); + await session.logBreakLocations(locations); + await Promise.all([ + Protocol.Debugger.disable(), + Protocol.Runtime.disable(), + ]); + }, + + async function testStepping() { + const [, , {params: {scriptId}}] = await Promise.all([ + Protocol.Runtime.enable(), + Protocol.Debugger.enable(), + Protocol.Debugger.onceScriptParsed(), + ]); + const {breakpointId} = await Protocol.Debugger.setBreakpoint({ + location: { + scriptId, + lineNumber: 2, + } + }); + const evalPromise = + Protocol.Runtime.evaluate({expression: 'testFunction()'}); + for (;;) { + const {method, params} = await Promise.race([ + evalPromise, + Protocol.Debugger.oncePaused(), + ]); + if (method !== 'Debugger.paused') { + break; + } + const callFrames = params.callFrames.filter( + callFrame => callFrame.location.scriptId === scriptId); + if (callFrames.length === 0) { + InspectorTest.log('Resuming and finishing...'); + await Protocol.Debugger.resume(); + } else { + const [{functionName, location}, ...callerFrames] = callFrames; + InspectorTest.log(`Execution paused in ${functionName}:`); + await session.logSourceLocation(location); + for (const {location, functionName} of callerFrames) { + InspectorTest.log(`Called from ${functionName}:`); + await session.logSourceLocation(location); + } + if (functionName === 'testFunction') { + await Protocol.Debugger.stepInto(); + } else { + await Protocol.Debugger.stepOver(); + } + } + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + Protocol.Runtime.disable(), + ]); + } +]); diff --git a/deps/v8/test/inspector/debugger/destructuring-expected.txt b/deps/v8/test/inspector/debugger/destructuring-expected.txt deleted file mode 100644 index 47673aefe7512b..00000000000000 --- a/deps/v8/test/inspector/debugger/destructuring-expected.txt +++ /dev/null @@ -1,34 +0,0 @@ -Tests breakable locations in destructuring. - -Running test: testBreakLocations - -function testFunction() { - function func() { - |_|return [1, 2];|R| - } - - var [|_|a, |_|b] = |C|func(); -|R|} - - -Running test: testSetBreakpoint -Setting breakpoint at test.js:6:0 - - var [a, b] = #func(); -} - -Setting breakpoint at test.js:6:7 - - var [#a, b] = func(); -} - -Setting breakpoint at test.js:6:10 - - var [a, #b] = func(); -} - -Setting breakpoint at test.js:6:15 - - var [a, b] = #func(); -} - diff --git a/deps/v8/test/inspector/debugger/destructuring.js b/deps/v8/test/inspector/debugger/destructuring.js deleted file mode 100644 index e46e0e499823a1..00000000000000 --- a/deps/v8/test/inspector/debugger/destructuring.js +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2021 the V8 project authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -let {session, contextGroup, Protocol} = - InspectorTest.start('Tests breakable locations in destructuring.'); - -let source = ` -function testFunction() { - function func() { - return [1, 2]; - } - - var [a, b] = func(); -} -//# sourceURL=test.js`; - -contextGroup.addScript(source); -session.setupScriptMap(); - -InspectorTest.runAsyncTestSuite([ - async function testBreakLocations() { - Protocol.Debugger.enable(); - let {params:{scriptId}} = await Protocol.Debugger.onceScriptParsed(); - let {result:{locations}} = await Protocol.Debugger.getPossibleBreakpoints({ - start: {lineNumber: 0, columnNumber : 0, scriptId}}); - await session.logBreakLocations(locations); - }, - - async function testSetBreakpoint() { - const SOURCE_LOCATIONS = [ - {lineNumber: 6, columnNumber: 0}, - {lineNumber: 6, columnNumber: 7}, - {lineNumber: 6, columnNumber: 10}, - {lineNumber: 6, columnNumber: 15}, - ]; - for (const {lineNumber, columnNumber} of SOURCE_LOCATIONS) { - const url = 'test.js'; - InspectorTest.log(`Setting breakpoint at ${url}:${lineNumber}:${columnNumber}`); - const {result: {breakpointId, locations}} = await Protocol.Debugger.setBreakpointByUrl({ - lineNumber, columnNumber, url - }); - locations.forEach(location => session.logSourceLocation(location)); - await Protocol.Debugger.removeBreakpoint({breakpointId}); - } - } -]); diff --git a/deps/v8/test/inspector/debugger/get-possible-breakpoints-class-fields-expected.txt b/deps/v8/test/inspector/debugger/get-possible-breakpoints-class-fields-expected.txt index d244261ba08c3f..b1fc379d92dfcf 100644 --- a/deps/v8/test/inspector/debugger/get-possible-breakpoints-class-fields-expected.txt +++ b/deps/v8/test/inspector/debugger/get-possible-breakpoints-class-fields-expected.txt @@ -99,7 +99,7 @@ let x = |R|class {} }|R| |_|x = |R|class { - x = |_|function*|_|() { + x = |_|function*() { |_|yield 1; |R|}; }|R| @@ -190,7 +190,7 @@ let x = |R|class {} } |_|x = |R|class { - static x = |_|function*|_|() { + static x = |_|function*() { |_|yield 1; |R|}|R|; } @@ -204,3 +204,4 @@ let x = |R|class {} [|C|bar()] = |_|6; }|R| |R| + diff --git a/deps/v8/test/inspector/debugger/get-possible-breakpoints-main-expected.txt b/deps/v8/test/inspector/debugger/get-possible-breakpoints-main-expected.txt index f7f9b7ca25469a..428685253c92dc 100644 --- a/deps/v8/test/inspector/debugger/get-possible-breakpoints-main-expected.txt +++ b/deps/v8/test/inspector/debugger/get-possible-breakpoints-main-expected.txt @@ -152,7 +152,7 @@ function testSwitch() { } |R|} -function* idMaker|_|() { +function* idMaker() { |_|yield 1; |_|yield 2; |_|yield 3; @@ -230,8 +230,8 @@ async function testPromiseAsyncWithCode() { |_|await p; |R|} |C|setTimeout(returnCall, 0); - |_|await |C|foo(); - |_|await |C|foo(); + await |C|foo(); + await |C|foo(); |C|nextTest(); |R|} |C|main(); @@ -252,7 +252,7 @@ async function testPromiseComplex() { } var x = |_|1; var y = |_|2; - |C|returnFunction(|C|emptyFunction(), x++, --y, x => 2 |_|* x|R|, |C|returnCall())|C|().a = |_|await |C|foo((a => 2 |_|*a|R|)|C|(5)); + |C|returnFunction(|C|emptyFunction(), x++, --y, x => 2 |_|* x|R|, |C|returnCall())|C|().a = await |C|foo((a => 2 |_|*a|R|)|C|(5)); |C|nextTest(); |R|} |C|main(); diff --git a/deps/v8/test/inspector/debugger/line-breakpoint-await-expected.txt b/deps/v8/test/inspector/debugger/line-breakpoint-await-expected.txt new file mode 100644 index 00000000000000..a648308d970f54 --- /dev/null +++ 
b/deps/v8/test/inspector/debugger/line-breakpoint-await-expected.txt @@ -0,0 +1,26 @@ +Line breakpoints for await + +Running test: testAwaitInAsyncFunctionWithLeadingWhitespace +Setting breakpoint on `await this.foo()` in `obj.bar` +Calling `obj.bar()` +Hit breakpoint before calling into `this.foo` + +Running test: testAwaitInAsyncFunctionWithoutLeadingWhitespace +Setting breakpoint on `await this.foo()` in `obj.baz` +Calling `obj.baz()` +Hit breakpoint before calling into `this.foo` + +Running test: testAwaitInAsyncGeneratorWithLeadingWhitespace +Setting breakpoint on `await this.foo()` in `obj.barGenerator` +Calling `obj.barGenerator().next()` +Hit breakpoint before calling into `this.foo` + +Running test: testAwaitInAsyncGeneratorWithoutLeadingWhitespace +Setting breakpoint on `await this.foo()` in `obj.bazGenerator` +Calling `obj.bazGenerator().next()` +Hit breakpoint before calling into `this.foo` + +Running test: testAwaitInAsyncFunctionMinified +Setting breakpoint on `await this.foo()` in `obj.minified` +Calling `obj.minified()` +Hit breakpoint before calling into `this.foo` diff --git a/deps/v8/test/inspector/debugger/line-breakpoint-await.js b/deps/v8/test/inspector/debugger/line-breakpoint-await.js new file mode 100644 index 00000000000000..c6f207533f8467 --- /dev/null +++ b/deps/v8/test/inspector/debugger/line-breakpoint-await.js @@ -0,0 +1,201 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +let {session, contextGroup, Protocol} = + InspectorTest.start('Line breakpoints for await'); + +// clang-format off +const url = 'line-breakpoint-await.js'; +contextGroup.addScript(` +var obj = { + foo() { + debugger; + }, + + async bar() { + await this.foo(); + }, + + async baz() { +await this.foo(); + }, + + async* barGenerator() { + await this.foo(); + }, + + async* bazGenerator() { +await this.foo(); + }, + + async minified(){await this.foo();} +}; +`, 0, 0, url); +// clang-format on + +session.setupScriptMap(); +InspectorTest.runAsyncTestSuite([ + async function testAwaitInAsyncFunctionWithLeadingWhitespace() { + await Promise.all([ + Protocol.Debugger.enable(), + Protocol.Runtime.enable(), + ]); + InspectorTest.log('Setting breakpoint on `await this.foo()` in `obj.bar`'); + const {result: {breakpointId}} = + await Protocol.Debugger.setBreakpointByUrl({ + url, + lineNumber: 7, + columnNumber: 0, + }); + InspectorTest.log('Calling `obj.bar()`'); + const pausedPromise = Protocol.Debugger.oncePaused(); + const evalPromise = Protocol.Runtime.evaluate({ + expression: 'obj.bar()', + awaitPromise: true, + }); + const {params: {hitBreakpoints}} = await pausedPromise; + if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) { + InspectorTest.log('Hit breakpoint before calling into `this.foo`'); + } else { + InspectorTest.log('Missed breakpoint before calling into `this.foo`'); + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + evalPromise, + Protocol.Runtime.disable(), + ]); + }, + + async function testAwaitInAsyncFunctionWithoutLeadingWhitespace() { + await Promise.all([ + Protocol.Debugger.enable(), + Protocol.Runtime.enable(), + ]); + InspectorTest.log('Setting breakpoint on `await this.foo()` in `obj.baz`'); + const {result: {breakpointId}} = + await Protocol.Debugger.setBreakpointByUrl({ + url, + lineNumber: 11, + columnNumber: 0, + }); + InspectorTest.log('Calling `obj.baz()`'); + const 
pausedPromise = Protocol.Debugger.oncePaused(); + const evalPromise = Protocol.Runtime.evaluate({ + expression: 'obj.baz()', + awaitPromise: true, + }); + const {params: {hitBreakpoints}} = await pausedPromise; + if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) { + InspectorTest.log('Hit breakpoint before calling into `this.foo`'); + } else { + InspectorTest.log('Missed breakpoint before calling into `this.foo`'); + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + evalPromise, + Protocol.Runtime.disable(), + ]); + }, + + async function testAwaitInAsyncGeneratorWithLeadingWhitespace() { + await Promise.all([ + Protocol.Debugger.enable(), + Protocol.Runtime.enable(), + ]); + InspectorTest.log( + 'Setting breakpoint on `await this.foo()` in `obj.barGenerator`'); + const {result: {breakpointId}} = + await Protocol.Debugger.setBreakpointByUrl({ + url, + lineNumber: 15, + columnNumber: 0, + }); + InspectorTest.log('Calling `obj.barGenerator().next()`'); + const pausedPromise = Protocol.Debugger.oncePaused(); + const evalPromise = Protocol.Runtime.evaluate({ + expression: 'obj.barGenerator().next()', + awaitPromise: true, + }); + const {params: {hitBreakpoints}} = await pausedPromise; + if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) { + InspectorTest.log('Hit breakpoint before calling into `this.foo`'); + } else { + InspectorTest.log('Missed breakpoint before calling into `this.foo`'); + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + evalPromise, + Protocol.Runtime.disable(), + ]); + }, + + async function testAwaitInAsyncGeneratorWithoutLeadingWhitespace() { + await Promise.all([ + Protocol.Debugger.enable(), + Protocol.Runtime.enable(), + ]); + InspectorTest.log( + 'Setting breakpoint on `await this.foo()` in `obj.bazGenerator`'); + const {result: {breakpointId}} = + await Protocol.Debugger.setBreakpointByUrl({ + url, + lineNumber: 19, + columnNumber: 0, + }); + InspectorTest.log('Calling `obj.bazGenerator().next()`'); + const pausedPromise = Protocol.Debugger.oncePaused(); + const evalPromise = Protocol.Runtime.evaluate({ + expression: 'obj.bazGenerator().next()', + awaitPromise: true, + }); + const {params: {hitBreakpoints}} = await pausedPromise; + if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) { + InspectorTest.log('Hit breakpoint before calling into `this.foo`'); + } else { + InspectorTest.log('Missed breakpoint before calling into `this.foo`'); + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + evalPromise, + Protocol.Runtime.disable(), + ]); + }, + + async function testAwaitInAsyncFunctionMinified() { + await Promise.all([ + Protocol.Debugger.enable(), + Protocol.Runtime.enable(), + ]); + InspectorTest.log( + 'Setting breakpoint on `await this.foo()` in `obj.minified`'); + const {result: {breakpointId}} = + await Protocol.Debugger.setBreakpointByUrl({ + url, + lineNumber: 22, + columnNumber: 19, + }); + InspectorTest.log('Calling `obj.minified()`'); + const pausedPromise = Protocol.Debugger.oncePaused(); + const evalPromise = Protocol.Runtime.evaluate({ + expression: 'obj.minified()', + awaitPromise: true, + }); + const {params: {hitBreakpoints}} = await pausedPromise; + if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) { + InspectorTest.log('Hit breakpoint before calling into `this.foo`'); + } else { + 
InspectorTest.log('Missed breakpoint before calling into `this.foo`');
+    }
+    await Promise.all([
+      Protocol.Debugger.removeBreakpoint({breakpointId}),
+      Protocol.Debugger.disable(),
+      evalPromise,
+      Protocol.Runtime.disable(),
+    ]);
+  },
+]);
diff --git a/deps/v8/test/inspector/debugger/line-breakpoint-yield-expected.txt b/deps/v8/test/inspector/debugger/line-breakpoint-yield-expected.txt
new file mode 100644
index 00000000000000..948d381e5f767d
--- /dev/null
+++ b/deps/v8/test/inspector/debugger/line-breakpoint-yield-expected.txt
@@ -0,0 +1,26 @@
+Line breakpoints for yield
+
+Running test: testYieldInGeneratorWithLeadingWhitespace
+Setting breakpoint on `yield this.foo()` in `obj.barGenerator`
+Calling `obj.barGenerator().next()`
+Hit breakpoint before calling into `this.foo`
+
+Running test: testYieldInGeneratorWithoutLeadingWhitespace
+Setting breakpoint on `yield this.foo()` in `obj.bazGenerator`
+Calling `obj.bazGenerator().next()`
+Hit breakpoint before calling into `this.foo`
+
+Running test: testYieldInAsyncGeneratorWithLeadingWhitespace
+Setting breakpoint on `yield this.foo()` in `obj.barAsyncGenerator`
+Calling `obj.barAsyncGenerator().next()`
+Hit breakpoint before calling into `this.foo`
+
+Running test: testYieldInAsyncGeneratorWithoutLeadingWhitespace
+Setting breakpoint on `yield this.foo()` in `obj.bazAsyncGenerator`
+Calling `obj.bazAsyncGenerator().next()`
+Hit breakpoint before calling into `this.foo`
+
+Running test: testYieldInMinifiedGenerator
+Setting breakpoint on `yield this.foo()` in `obj.minifiedGenerator`
+Calling `obj.minifiedGenerator().next()`
+Hit breakpoint before calling into `this.foo`
diff --git a/deps/v8/test/inspector/debugger/line-breakpoint-yield.js b/deps/v8/test/inspector/debugger/line-breakpoint-yield.js
new file mode 100644
index 00000000000000..d9cb36075da778
--- /dev/null
+++ b/deps/v8/test/inspector/debugger/line-breakpoint-yield.js
@@ -0,0 +1,200 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
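+//
+// Mirrors line-breakpoint-await.js: each case asks for a breakpoint at column
+// 0 of the line containing the yield (except the minified case) and checks
+// that it resolves to the position before the yield, regardless of leading
+// whitespace.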
+
+let {session, contextGroup, Protocol} =
+    InspectorTest.start('Line breakpoints for yield');
+
+// clang-format off
+const url = 'line-breakpoint-yield.js';
+contextGroup.addScript(`
+var obj = {
+  foo() {
+    debugger;
+  },
+
+  *barGenerator() {
+    yield this.foo();
+  },
+
+  *bazGenerator() {
+yield this.foo();
+  },
+
+  async* barAsyncGenerator() {
+    yield this.foo();
+  },
+
+  async* bazAsyncGenerator() {
+yield this.foo();
+  },
+
+  *minifiedGenerator(){yield this.foo();}
+};
+`, 0, 0, url);
+// clang-format on
+
+session.setupScriptMap();
+InspectorTest.runAsyncTestSuite([
+  async function testYieldInGeneratorWithLeadingWhitespace() {
+    await Promise.all([
+      Protocol.Debugger.enable(),
+      Protocol.Runtime.enable(),
+    ]);
+    InspectorTest.log(
+        'Setting breakpoint on `yield this.foo()` in `obj.barGenerator`');
+    const {result: {breakpointId}} =
+        await Protocol.Debugger.setBreakpointByUrl({
+          url,
+          lineNumber: 7,
+          columnNumber: 0,
+        });
+    InspectorTest.log('Calling `obj.barGenerator().next()`');
+    const pausedPromise = Protocol.Debugger.oncePaused();
+    const evalPromise = Protocol.Runtime.evaluate({
+      expression: 'obj.barGenerator().next()',
+    });
+    const {params: {hitBreakpoints}} = await pausedPromise;
+    if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) {
+      InspectorTest.log('Hit breakpoint before calling into `this.foo`');
+    } else {
+      InspectorTest.log('Missed breakpoint before calling into `this.foo`');
+    }
+    await Promise.all([
+      Protocol.Debugger.removeBreakpoint({breakpointId}),
+      Protocol.Debugger.disable(),
+      evalPromise,
+      Protocol.Runtime.disable(),
+    ]);
+  },
+
+  async function testYieldInGeneratorWithoutLeadingWhitespace() {
+    await Promise.all([
+      Protocol.Debugger.enable(),
+      Protocol.Runtime.enable(),
+    ]);
+    InspectorTest.log(
+        'Setting breakpoint on `yield this.foo()` in `obj.bazGenerator`');
+    const {result: {breakpointId}} =
+        await Protocol.Debugger.setBreakpointByUrl({
+          url,
+          lineNumber: 11,
+          columnNumber: 0,
+        });
+    InspectorTest.log('Calling `obj.bazGenerator().next()`');
+    const pausedPromise = Protocol.Debugger.oncePaused();
+    const evalPromise = Protocol.Runtime.evaluate({
+      expression: 'obj.bazGenerator().next()',
+    });
+    const {params: {hitBreakpoints}} = await pausedPromise;
+    if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) {
+      InspectorTest.log('Hit breakpoint before calling into `this.foo`');
+    } else {
+      InspectorTest.log('Missed breakpoint before calling into `this.foo`');
+    }
+    await Promise.all([
+      Protocol.Debugger.removeBreakpoint({breakpointId}),
+      Protocol.Debugger.disable(),
+      evalPromise,
+      Protocol.Runtime.disable(),
+    ]);
+  },
+
+  async function testYieldInAsyncGeneratorWithLeadingWhitespace() {
+    await Promise.all([
+      Protocol.Debugger.enable(),
+      Protocol.Runtime.enable(),
+    ]);
+    InspectorTest.log(
+        'Setting breakpoint on `yield this.foo()` in `obj.barAsyncGenerator`');
+    const {result: {breakpointId}} =
+        await Protocol.Debugger.setBreakpointByUrl({
+          url,
+          lineNumber: 15,
+          columnNumber: 0,
+        });
+    InspectorTest.log('Calling `obj.barAsyncGenerator().next()`');
+    const pausedPromise = Protocol.Debugger.oncePaused();
+    const evalPromise = Protocol.Runtime.evaluate({
+      expression: 'obj.barAsyncGenerator().next()',
+      awaitPromise: true,
+    });
+    const {params: {hitBreakpoints}} = await pausedPromise;
+    if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) {
+      InspectorTest.log('Hit breakpoint before calling into `this.foo`');
+    } else {
+      InspectorTest.log('Missed breakpoint
before calling into `this.foo`'); + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + evalPromise, + Protocol.Runtime.disable(), + ]); + }, + + async function testYieldInAsyncGeneratorWithoutLeadingWhitespace() { + await Promise.all([ + Protocol.Debugger.enable(), + Protocol.Runtime.enable(), + ]); + InspectorTest.log( + 'Setting breakpoint on `yield this.foo()` in `obj.bazAsyncGenerator`'); + const {result: {breakpointId}} = + await Protocol.Debugger.setBreakpointByUrl({ + url, + lineNumber: 19, + columnNumber: 0, + }); + InspectorTest.log('Calling `obj.bazAsyncGenerator().next()`'); + const pausedPromise = Protocol.Debugger.oncePaused(); + const evalPromise = Protocol.Runtime.evaluate({ + expression: 'obj.bazAsyncGenerator().next()', + awaitPromise: true, + }); + const {params: {hitBreakpoints}} = await pausedPromise; + if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) { + InspectorTest.log('Hit breakpoint before calling into `this.foo`'); + } else { + InspectorTest.log('Missed breakpoint before calling into `this.foo`'); + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + evalPromise, + Protocol.Runtime.disable(), + ]); + }, + + async function testYieldInMinifiedGenerator() { + await Promise.all([ + Protocol.Debugger.enable(), + Protocol.Runtime.enable(), + ]); + InspectorTest.log( + 'Setting breakpoint on `yield this.foo()` in `obj.minifiedGenerator`'); + const {result: {breakpointId}} = + await Protocol.Debugger.setBreakpointByUrl({ + url, + lineNumber: 22, + columnNumber: 23, + }); + InspectorTest.log('Calling `obj.minifiedGenerator().next()`'); + const pausedPromise = Protocol.Debugger.oncePaused(); + const evalPromise = Protocol.Runtime.evaluate({ + expression: 'obj.minifiedGenerator().next()', + }); + const {params: {hitBreakpoints}} = await pausedPromise; + if (hitBreakpoints?.length === 1 && hitBreakpoints[0] === breakpointId) { + InspectorTest.log('Hit breakpoint before calling into `this.foo`'); + } else { + InspectorTest.log('Missed breakpoint before calling into `this.foo`'); + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + evalPromise, + Protocol.Runtime.disable(), + ]); + }, +]); diff --git a/deps/v8/test/inspector/debugger/object-destructuring-expected.txt b/deps/v8/test/inspector/debugger/object-destructuring-expected.txt new file mode 100644 index 00000000000000..353df73c674955 --- /dev/null +++ b/deps/v8/test/inspector/debugger/object-destructuring-expected.txt @@ -0,0 +1,124 @@ +Tests breakable locations in object destructuring. 
+ +Running test: testBreakLocations + +function testFunction() { + var {a, b, c = 4} = |_|obj1; + |_|({d: {a, b}, c} = obj2); + |_|({a, ...b} = obj1); +|R|} + +var obj1 = |_|{ + get a() { |_|return 1;|R| }, + get b() { |_|return 2;|R| }, +}; + +var obj2 = |_|{ + get c() { |_|return 3;|R| }, + get d() { |_|return obj1;|R| }, +}; +|R| + + +Running test: testStepping +Execution paused in testFunction: +function testFunction() { + var {a, b, c = 4} = #obj1; + ({d: {a, b}, c} = obj2); + +Execution paused in get a: +var obj1 = { + get a() { #return 1; }, + get b() { return 2; }, + +Called from testFunction: +function testFunction() { + var {#a, b, c = 4} = obj1; + ({d: {a, b}, c} = obj2); + +Execution paused in get b: + get a() { return 1; }, + get b() { #return 2; }, +}; + +Called from testFunction: +function testFunction() { + var {a, #b, c = 4} = obj1; + ({d: {a, b}, c} = obj2); + +Execution paused in testFunction: + var {a, b, c = 4} = obj1; + #({d: {a, b}, c} = obj2); + ({a, ...b} = obj1); + +Execution paused in get d: + get c() { return 3; }, + get d() { #return obj1; }, +}; + +Called from testFunction: + var {a, b, c = 4} = obj1; + ({d: #{a, b}, c} = obj2); + ({a, ...b} = obj1); + +Execution paused in get a: +var obj1 = { + get a() { #return 1; }, + get b() { return 2; }, + +Called from testFunction: + var {a, b, c = 4} = obj1; + ({d: {#a, b}, c} = obj2); + ({a, ...b} = obj1); + +Execution paused in get b: + get a() { return 1; }, + get b() { #return 2; }, +}; + +Called from testFunction: + var {a, b, c = 4} = obj1; + ({d: {a, #b}, c} = obj2); + ({a, ...b} = obj1); + +Execution paused in get c: +var obj2 = { + get c() { #return 3; }, + get d() { return obj1; }, + +Called from testFunction: + var {a, b, c = 4} = obj1; + ({d: {a, b}, #c} = obj2); + ({a, ...b} = obj1); + +Execution paused in testFunction: + ({d: {a, b}, c} = obj2); + #({a, ...b} = obj1); +} + +Execution paused in get a: +var obj1 = { + get a() { #return 1; }, + get b() { return 2; }, + +Called from testFunction: + ({d: {a, b}, c} = obj2); + ({#a, ...b} = obj1); +} + +Execution paused in get b: + get a() { return 1; }, + get b() { #return 2; }, +}; + +Called from testFunction: + ({d: {a, b}, c} = obj2); + ({a, ...#b} = obj1); +} + +Execution paused in testFunction: + ({a, ...b} = obj1); +#} + + +Resuming and finishing... diff --git a/deps/v8/test/inspector/debugger/object-destructuring.js b/deps/v8/test/inspector/debugger/object-destructuring.js new file mode 100644 index 00000000000000..5703ab9712e5bd --- /dev/null +++ b/deps/v8/test/inspector/debugger/object-destructuring.js @@ -0,0 +1,94 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
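+// Note on the breakable locations (a rough sketch, not protocol output):
+// `var {a, b, c = 4} = obj1` evaluates like the sequence
+//
+//   var tmp = obj1;                           // evaluate the RHS
+//   var a = tmp.a;                            // may invoke getter `a`
+//   var b = tmp.b;                            // may invoke getter `b`
+//   var c = tmp.c === undefined ? 4 : tmp.c;  // apply the default
+//
+// so the debugger can pause before each getter call, which is what the
+// stepping test below walks through.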
+ +let {session, contextGroup, Protocol} = + InspectorTest.start('Tests breakable locations in object destructuring.'); + +const source = ` +function testFunction() { + var {a, b, c = 4} = obj1; + ({d: {a, b}, c} = obj2); + ({a, ...b} = obj1); +} + +var obj1 = { + get a() { return 1; }, + get b() { return 2; }, +}; + +var obj2 = { + get c() { return 3; }, + get d() { return obj1; }, +}; +`; + +const url = 'test.js'; +contextGroup.addScript(source, 0, 0, url); +session.setupScriptMap(); + +InspectorTest.runAsyncTestSuite([ + async function testBreakLocations() { + let [, , {params: {scriptId}}] = await Promise.all([ + Protocol.Runtime.enable(), + Protocol.Debugger.enable(), + Protocol.Debugger.onceScriptParsed(), + ]); + let {result: {locations}} = await Protocol.Debugger.getPossibleBreakpoints( + {start: {lineNumber: 0, columnNumber: 0, scriptId}}); + await session.logBreakLocations(locations); + await Promise.all([ + Protocol.Debugger.disable(), + Protocol.Runtime.disable(), + ]); + }, + + async function testStepping() { + let [, , {params: {scriptId}}] = await Promise.all([ + Protocol.Runtime.enable(), + Protocol.Debugger.enable(), + Protocol.Debugger.onceScriptParsed(), + ]); + const {breakpointId} = await Protocol.Debugger.setBreakpoint({ + location: { + scriptId, + lineNumber: 2, + } + }); + const evalPromise = + Protocol.Runtime.evaluate({expression: 'testFunction()'}); + for (;;) { + const {method, params} = await Promise.race([ + evalPromise, + Protocol.Debugger.oncePaused(), + ]); + if (method !== 'Debugger.paused') { + break; + } + const callFrames = params.callFrames.filter( + callFrame => callFrame.location.scriptId === scriptId); + if (callFrames.length === 0) { + InspectorTest.log('Resuming and finishing...'); + await Protocol.Debugger.resume(); + } else { + const [{functionName, location}, ...callerFrames] = callFrames; + InspectorTest.log(`Execution paused in ${functionName}:`); + await session.logSourceLocation(location); + for (const {location, functionName} of callerFrames) { + InspectorTest.log(`Called from ${functionName}:`); + await session.logSourceLocation(location); + } + if (functionName === 'testFunction') { + await Protocol.Debugger.stepInto(); + } else { + await Protocol.Debugger.stepOut(); + } + } + } + await Promise.all([ + Protocol.Debugger.removeBreakpoint({breakpointId}), + Protocol.Debugger.disable(), + Protocol.Runtime.disable(), + ]); + } +]); diff --git a/deps/v8/test/inspector/debugger/object-preview-internal-properties-expected.txt b/deps/v8/test/inspector/debugger/object-preview-internal-properties-expected.txt index 56f992237a2ebd..1bdef8231b2523 100644 --- a/deps/v8/test/inspector/debugger/object-preview-internal-properties-expected.txt +++ b/deps/v8/test/inspector/debugger/object-preview-internal-properties-expected.txt @@ -159,6 +159,88 @@ expression: new WeakSet([{}]) ] +Running test: symbolsAsKeysInEntries +expression: new Map([[Symbol('key1'), 1]]) +{ + name : size + type : number + value : 1 +} +[[Entries]]: +[ + [0] : { + key : { + description : Symbol(key1) + overflow : false + properties : [ + ] + type : symbol + } + value : { + description : 1 + overflow : false + properties : [ + ] + type : number + } + } +] + +expression: new Set([Symbol('key2')]) +{ + name : size + type : number + value : 1 +} +[[Entries]]: +[ + [0] : { + value : { + description : Symbol(key2) + overflow : false + properties : [ + ] + type : symbol + } + } +] + +expression: new WeakMap([[Symbol('key3'), 2]]) +[[Entries]]: +[ + [0] : { + key : { + description : 
Symbol(key3) + overflow : false + properties : [ + ] + type : symbol + } + value : { + description : 2 + overflow : false + properties : [ + ] + type : number + } + } +] + +expression: new WeakSet([Symbol('key4')]) +[[Entries]]: +[ + [0] : { + value : { + description : Symbol(key4) + overflow : false + properties : [ + ] + type : symbol + } + } +] + + Running test: iteratorObject expression: (new Map([[1,2]])).entries() [[Entries]]: diff --git a/deps/v8/test/inspector/debugger/object-preview-internal-properties.js b/deps/v8/test/inspector/debugger/object-preview-internal-properties.js index f542683aa49159..d2b3a7816339e9 100644 --- a/deps/v8/test/inspector/debugger/object-preview-internal-properties.js +++ b/deps/v8/test/inspector/debugger/object-preview-internal-properties.js @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // -// Flags: --harmony-class-fields +// Flags: --harmony-symbol-as-weakmap-key let {session, contextGroup, Protocol} = InspectorTest.start("Check internal properties reported in object preview."); @@ -45,6 +45,15 @@ InspectorTest.runTestSuite([ .then(next); }, + function symbolsAsKeysInEntries(next) + { + checkExpression("new Map([[Symbol('key1'), 1]])") + .then(() => checkExpression("new Set([Symbol('key2')])")) + .then(() => checkExpression("new WeakMap([[Symbol('key3'), 2]])")) + .then(() => checkExpression("new WeakSet([Symbol('key4')])")) + .then(next); + }, + function iteratorObject(next) { checkExpression("(new Map([[1,2]])).entries()") diff --git a/deps/v8/test/inspector/debugger/set-script-source-debug-evaluate-closure-expected.txt b/deps/v8/test/inspector/debugger/set-script-source-debug-evaluate-closure-expected.txt new file mode 100644 index 00000000000000..c230516c66221f --- /dev/null +++ b/deps/v8/test/inspector/debugger/set-script-source-debug-evaluate-closure-expected.txt @@ -0,0 +1,16 @@ +Check that setScriptSource doesn't affect debug-evaluate block listing +{ + className : ReferenceError + description : ReferenceError: a is not defined at globalThis.foo (eval at i (:1:1), <anonymous>:1:27) at <anonymous>:1:12 + objectId : <objectId> + subtype : error + type : object +} +Debugger.setScriptSource: Ok +{ + className : ReferenceError + description : ReferenceError: a is not defined at globalThis.foo (eval at i (:1:1), <anonymous>:1:27) at <anonymous>:1:12 + objectId : <objectId> + subtype : error + type : object +} diff --git a/deps/v8/test/inspector/debugger/set-script-source-debug-evaluate-closure.js b/deps/v8/test/inspector/debugger/set-script-source-debug-evaluate-closure.js new file mode 100644 index 00000000000000..59b5fe1719f34d --- /dev/null +++ b/deps/v8/test/inspector/debugger/set-script-source-debug-evaluate-closure.js @@ -0,0 +1,64 @@ +// Copyright 2020 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +const {session, contextGroup, Protocol} = + InspectorTest.start('Check that setScriptSource doesn\'t affect debug-evaluate block listing'); + +const script = ` +// INSERT NEWLINES HERE +function f() { + let a = 3; () => a; // context allocated. + return function g() { + let a = 42; // stack-allocated. Shadowing context-allocated from f. + return function h() { + // Give h a context. 
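+      // (`x` is captured by an arrow function below, so it is
+      // context-allocated; that allocation is what actually gives `h`
+      // a context.)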
+ let x = 5; () => x; + return function i() { + debugger; + }; + }; + }; +} +(((f())())())(); +`; + +const updatedScript = script.replace('// INSERT NEWLINES HERE', '\n\n\n'); + +(async function test() { + await Protocol.Debugger.enable(); + await Protocol.Runtime.enable(); + const promise = Protocol.Debugger.oncePaused(); + + contextGroup.addScript(script); + + const { params: { callFrames: [{ callFrameId, functionLocation: { scriptId } }] } } = await promise; + + // Create a closure that returns `a` and stash it on the global. + await Protocol.Debugger.evaluateOnCallFrame({ + callFrameId, + expression: `globalThis['foo'] = () => a;` + }); + await Protocol.Debugger.resume(); + + // Expect a ReferenceError. + const { result: { result: result1 } } = await Protocol.Runtime.evaluate({ + expression: 'globalThis.foo();' + }); + InspectorTest.logMessage(result1); + + // Move function 'h' but don't change it. + const { result: { status } } = await Protocol.Debugger.setScriptSource({ + scriptId, + scriptSource: updatedScript, + }); + InspectorTest.log(`Debugger.setScriptSource: ${status}`); + + // Still expect a ReferenceError. + const { result: { result: result2 } } = await Protocol.Runtime.evaluate({ + expression: 'globalThis.foo();' + }); + InspectorTest.logMessage(result2); + + InspectorTest.completeTest(); +})(); diff --git a/deps/v8/test/inspector/debugger/step-into-expected.txt b/deps/v8/test/inspector/debugger/step-into-expected.txt index 351c0f4437a195..a2e274f9a51d7e 100644 --- a/deps/v8/test/inspector/debugger/step-into-expected.txt +++ b/deps/v8/test/inspector/debugger/step-into-expected.txt @@ -682,11 +682,6 @@ function testGenerator() { var gen = #idMaker(); return42(); -break at: - -function* idMaker#() { - yield 1; - break at: var gen = idMaker(); #return42(); diff --git a/deps/v8/test/inspector/debugger/stepping-async-generator-expected.txt b/deps/v8/test/inspector/debugger/stepping-async-generator-expected.txt index f46d0fd2f90a8b..031e1e61f6791d 100644 --- a/deps/v8/test/inspector/debugger/stepping-async-generator-expected.txt +++ b/deps/v8/test/inspector/debugger/stepping-async-generator-expected.txt @@ -1,18 +1,5 @@ Async generator stepping -Running test: testStepOverFromInitialYield -Setting breakpoint on implicit initial yield -Calling callGenerator() - -async function* generator#() { - var a = 42; - -Stepping over while paused on the initial yield -function callGenerator() { - return generator();# -} - - Running test: testStepIntoInitialYield Setting breakpoint on call to generator() Calling callGenerator() @@ -21,11 +8,6 @@ function callGenerator() { } Stepping into the generator() - -async function* generator#() { - var a = 42; - -Stepping into while paused on the initial yield function callGenerator() { return generator();# } diff --git a/deps/v8/test/inspector/debugger/stepping-async-generator.js b/deps/v8/test/inspector/debugger/stepping-async-generator.js index 37d3bbd577aaf4..55d370d950ea67 100644 --- a/deps/v8/test/inspector/debugger/stepping-async-generator.js +++ b/deps/v8/test/inspector/debugger/stepping-async-generator.js @@ -18,36 +18,6 @@ function callGenerator() { session.setupScriptMap(); InspectorTest.runAsyncTestSuite([ - async function testStepOverFromInitialYield() { - await Promise.all([Protocol.Debugger.enable(), Protocol.Runtime.enable()]); - InspectorTest.log(`Setting breakpoint on implicit initial yield`); - const {result: {breakpointId}} = await Protocol.Debugger.setBreakpointByUrl({ - url, - lineNumber: 1, - columnNumber: 0, - }) - 
InspectorTest.log(`Calling callGenerator()`); - const pausedPromise = Protocol.Debugger.oncePaused(); - const evalPromise = Protocol.Runtime.evaluate({expression: 'callGenerator()'}); - const {method, params} = await Promise.race([pausedPromise, evalPromise]); - if (method === 'Debugger.paused') { - await session.logSourceLocation(params.callFrames[0].location); - - InspectorTest.log('Stepping over while paused on the initial yield'); - const [{params: {callFrames:[{location}]}}] = await Promise.all([ - Protocol.Debugger.oncePaused(), - Protocol.Debugger.stepOver(), - ]); - await session.logSourceLocation(location); - - await Promise.all([Protocol.Debugger.resume(), evalPromise]); - } else { - InspectorTest.log('Did not pause'); - } - await Protocol.Debugger.removeBreakpoint({breakpointId}); - await Promise.all([Protocol.Debugger.disable(), Protocol.Runtime.disable()]); - }, - async function testStepIntoInitialYield() { await Promise.all([Protocol.Debugger.enable(), Protocol.Runtime.enable()]); InspectorTest.log(`Setting breakpoint on call to generator()`); @@ -70,13 +40,6 @@ InspectorTest.runAsyncTestSuite([ ]); await session.logSourceLocation(location); - InspectorTest.log('Stepping into while paused on the initial yield'); - ([{params: {callFrames:[{location}]}}] = await Promise.all([ - Protocol.Debugger.oncePaused(), - Protocol.Debugger.stepInto(), - ])); - await session.logSourceLocation(location); - await Promise.all([Protocol.Debugger.resume(), evalPromise]); } else { InspectorTest.log('Did not pause'); diff --git a/deps/v8/test/inspector/debugger/stepping-generator-expected.txt b/deps/v8/test/inspector/debugger/stepping-generator-expected.txt index 851668360a01b2..5fe4c51ff2ddbf 100644 --- a/deps/v8/test/inspector/debugger/stepping-generator-expected.txt +++ b/deps/v8/test/inspector/debugger/stepping-generator-expected.txt @@ -1,18 +1,5 @@ Generator stepping -Running test: testStepOverFromInitialYield -Setting breakpoint on implicit initial yield -Calling callGenerator() - -function* generator#() { - var a = 42; - -Stepping over while paused on the initial yield -function callGenerator() { - return generator();# -} - - Running test: testStepIntoInitialYield Setting breakpoint on call to generator() Calling callGenerator() @@ -21,11 +8,6 @@ function callGenerator() { } Stepping into the generator() - -function* generator#() { - var a = 42; - -Stepping into while paused on the initial yield function callGenerator() { return generator();# } diff --git a/deps/v8/test/inspector/debugger/stepping-generator-parameters-expected.txt b/deps/v8/test/inspector/debugger/stepping-generator-parameters-expected.txt index dfeed7ea2c889e..bc438215c1b2f4 100644 --- a/deps/v8/test/inspector/debugger/stepping-generator-parameters-expected.txt +++ b/deps/v8/test/inspector/debugger/stepping-generator-parameters-expected.txt @@ -1,18 +1,5 @@ Generator stepping with non-simple parameters -Running test: testStepOverFromInitialYield -Setting breakpoint on implicit initial yield -Calling callGenerator() - -function* generator(a = (x => x)(42)) #{ - yield a; - -Stepping over while paused on the initial yield -function callGenerator() { - return generator(1);# -} - - Running test: testStepIntoInitialYield Setting breakpoint on call to generator() Calling callGenerator() @@ -21,11 +8,6 @@ function callGenerator() { } Stepping into the generator() - -function* generator(a = (x => x)(42)) #{ - yield a; - -Stepping into while paused on the initial yield function callGenerator() { return generator(1);# } diff --git 
a/deps/v8/test/inspector/debugger/stepping-generator-parameters.js b/deps/v8/test/inspector/debugger/stepping-generator-parameters.js index 6cc49ce167940f..e79903a3aa0585 100644 --- a/deps/v8/test/inspector/debugger/stepping-generator-parameters.js +++ b/deps/v8/test/inspector/debugger/stepping-generator-parameters.js @@ -17,36 +17,6 @@ function callGenerator() { session.setupScriptMap(); InspectorTest.runAsyncTestSuite([ - async function testStepOverFromInitialYield() { - await Promise.all([Protocol.Debugger.enable(), Protocol.Runtime.enable()]); - InspectorTest.log(`Setting breakpoint on implicit initial yield`); - const {result: {breakpointId}} = await Protocol.Debugger.setBreakpointByUrl({ - url, - lineNumber: 1, - columnNumber: 38, - }) - InspectorTest.log(`Calling callGenerator()`); - const pausedPromise = Protocol.Debugger.oncePaused(); - const evalPromise = Protocol.Runtime.evaluate({expression: 'callGenerator()'}); - const {method, params} = await Promise.race([pausedPromise, evalPromise]); - if (method === 'Debugger.paused') { - await session.logSourceLocation(params.callFrames[0].location); - - InspectorTest.log('Stepping over while paused on the initial yield'); - const [{params: {callFrames:[{location}]}}] = await Promise.all([ - Protocol.Debugger.oncePaused(), - Protocol.Debugger.stepOver(), - ]); - await session.logSourceLocation(location); - - await Promise.all([Protocol.Debugger.resume(), evalPromise]); - } else { - InspectorTest.log('Did not pause'); - } - await Protocol.Debugger.removeBreakpoint({breakpointId}); - await Promise.all([Protocol.Debugger.disable(), Protocol.Runtime.disable()]); - }, - async function testStepIntoInitialYield() { await Promise.all([Protocol.Debugger.enable(), Protocol.Runtime.enable()]); InspectorTest.log(`Setting breakpoint on call to generator()`); @@ -69,13 +39,6 @@ InspectorTest.runAsyncTestSuite([ ]); await session.logSourceLocation(location); - InspectorTest.log('Stepping into while paused on the initial yield'); - ([{params: {callFrames:[{location}]}}] = await Promise.all([ - Protocol.Debugger.oncePaused(), - Protocol.Debugger.stepInto(), - ])); - await session.logSourceLocation(location); - await Promise.all([Protocol.Debugger.resume(), evalPromise]); } else { InspectorTest.log('Did not pause'); diff --git a/deps/v8/test/inspector/debugger/stepping-generator.js b/deps/v8/test/inspector/debugger/stepping-generator.js index 63134c01e25f74..b7cb9158f3442c 100644 --- a/deps/v8/test/inspector/debugger/stepping-generator.js +++ b/deps/v8/test/inspector/debugger/stepping-generator.js @@ -18,36 +18,6 @@ function callGenerator() { session.setupScriptMap(); InspectorTest.runAsyncTestSuite([ - async function testStepOverFromInitialYield() { - await Promise.all([Protocol.Debugger.enable(), Protocol.Runtime.enable()]); - InspectorTest.log(`Setting breakpoint on implicit initial yield`); - const {result: {breakpointId}} = await Protocol.Debugger.setBreakpointByUrl({ - url, - lineNumber: 1, - columnNumber: 0, - }) - InspectorTest.log(`Calling callGenerator()`); - const pausedPromise = Protocol.Debugger.oncePaused(); - const evalPromise = Protocol.Runtime.evaluate({expression: 'callGenerator()'}); - const {method, params} = await Promise.race([pausedPromise, evalPromise]); - if (method === 'Debugger.paused') { - await session.logSourceLocation(params.callFrames[0].location); - - InspectorTest.log('Stepping over while paused on the initial yield'); - const [{params: {callFrames:[{location}]}}] = await Promise.all([ - Protocol.Debugger.oncePaused(), - 
Protocol.Debugger.stepOver(), - ]); - await session.logSourceLocation(location); - - await Promise.all([Protocol.Debugger.resume(), evalPromise]); - } else { - InspectorTest.log('Did not pause'); - } - await Protocol.Debugger.removeBreakpoint({breakpointId}); - await Promise.all([Protocol.Debugger.disable(), Protocol.Runtime.disable()]); - }, - async function testStepIntoInitialYield() { await Promise.all([Protocol.Debugger.enable(), Protocol.Runtime.enable()]); InspectorTest.log(`Setting breakpoint on call to generator()`); @@ -70,13 +40,6 @@ InspectorTest.runAsyncTestSuite([ ]); await session.logSourceLocation(location); - InspectorTest.log('Stepping into while paused on the initial yield'); - ([{params: {callFrames:[{location}]}}] = await Promise.all([ - Protocol.Debugger.oncePaused(), - Protocol.Debugger.stepInto(), - ])); - await session.logSourceLocation(location); - await Promise.all([Protocol.Debugger.resume(), evalPromise]); } else { InspectorTest.log('Did not pause'); diff --git a/deps/v8/test/inspector/debugger/suspended-generator-scopes.js b/deps/v8/test/inspector/debugger/suspended-generator-scopes.js index 55a1fd57ca9886..4ed47965832dd4 100644 --- a/deps/v8/test/inspector/debugger/suspended-generator-scopes.js +++ b/deps/v8/test/inspector/debugger/suspended-generator-scopes.js @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +// Flags: --no-experimental-remove-internal-scopes-property + let {session, contextGroup, Protocol} = InspectorTest.start('Tests that suspended generators produce scopes'); contextGroup.addScript(` diff --git a/deps/v8/test/inspector/debugger/wasm-gc-anyref-expected.txt b/deps/v8/test/inspector/debugger/wasm-gc-anyref-expected.txt index 2c4d3f6c429340..4b627d5372dbf6 100644 --- a/deps/v8/test/inspector/debugger/wasm-gc-anyref-expected.txt +++ b/deps/v8/test/inspector/debugger/wasm-gc-anyref-expected.txt @@ -4,18 +4,18 @@ Running test: test Calling instantiate function. Waiting for wasm script to be parsed. Got wasm script! -Setting breakpoint Module instantiated. +Tables populated. +Setting breakpoint { - columnNumber : 138 + columnNumber : 246 lineNumber : 0 scriptId : <scriptId> } -Table populated. 
Paused: -Script wasm://wasm/0e116a66 byte offset 138: Wasm opcode 0x01 (kExprNop) +Script wasm://wasm/739f5f0a byte offset 246: Wasm opcode 0x01 (kExprNop) Scope: -at $main (0:138): +at $main (0:246): - scope (wasm-expression-stack): stack: - scope (local): @@ -24,11 +24,15 @@ at $main (0:138): $anyref_local_i31: null (anyref) $anyref_local_null: null (anyref) - scope (module): - instance: exports: "exported_ref_table" (Table), "fill_ref_table" (Function), "main" (Function) + instance: exports: "exported_ref_table" (Table), "exported_func_table" (Table), "fill_tables" (Function), "main" (Function) module: Module - functions: "$fill_ref_table": (Function), "$main": (Function) + functions: "$my_func": (Function), "$fill_tables": (Function), "$main": (Function) + globals: "$global0": function $my_func() { [native code] } (funcref) tables: + $import.any_table: 0: Array(2) (anyref), 1: Struct ((ref $type0)), 2: undefined (anyref) + $import.func_table: 0: function () { [native code] } (funcref), 1: function $my_func() { [native code] } (funcref), 2: undefined (funcref) $exported_ref_table: 0: Struct ((ref $type0)), 1: Array ((ref $type1)), 2: undefined (anyref), 3: undefined (anyref) + $exported_func_table: 0: function external_fct() { [native code] } (funcref), 1: function $my_func() { [native code] } (funcref), 2: undefined (funcref) at (anonymous) (0:17): - scope (global): -- skipped globals diff --git a/deps/v8/test/inspector/debugger/wasm-gc-anyref.js b/deps/v8/test/inspector/debugger/wasm-gc-anyref.js index 0a1eee880cb8e2..04750908cfa743 100644 --- a/deps/v8/test/inspector/debugger/wasm-gc-anyref.js +++ b/deps/v8/test/inspector/debugger/wasm-gc-anyref.js @@ -2,7 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// Flags: --experimental-wasm-gc +// Flags: --experimental-wasm-gc --experimental-wasm-typed-funcref +// Flags: --experimental-wasm-type-reflection utils.load('test/inspector/wasm-inspector-test.js'); @@ -17,8 +18,9 @@ let breakpointLocation = -1; InspectorTest.runAsyncTestSuite([ async function test() { - instantiateWasm(); + let wasm_promise = instantiateWasm(); let scriptIds = await waitForWasmScripts(); + await wasm_promise; // Make sure the instantiation is finished. // Set a breakpoint. InspectorTest.log('Setting breakpoint'); @@ -66,10 +68,20 @@ async function instantiateWasm() { var builder = new WasmModuleBuilder(); let struct_type = builder.addStruct([makeField(kWasmI32, false)]); let array_type = builder.addArray(kWasmI32); + let imported_ref_table = + builder.addImportedTable('import', 'any_table', 3, 3, kWasmAnyRef); + let imported_func_table = + builder.addImportedTable('import', 'func_table', 3, 3, kWasmFuncRef); let ref_table = builder.addTable(kWasmAnyRef, 4) .exportAs('exported_ref_table'); + let func_table = builder.addTable(kWasmFuncRef, 3) + .exportAs('exported_func_table'); - builder.addFunction('fill_ref_table', kSig_v_v) + let func = builder.addFunction('my_func', kSig_v_v).addBody([kExprNop]); + // Make the function "declared". + builder.addGlobal(kWasmFuncRef, false, [kExprRefFunc, func.index]); + + builder.addFunction('fill_tables', kSig_v_v) .addBody([ ...wasmI32Const(0), ...wasmI32Const(123), kGCPrefix, kExprStructNew, struct_type, kExprTableSet, ref_table.index, @@ -83,6 +95,21 @@ async function instantiateWasm() { // apart. // ...wasmI32Const(2), ...wasmI32Const(30), // kGCPrefix, kExprI31New, kExprTableSet, ref_table.index, + + // Fill imported any table. 
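+      // (Each store below has the same shape: push the i32 slot index,
+      // push the reference value, then kExprTableSet with the target
+      // table as its immediate operand.)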
+ ...wasmI32Const(1), + ...wasmI32Const(123), kGCPrefix, kExprStructNew, struct_type, + kExprTableSet, imported_ref_table, + + // Fill imported func table. + ...wasmI32Const(1), + kExprRefFunc, func.index, + kExprTableSet, imported_func_table, + + // Fill func table. + ...wasmI32Const(1), + kExprRefFunc, func.index, + kExprTableSet, func_table.index, ]).exportFunc(); let body = [ @@ -114,11 +141,32 @@ async function instantiateWasm() { breakpointLocation = main.body_offset + body.length - 1; InspectorTest.log('Calling instantiate function.'); - await WasmInspectorTest.instantiate(module_bytes); + let imports = `{'import' : { + 'any_table': (() => { + let js_table = + new WebAssembly.Table({element: 'anyref', initial: 3, maximum: 3}); + js_table.set(0, ['JavaScript', 'value']); + return js_table; + })(), + 'func_table': (() => { + let func_table = + new WebAssembly.Table({element: 'anyfunc', initial: 3, maximum: 3}); + func_table.set(0, new WebAssembly.Function( + {parameters:['i32', 'i32'], results: ['i32']}, + function /*anonymous*/ (a, b) { return a * b; })); + return func_table; + })(), + }}`; + await WasmInspectorTest.instantiate(module_bytes, 'instance', imports); InspectorTest.log('Module instantiated.'); await WasmInspectorTest.evalWithUrl( - 'instance.exports.fill_ref_table()', 'fill_ref_table'); - InspectorTest.log('Table populated.'); + 'instance.exports.fill_tables();', 'fill_tables'); + await WasmInspectorTest.evalWithUrl( + `instance.exports.exported_func_table.set(0, new WebAssembly.Function( + {parameters:['i32', 'i32'], results: ['i32']}, + function external_fct(a, b) { return a * b; }))`, + 'add_func_to_table'); + InspectorTest.log('Tables populated.'); } async function waitForWasmScripts() { diff --git a/deps/v8/test/inspector/inspector-test.cc b/deps/v8/test/inspector/inspector-test.cc index 1784e48666b8b7..87c0066cf22784 100644 --- a/deps/v8/test/inspector/inspector-test.cc +++ b/deps/v8/test/inspector/inspector-test.cc @@ -512,6 +512,9 @@ class InspectorExtension : public InspectorIsolateData::SetupGlobalTask { inspector->Set(isolate, "callbackForTests", v8::FunctionTemplate::New( isolate, &InspectorExtension::CallbackForTests)); + inspector->Set(isolate, "runNestedMessageLoop", + v8::FunctionTemplate::New( + isolate, &InspectorExtension::RunNestedMessageLoop)); global->Set(isolate, "inspector", inspector); } @@ -788,6 +791,15 @@ class InspectorExtension : public InspectorIsolateData::SetupGlobalTask { callback->Call(context, v8::Undefined(isolate), 0, nullptr); args.GetReturnValue().Set(result.ToLocalChecked()); } + + static void RunNestedMessageLoop( + const v8::FunctionCallbackInfo<v8::Value>& args) { + v8::Isolate* isolate = args.GetIsolate(); + v8::Local<v8::Context> context = isolate->GetCurrentContext(); + InspectorIsolateData* data = InspectorIsolateData::FromContext(context); + + data->task_runner()->RunMessageLoop(true); + } }; int InspectorTestMain(int argc, char* argv[]) { diff --git a/deps/v8/test/inspector/inspector.status b/deps/v8/test/inspector/inspector.status index a00bc3f450b944..b8bc9e2600e610 100644 --- a/deps/v8/test/inspector/inspector.status +++ b/deps/v8/test/inspector/inspector.status @@ -60,16 +60,6 @@ 'runtime/get-properties': [SKIP], }], # not has_webassembly or variant == jitless -############################################################################## -['lite_mode or variant == jitless', { - # Lite mode does not allocate feedback vector. 
- 'type-profiler/type-profile-start-stop': [SKIP], - 'type-profiler/type-profile': [SKIP], - 'type-profiler/type-profile-with-to-string-tag': [SKIP], - 'type-profiler/type-profile-with-classes': [SKIP], - 'type-profiler/type-profile-disable': [SKIP], -}], # 'lite_mode or variant == jitless' - ############################################################################## ['variant == jitless', { # https://crbug.com/v8/7777 @@ -176,7 +166,6 @@ 'runtime/context-destroyed-on-context-collected': [SKIP], 'runtime/evaluate-async': [SKIP], 'runtime/internal-properties-entries': [SKIP], - 'type-profiler/type-profile-start-stop': [SKIP], }], # gc_stress ############################################################################## @@ -281,7 +270,8 @@ 'debugger/continue-to-location-target-call-frames': [SKIP], 'debugger/destroy-in-break-program': [SKIP], 'debugger/destroy-in-break-program2': [SKIP], - 'debugger/destructuring': [SKIP], + 'debugger/array-destructuring': [SKIP], + 'debugger/object-destructuring': [SKIP], 'debugger/disable-agent-on-pause': [SKIP], 'debugger/doesnt-step-into-injected-script': [SKIP], 'debugger/es6-module-liveedit': [SKIP], @@ -528,11 +518,6 @@ 'sessions/runtime-evaluate': [SKIP], 'sessions/runtime-evaluate-exception': [SKIP], 'sessions/runtime-remote-object': [SKIP], - 'type-profiler/type-profile': [SKIP], - 'type-profiler/type-profile-disable': [SKIP], - 'type-profiler/type-profile-start-stop': [SKIP], - 'type-profiler/type-profile-with-classes': [SKIP], - 'type-profiler/type-profile-with-to-string-tag': [SKIP], 'regress/regress-crbug-1195927': [SKIP], }], # third_party_heap @@ -553,4 +538,9 @@ 'debugger/value-unavailable-scopes': [SKIP], }], # lite_mode or variant in (nooptimization, jitless, assert_types) +############################################################################## +['single_generation', { + 'heap-profiler/sampling-heap-profiler-flags': [SKIP], +}], # single_generation + ] diff --git a/deps/v8/test/inspector/regress/regress-crbug-1246896.js b/deps/v8/test/inspector/regress/regress-crbug-1246896.js index 6c70d44362ae51..822a194ce1852a 100644 --- a/deps/v8/test/inspector/regress/regress-crbug-1246896.js +++ b/deps/v8/test/inspector/regress/regress-crbug-1246896.js @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +// Flags: --no-experimental-remove-internal-scopes-property + const {Protocol} = InspectorTest.start('Don\'t crash when getting the properties of a native function'); (async () => { diff --git a/deps/v8/test/inspector/regress/regress-crbug-1366843-expected.txt b/deps/v8/test/inspector/regress/regress-crbug-1366843-expected.txt new file mode 100644 index 00000000000000..9e3f2ec0294db9 --- /dev/null +++ b/deps/v8/test/inspector/regress/regress-crbug-1366843-expected.txt @@ -0,0 +1 @@ +Don't crash when injected script dies during Promise.then diff --git a/deps/v8/test/inspector/regress/regress-crbug-1366843.js b/deps/v8/test/inspector/regress/regress-crbug-1366843.js new file mode 100644 index 00000000000000..0327a43e6172eb --- /dev/null +++ b/deps/v8/test/inspector/regress/regress-crbug-1366843.js @@ -0,0 +1,26 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +const { Protocol, contextGroup } = InspectorTest.start('Don\'t crash when injected script dies during Promise.then'); + +(async () => { + // Overwrite 'Promise#then' to block. 
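+  // With this getter installed, every plain object looks like a thenable:
+  // the first `then` lookup during promise resolution fires the getter,
+  // which re-enters the message loop and never returns. That is the state
+  // the original crash needed.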
+ contextGroup.addScript(` + Object.prototype.__defineGetter__('then', function () { + inspector.runNestedMessageLoop(); // Doesn't return. + }); + `); + + // Trigger an evaluation that installs the inspector promise handler. Note + // that the expression is somewhat carefully crafted so we stop in the right + // micro task. + Protocol.Runtime.evaluate({ + expression: `(() => ({ foo: 42 }))()`, + awaitPromise: true, + }); + + contextGroup.reset(); + + InspectorTest.completeTest(); +})(); diff --git a/deps/v8/test/inspector/runtime/evaluate-repl-mode-broken-thenable-expected.txt b/deps/v8/test/inspector/runtime/evaluate-repl-mode-broken-thenable-expected.txt new file mode 100644 index 00000000000000..c64abf7b04d899 --- /dev/null +++ b/deps/v8/test/inspector/runtime/evaluate-repl-mode-broken-thenable-expected.txt @@ -0,0 +1,6 @@ +Tests that REPL mode still works even with a broken Promise.prototype.then +{ + description : 42 + type : number + value : 42 +} diff --git a/deps/v8/test/inspector/runtime/evaluate-repl-mode-broken-thenable.js b/deps/v8/test/inspector/runtime/evaluate-repl-mode-broken-thenable.js new file mode 100644 index 00000000000000..27393922d3e13a --- /dev/null +++ b/deps/v8/test/inspector/runtime/evaluate-repl-mode-broken-thenable.js @@ -0,0 +1,20 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +const {contextGroup, Protocol} = InspectorTest.start( + "Tests that REPL mode still works even with a broken Promise.prototype.then"); + +(async function() { + contextGroup.addScript(` + Promise.prototype.then = () => {throw Error('you shall not evaluate')}; + `); + + const { result: { result }} = await Protocol.Runtime.evaluate({ + expression: '42', + replMode: true, + }); + InspectorTest.logMessage(result); + + InspectorTest.completeTest(); +})(); diff --git a/deps/v8/test/inspector/runtime/function-scopes.js b/deps/v8/test/inspector/runtime/function-scopes.js index bda069bd9aca2a..c382ccda472c9f 100644 --- a/deps/v8/test/inspector/runtime/function-scopes.js +++ b/deps/v8/test/inspector/runtime/function-scopes.js @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +// Flags: --no-experimental-remove-internal-scopes-property + let {session, contextGroup, Protocol} = InspectorTest.start('Checks [[Scopes]] for functions'); contextGroup.addScript(` diff --git a/deps/v8/test/inspector/runtime/internal-properties.js b/deps/v8/test/inspector/runtime/internal-properties.js index b4b0bc47fbed91..3e3ce4a197aec9 100644 --- a/deps/v8/test/inspector/runtime/internal-properties.js +++ b/deps/v8/test/inspector/runtime/internal-properties.js @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +// Flags: --no-experimental-remove-internal-scopes-property + let {session, contextGroup, Protocol} = InspectorTest.start('Checks internal properties in Runtime.getProperties output'); contextGroup.addScript(` diff --git a/deps/v8/test/inspector/type-profiler/type-profile-disable-expected.txt b/deps/v8/test/inspector/type-profiler/type-profile-disable-expected.txt deleted file mode 100644 index b70acbac0ecfd8..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-disable-expected.txt +++ /dev/null @@ -1,9 +0,0 @@ -Turn Profiler.startTypeProfile on and off. 
- -function g(/*Object*/a, /*Array*/b, /*null*/c) { - return 'bye'; -/*string*/}; -g({}, [], null); - -[ -] \ No newline at end of file diff --git a/deps/v8/test/inspector/type-profiler/type-profile-disable.js b/deps/v8/test/inspector/type-profiler/type-profile-disable.js deleted file mode 100644 index e378c54e58602c..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-disable.js +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2017 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -const source = - ` -function g(a, b, c) { - return 'bye'; -}; -g({}, [], null); -`; - -let {session, contextGroup, Protocol} = InspectorTest.start("Turn " + - "Profiler.startTypeProfile on and off."); - -(async function testTypeProfile() { - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ - expression: source, - sourceURL: arguments.callee.name, persistScript: true - }); - await Protocol.Profiler.enable(); - // Start, run, take. - await Protocol.Profiler.startTypeProfile(); - Protocol.Runtime.runScript({scriptId}); - - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - session.logTypeProfile(typeProfiles.result.result[0], - source); - - // This should delete all data. - Protocol.Profiler.stopTypeProfile(); - - await Protocol.Profiler.startTypeProfile(); - typeProfiles = await Protocol.Profiler.takeTypeProfile(); - - // Should be empty because no code was run since start. - InspectorTest.logMessage(typeProfiles.result.result); - - Protocol.Profiler.stopTypeProfile(); - - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - InspectorTest.completeTest(); -})(); diff --git a/deps/v8/test/inspector/type-profiler/type-profile-expected.txt b/deps/v8/test/inspector/type-profiler/type-profile-expected.txt deleted file mode 100644 index 04fa128e8ee0d1..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-expected.txt +++ /dev/null @@ -1,9 +0,0 @@ -Test collecting type profile data with Profiler.takeTypeProfile. - -function f(/*Object, number, undefined*/a, /*Array, number, null*/b, /*boolean, Object, symbol*/c) { - return 'bye'; -/*string*/}; -f({}, [], true); -f(3, 2.3, {a: 42}); -f(undefined, null, Symbol('hello')); -/*string*/ diff --git a/deps/v8/test/inspector/type-profiler/type-profile-start-stop-expected.txt b/deps/v8/test/inspector/type-profiler/type-profile-start-stop-expected.txt deleted file mode 100644 index e04a4ec3df0681..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-start-stop-expected.txt +++ /dev/null @@ -1,51 +0,0 @@ -Turn Profiler.startTypeProfile on and off. - -Running test: testTypeProfile - -function g(/*Object*/a, /*Array*/b, /*null*/c) { - return 'first'; -/*string*/}; -g({}, [], null); - - -Running test: testTypeProfileFromDifferentSource - -function f(/*null*/a) { - return 'second'; -/*string*/}; -f(null); - - -Running test: testStopTypeProfileDeletesFeedback -[ -] - -Running test: testTypeProfileWithoutStartingItFirst -Type profile has not been started. - -Running test: testTypeProfileAfterStoppingIt -Type profile has not been started. 
- -Running test: testStartTypeProfileAfterRunning -{ - id : <messageId> - result : { - result : [ - ] - } -} - -Running test: testTypeProfileForTwoSources - -function g(/*Object*/a, /*Array*/b, /*null*/c) { - return 'first'; -/*string*/}; -g({}, [], null); - - -function f(/*null*/a) { - return 'second'; -/*string*/}; -f(null); - -Running test: testStopTwice diff --git a/deps/v8/test/inspector/type-profiler/type-profile-start-stop.js b/deps/v8/test/inspector/type-profiler/type-profile-start-stop.js deleted file mode 100644 index 88a2ad13bf995d..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-start-stop.js +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright 2017 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -const source1 = - ` -function g(a, b, c) { - return 'first'; -}; -g({}, [], null); -`; - -const source2 = - ` -function f(a) { - return 'second'; -}; -f(null); -`; - -let {session, contextGroup, Protocol} = InspectorTest.start("Turn " + - "Profiler.startTypeProfile on and off."); - -InspectorTest.runAsyncTestSuite([ - async function testTypeProfile() { - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ - expression: source1, - sourceURL: arguments.callee.name, persistScript: true - }); - await Protocol.Profiler.enable(); - - // Start, run, take. - await Protocol.Profiler.startTypeProfile(); - Protocol.Runtime.runScript({scriptId}); - - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - await session.logTypeProfile(typeProfiles.result.result[0], - source1); - - Protocol.Profiler.stopTypeProfile(); - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - }, - async function testTypeProfileFromDifferentSource() { - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ - expression: source2, - sourceURL: arguments.callee.name, persistScript: true - }); - await Protocol.Profiler.enable(); - - // Start, run different script, take. - await Protocol.Profiler.startTypeProfile(); - Protocol.Runtime.runScript({scriptId}); - - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - await session.logTypeProfile(typeProfiles.result.result[0], - source2); - - Protocol.Profiler.stopTypeProfile(); - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - }, - async function testStopTypeProfileDeletesFeedback() { - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ - expression: source1, - sourceURL: arguments.callee.name, persistScript: true - }); - await Protocol.Profiler.enable(); - - // Start, run, stop. - await Protocol.Profiler.startTypeProfile(); - Protocol.Runtime.runScript({scriptId}); - await Protocol.Profiler.stopTypeProfile(); - - // Start, take. Should be empty, because no code was run. 
- await Protocol.Profiler.startTypeProfile(); - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - InspectorTest.logMessage(typeProfiles.result.result); - await Protocol.Profiler.stopTypeProfile(); - - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - }, - async function testTypeProfileWithoutStartingItFirst() { - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ expression: source1, - sourceURL: arguments.callee.name, persistScript: true }); - Protocol.Runtime.runScript({ scriptId }); - await Protocol.Profiler.enable(); - - // This should return an error because type profile was never started. - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - InspectorTest.logObject(typeProfiles.error.message); - - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - }, - async function testTypeProfileAfterStoppingIt() { - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ expression: source1, - sourceURL: arguments.callee.name, persistScript: true }); - Protocol.Runtime.runScript({ scriptId }); - await Protocol.Profiler.enable(); - await Protocol.Profiler.startTypeProfile(); - - // Make sure that this turns off type profile. - await Protocol.Profiler.stopTypeProfile(); - - // This should return an error because type profile was stopped. - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - InspectorTest.logObject(typeProfiles.error.message); - - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - }, - async function testStartTypeProfileAfterRunning() { - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ - expression: source1, - sourceURL: arguments.callee.name, persistScript: true - }); - Protocol.Runtime.runScript({scriptId}); - - await Protocol.Profiler.enable(); - await Protocol.Profiler.startTypeProfile(); - - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - - // This should be empty because type profile was started after compilation. - // Only the outer script is annotated with return value "string" because - // that does not depend on runScript(). - InspectorTest.logMessage(typeProfiles); - - Protocol.Profiler.stopTypeProfile(); - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - }, - async function testTypeProfileForTwoSources() { - Protocol.Runtime.enable(); - let {result: {scriptId: scriptId1}} = await Protocol.Runtime.compileScript({ - expression: source1, - sourceURL: arguments.callee.name, persistScript: true - }); - let {result: {scriptId: scriptId2}} = await Protocol.Runtime.compileScript({ - expression: source2, - sourceURL: arguments.callee.name, persistScript: true - }); - await Protocol.Profiler.enable(); - - // Start, run different script, take. 
- await Protocol.Profiler.startTypeProfile(); - Protocol.Runtime.runScript({scriptId: scriptId1}); - Protocol.Runtime.runScript({scriptId: scriptId2}); - - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - await session.logTypeProfile(typeProfiles.result.result[0], - source1); - await session.logTypeProfile(typeProfiles.result.result[1], - source2); - - Protocol.Profiler.stopTypeProfile(); - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - }, - async function testStopTwice() { - Protocol.Runtime.enable(); - await Protocol.Profiler.enable(); - await Protocol.Profiler.stopTypeProfile(); - await Protocol.Profiler.stopTypeProfile(); - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - }, -]); diff --git a/deps/v8/test/inspector/type-profiler/type-profile-with-classes-expected.txt b/deps/v8/test/inspector/type-profiler/type-profile-with-classes-expected.txt deleted file mode 100644 index 8c540617f1df69..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-with-classes-expected.txt +++ /dev/null @@ -1,16 +0,0 @@ -Test collecting type profile data with Profiler.takeTypeProfile. - -function f(/*number*/n) { -/*undefined*/}; -f(5); -function g(/*Object, number*/a, /*Array, number*/b, /*Flower, Object*/c) { - return 'bye'; -/*string*/}; -/*undefined*/class Tree {}; -/*Flower*/class Flower extends Tree{}; -var f = new Flower(); -f.constructor = {}; -f.constructor.name = "Not a flower."; -g({}, [], f); -g(3, 2.3, {a: 42}); -/*string*/ diff --git a/deps/v8/test/inspector/type-profiler/type-profile-with-classes.js b/deps/v8/test/inspector/type-profiler/type-profile-with-classes.js deleted file mode 100644 index 0e75b7db09ed3d..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-with-classes.js +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2017 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -const source = - ` -function f(n) { -}; -f(5); -function g(a, b, c) { - return 'bye'; -}; -class Tree {}; -class Flower extends Tree{}; -var f = new Flower(); -f.constructor = {}; -f.constructor.name = "Not a flower."; -g({}, [], f); -g(3, 2.3, {a: 42}); -`; - -let {session, contextGroup, Protocol} = InspectorTest.start("Test collecting type profile data with Profiler.takeTypeProfile."); - -(async function testTypeProfile(next) { - await Protocol.Profiler.enable(); - await Protocol.Profiler.startTypeProfile(); - - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ expression: source, - sourceURL: arguments.callee.name, persistScript: true }); - - Protocol.Runtime.runScript({ scriptId }); - - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - await session.logTypeProfile(typeProfiles.result.result[0], - source); - - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - InspectorTest.completeTest(); -})(); diff --git a/deps/v8/test/inspector/type-profiler/type-profile-with-to-string-tag-expected.txt b/deps/v8/test/inspector/type-profiler/type-profile-with-to-string-tag-expected.txt deleted file mode 100644 index 16999557266e31..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-with-to-string-tag-expected.txt +++ /dev/null @@ -1,17 +0,0 @@ -Test collecting type profile data with Profiler.takeTypeProfile. 
- -function g(/*Object, number*/a, /*Array, number*/b, /*Dog, Object*/c) { - return 'bye'; -/*string*/}; -/*undefined*/class Tree {}; -/*Flower*/class Flower extends Tree{}; -var f = new Flower(); -// We store the type when a variable is used. If a toStringTag is -// changes the type, we want to collect that changed feedback. -// This tests ensures that we collect that information rather than -// for example infer the types from the internal map, which wouldn't -// know about a toStringTag. -f[Symbol.toStringTag] = 'Dog'; -g({}, [], f); -g(3, 2.3, {a: 42}); -/*string*/ diff --git a/deps/v8/test/inspector/type-profiler/type-profile-with-to-string-tag.js b/deps/v8/test/inspector/type-profiler/type-profile-with-to-string-tag.js deleted file mode 100644 index d6d5b6d53819b2..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile-with-to-string-tag.js +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2017 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -const source = - ` -function g(a, b, c) { - return 'bye'; -}; -class Tree {}; -class Flower extends Tree{}; -var f = new Flower(); -// We store the type when a variable is used. If a toStringTag is -// changes the type, we want to collect that changed feedback. -// This tests ensures that we collect that information rather than -// for example infer the types from the internal map, which wouldn't -// know about a toStringTag. -f[Symbol.toStringTag] = 'Dog'; -g({}, [], f); -g(3, 2.3, {a: 42}); -`; - -let {session, contextGroup, Protocol} = InspectorTest.start("Test collecting " + - "type profile data with Profiler.takeTypeProfile."); - -(async function testTypeProfile() { - await Protocol.Profiler.enable(); - await Protocol.Profiler.startTypeProfile(); - - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ expression: source, - sourceURL: arguments.callee.name, persistScript: true }); - Protocol.Runtime.runScript({ scriptId }); - await Protocol.Profiler.startTypeProfile(); - - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - await session.logTypeProfile(typeProfiles.result.result[0], - source); - - Protocol.Profiler.stopTypeProfile(); - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - InspectorTest.completeTest(); -})(); diff --git a/deps/v8/test/inspector/type-profiler/type-profile.js b/deps/v8/test/inspector/type-profiler/type-profile.js deleted file mode 100644 index c8ae080b1b40a0..00000000000000 --- a/deps/v8/test/inspector/type-profiler/type-profile.js +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2017 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -const source = - ` -function f(a, b, c) { - return 'bye'; -}; -f({}, [], true); -f(3, 2.3, {a: 42}); -f(undefined, null, Symbol('hello')); -`; - -let {session, contextGroup, Protocol} = InspectorTest.start("Test collecting type profile data with Profiler.takeTypeProfile."); - -(async function testTypeProfile() { - await Protocol.Profiler.enable(); - await Protocol.Profiler.startTypeProfile(); - - Protocol.Runtime.enable(); - let {result: {scriptId}} = await Protocol.Runtime.compileScript({ - expression: source, - sourceURL: arguments.callee.name, - persistScript: true - }); - Protocol.Runtime.runScript({ scriptId }); - - let typeProfiles = await Protocol.Profiler.takeTypeProfile(); - await session.logTypeProfile(typeProfiles.result.result[0], - source); - - Protocol.Profiler.stopTypeProfile(); - Protocol.Profiler.disable(); - await Protocol.Runtime.disable(); - InspectorTest.completeTest(); -})(); diff --git a/deps/v8/test/intl/number-format/use-grouping-v3.js b/deps/v8/test/intl/number-format/use-grouping-v3.js index 83b0062031c9f5..0b1f7686d5ac66 100644 --- a/deps/v8/test/intl/number-format/use-grouping-v3.js +++ b/deps/v8/test/intl/number-format/use-grouping-v3.js @@ -11,21 +11,32 @@ let validUseGrouping = [ false, ]; +let fallbackUseGrouping = [ + "true", + "false", +]; + let invalidUseGrouping = [ "min-2", - "true", ]; + validUseGrouping.forEach(function(useGrouping) { let nf = new Intl.NumberFormat(undefined, {useGrouping}); assertEquals(useGrouping, nf.resolvedOptions().useGrouping); }); -invalidUseGrouping.forEach(function(useGrouping) { +fallbackUseGrouping.forEach(function(useGrouping) { let nf = new Intl.NumberFormat(undefined, {useGrouping}); assertEquals("auto", nf.resolvedOptions().useGrouping); }); +invalidUseGrouping.forEach(function(useGrouping) { + assertThrows( + () => new Intl.NumberFormat(undefined, {useGrouping}), + RangeError); +}); + // useGrouping: undefined get "auto" assertEquals("auto", (new Intl.NumberFormat()).resolvedOptions().useGrouping); diff --git a/deps/v8/test/js-perf-test/BigInt/add-no-opt.js b/deps/v8/test/js-perf-test/BigInt/add-no-opt.js new file mode 100644 index 00000000000000..2c0fddda45d0a1 --- /dev/null +++ b/deps/v8/test/js-perf-test/BigInt/add-no-opt.js @@ -0,0 +1,75 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +"use strict"; + +d8.file.execute('bigint-util.js'); + +let random_bigints = []; + +// This dummy ensures that the feedback for benchmark.run() in the Measure +// function from base.js is not monomorphic, thereby preventing the benchmarks +// below from being inlined. This ensures consistent behavior and comparable +// results. 
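+// (base.js reaches every benchmark body through one shared call site; with
+// only one registered function that site would stay monomorphic and the
+// body could be inlined into the harness.)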
+new BenchmarkSuite('Prevent-Inline-Dummy', [10000], [ + new Benchmark('Prevent-Inline-Dummy', true, false, 0, () => {}) +]); + + +new BenchmarkSuite(`Add-Small`, [1000], [ + new Benchmark(`Add-Small`, true, false, 0, TestAdd, + () => SetUpRandomBigInts(32)) +]); + + +new BenchmarkSuite(`Add-Large`, [1000], [ + new Benchmark(`Add-Large`, true, false, 0, TestAdd, + () => SetUpRandomBigInts(8192)) +]); + + +new BenchmarkSuite(`Add-LargerThanSmall`, [1000], [ + new Benchmark(`Add-LargerThanSmall`, true, false, 0, TestAdd, + () => SetUpRandomBigInts(68)) +]); + + +new BenchmarkSuite(`Add-Random`, [1000], [ + new Benchmark(`Add-Random`, true, false, 0, TestAdd, + SetUpTestAddRandom) +]); + + +function SetUpRandomBigInts(bits) { + random_bigints = []; + // RandomBigIntWithBits needs multiples of 4 bits. + bits = Math.floor(bits / 4) * 4; + for (let i = 0; i < TEST_ITERATIONS; ++i) { + const bigint = RandomBigIntWithBits(bits); + random_bigints.push(Math.random() < 0.5 ? -bigint : bigint); + } +} + + +function SetUpTestAddRandom() { + random_bigints = []; + // RandomBigIntWithBits needs multiples of 4 bits. + const max_in_4bits = RANDOM_BIGINTS_MAX_BITS / 4; + for (let i = 0; i < TEST_ITERATIONS; ++i) { + const bits = Math.floor(Math.random() * max_in_4bits) * 4; + const bigint = RandomBigIntWithBits(bits); + random_bigints.push(Math.random() < 0.5 ? -bigint : bigint); + } +} + + +function TestAdd() { + let sum = 0n; + + for (let i = 0; i < TEST_ITERATIONS - 1; ++i) { + sum += random_bigints[i] + random_bigints[i + 1]; + } + + return sum; +} diff --git a/deps/v8/test/js-perf-test/BigInt/bitwise-and.js b/deps/v8/test/js-perf-test/BigInt/bitwise-and.js index aa5fb400cdd33c..d945b6e32bc2f1 100644 --- a/deps/v8/test/js-perf-test/BigInt/bitwise-and.js +++ b/deps/v8/test/js-perf-test/BigInt/bitwise-and.js @@ -54,7 +54,7 @@ function TestBitwiseAndZero() { function SetUpTestBitwiseAndSmall() { random_bigints = []; for (let i = 0; i < TEST_ITERATIONS; ++i) { - const bigint = RandomBigIntWithBits(64); + const bigint = RandomBigIntWithBits(60); random_bigints.push(Math.random() < 0.5 ? -bigint : bigint); } } diff --git a/deps/v8/test/js-perf-test/BigInt/divide.js b/deps/v8/test/js-perf-test/BigInt/divide.js index 2fcf18b9de862a..aaa89a6a4de71b 100644 --- a/deps/v8/test/js-perf-test/BigInt/divide.js +++ b/deps/v8/test/js-perf-test/BigInt/divide.js @@ -6,7 +6,7 @@ d8.file.execute('bigint-util.js'); -let random_dividends = [] +let random_dividends = []; let random_divisors = []; // This dummy ensures that the feedback for benchmark.run() in the Measure @@ -30,7 +30,8 @@ new BenchmarkSuite('Divide-Small', [1000], [ new BenchmarkSuite('Divide-Small-Truncated', [1000], [ - new Benchmark('Divide-Small-Truncated', true, false, 0, TestDivideSmallTruncated) + new Benchmark('Divide-Small-Truncated', true, false, 0, + TestDivideSmallTruncated, SetUpTestDivideSmall) ]); @@ -54,7 +55,7 @@ function TestDivideOne() { function SetUpTestDivideSmall() { random_dividends = []; for (let i = 0; i < TEST_ITERATIONS; ++i) { - const bigint = RandomBigIntWithBits(64); + const bigint = RandomBigIntWithBits(60); random_dividends.push(Math.random() < 0.5 ? 
-bigint : bigint); } diff --git a/deps/v8/test/js-perf-test/JSTests1.json b/deps/v8/test/js-perf-test/JSTests1.json index f35a6e5c6af108..e9d90ba4676e29 100644 --- a/deps/v8/test/js-perf-test/JSTests1.json +++ b/deps/v8/test/js-perf-test/JSTests1.json @@ -128,6 +128,20 @@ { "name": "Add-Random" } ] }, + { + "name": "Add-No-Opt", + "main": "run.js", + "flags": ["--allow-natives-syntax", "--no-turbofan"], + "resources": ["add-no-opt.js", "bigint-util.js"], + "test_flags": ["add-no-opt"], + "results_regexp": "^BigInt\\-%s\\(Score\\): (.+)$", + "tests": [ + { "name": "Add-Small" }, + { "name": "Add-Large" }, + { "name": "Add-LargerThanSmall" }, + { "name": "Add-Random" } + ] + }, { "name": "Subtract", "main": "run.js", diff --git a/deps/v8/test/message/fail/weak-refs-register1.out b/deps/v8/test/message/fail/weak-refs-register1.out index aa4cbc2fa22e1e..7e7bf12791be2d 100644 --- a/deps/v8/test/message/fail/weak-refs-register1.out +++ b/deps/v8/test/message/fail/weak-refs-register1.out @@ -1,6 +1,6 @@ -*%(basename)s:*: TypeError: FinalizationRegistry.prototype.register: target must be an object +*%(basename)s:*: TypeError: FinalizationRegistry.prototype.register: invalid target fg.register(1); ^ -TypeError: FinalizationRegistry.prototype.register: target must be an object +TypeError: FinalizationRegistry.prototype.register: invalid target at FinalizationRegistry.register (<anonymous>) at *%(basename)s:*:4 diff --git a/deps/v8/test/message/fail/weak-refs-unregister.out b/deps/v8/test/message/fail/weak-refs-unregister.out index 52945869838778..a6a66ea709d747 100644 --- a/deps/v8/test/message/fail/weak-refs-unregister.out +++ b/deps/v8/test/message/fail/weak-refs-unregister.out @@ -1,6 +1,6 @@ -*%(basename)s:*: TypeError: unregisterToken ('1') must be an object +*%(basename)s:*: TypeError: Invalid unregisterToken ('1') fg.unregister(1); ^ -TypeError: unregisterToken ('1') must be an object +TypeError: Invalid unregisterToken ('1') at FinalizationRegistry.unregister (<anonymous>) at *%(basename)s:*:4 diff --git a/deps/v8/test/mjsunit/collect-type-profile-undefined-feedback-vector.js b/deps/v8/test/mjsunit/collect-type-profile-undefined-feedback-vector.js deleted file mode 100644 index f78f241c74ed0f..00000000000000 --- a/deps/v8/test/mjsunit/collect-type-profile-undefined-feedback-vector.js +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2020 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -// Flags: --allow-natives-syntax - -function f(a, b, c) { - return 'bye'; -}; - -%CollectTypeProfile(0, f, undefined); diff --git a/deps/v8/test/mjsunit/compiler/bigint-multiply-truncate.js b/deps/v8/test/mjsunit/compiler/bigint-multiply-truncate.js index 6f25571591db0c..0d100e399b60b1 100644 --- a/deps/v8/test/mjsunit/compiler/bigint-multiply-truncate.js +++ b/deps/v8/test/mjsunit/compiler/bigint-multiply-truncate.js @@ -10,8 +10,6 @@ function TestMultiplyAndTruncate(a, b) { function OptimizeAndTest(fn) { let bi = 2n ** (2n ** 29n); - // Before optimization, a BigIntTooBig exception is expected - assertThrows(() => fn(bi + 3n, bi + 4n), RangeError); if (%Is64Bit()) { %PrepareFunctionForOptimization(fn); assertEquals(-4n, fn(3n, 4n)); diff --git a/deps/v8/test/mjsunit/compiler/bigint-rematerialize.js b/deps/v8/test/mjsunit/compiler/bigint-rematerialize.js new file mode 100644 index 00000000000000..08b7b080a35f12 --- /dev/null +++ b/deps/v8/test/mjsunit/compiler/bigint-rematerialize.js @@ -0,0 +1,156 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --turbofan --no-always-turbofan + +(function OptimizeAndTestAsUintN() { + function f(x) { + // Will be lowered to Int64Constant(-1) and stored as an immediate. + let y = BigInt.asUintN(64, -1n); + try { + return x + y; + } catch(_) { + return y; + } + } + + %PrepareFunctionForOptimization(f); + assertEquals(2n ** 64n, f(1n)); + assertEquals(2n ** 64n + 1n, f(2n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(2n ** 64n, f(1n)); + assertOptimized(f); + // Should be rematerialized to 2n ** 64n - 1n in code generation. + assertEquals(2n ** 64n - 1n, f(0)); + if (%Is64Bit()) { + assertUnoptimized(f); + } +})(); + +(function OptimizeAndTestAsUintN() { + function f(x) { + // Will be lowered to Int64Sub because exponentiation is not truncated and + // stored in a register. + let y = BigInt.asUintN(64, -(2n ** 0n)); + try { + return x + y; + } catch(_) { + return y; + } + } + + %PrepareFunctionForOptimization(f); + assertEquals(2n ** 64n, f(1n)); + assertEquals(2n ** 64n + 1n, f(2n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(2n ** 64n, f(1n)); + assertOptimized(f); + // Should be rematerialized to 2n ** 64n - 1n in deoptimization. + assertEquals(2n ** 64n - 1n, f(0)); + if (%Is64Bit()) { + assertUnoptimized(f); + } +})(); + +(function OptimizeAndTestAsUintN() { + function f(x) { + // Will be lowered to Int64Sub because exponentiation is not truncated and + // stored in a stack slot. + let y = BigInt.asUintN(64, -(2n ** 0n)); + try { + // The recursion is used to make sure `y` is stored on the stack. + return (x < 3n) ? (x + y) : f(x - 1n); + } catch(_) { + return y; + } + } + + %PrepareFunctionForOptimization(f); + assertEquals(2n ** 64n, f(1n)); + assertEquals(2n ** 64n + 1n, f(2n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(2n ** 64n, f(1n)); + assertOptimized(f); + // Should be rematerialized to 2n ** 64n - 1n in deoptimization. + assertEquals(2n ** 64n - 1n, f(0)); + if (%Is64Bit()) { + assertUnoptimized(f); + } +})(); + +(function OptimizeAndTestAsIntN() { + function f(x) { + // Will be lowered to Int64Constant(-1) and stored as an immediate. 
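+    // (The 64-bit asIntN result fits in a machine word, so the optimized
+    // code can keep it untagged; the point of this test is that the raw word
+    // must be turned back into a heap BigInt when the call below bails out.)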
+ let y = BigInt.asIntN(64, -1n); + try { + return x + y; + } catch (_) { + return y; + } + } + + %PrepareFunctionForOptimization(f); + assertEquals(0n, f(1n)); + assertEquals(1n, f(2n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(0n, f(1n)); + assertOptimized(f); + // Should be rematerialized to -1n in code generation. + assertEquals(-1n, f(0)); + if (%Is64Bit()) { + assertUnoptimized(f); + } +})(); + +(function OptimizeAndTestAsIntN() { + function f(x) { + // Will be lowered to Int64Sub because exponentiation is not truncated and + // stored in a register. + let y = BigInt.asIntN(64, -(2n ** 0n)); + try { + return x + y; + } catch(_) { + return y; + } + } + + %PrepareFunctionForOptimization(f); + assertEquals(0n, f(1n)); + assertEquals(1n, f(2n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(0n, f(1n)); + assertOptimized(f); + // Should be rematerialized to -1n in deoptimization. + assertEquals(-1n, f(0)); + if (%Is64Bit()) { + assertUnoptimized(f); + } +})(); + +(function OptimizeAndTestAsIntN() { + + function f(x) { + // Will be lowered to Int64Sub because exponentiation is not truncated and + // stored in a stack slot. + let y = BigInt.asIntN(64, -(2n ** 0n)); + try { + // The recursion is used to make sure `y` is stored on the stack. + return (x < 3n) ? (x + y) : f(x - 1n); + } catch(_) { + return y; + } + } + + %PrepareFunctionForOptimization(f); + assertEquals(0n, f(1n)); + assertEquals(1n, f(2n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(0n, f(1n)); + assertOptimized(f); + // Should be rematerialized to -1n in deoptimization. + assertEquals(-1n, f(0)); + if (%Is64Bit()) { + assertUnoptimized(f); + } +})(); diff --git a/deps/v8/test/mjsunit/compiler/bigint-unused-still-throws.js b/deps/v8/test/mjsunit/compiler/bigint-unused-still-throws.js new file mode 100644 index 00000000000000..774f5ab3d508c7 --- /dev/null +++ b/deps/v8/test/mjsunit/compiler/bigint-unused-still-throws.js @@ -0,0 +1,73 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --turbofan + +function WarmupAndOptimize(f) { + %PrepareFunctionForOptimization(f); + f(1n, 1n); + %OptimizeFunctionOnNextCall(f); + f(1n, 1n); + assertOptimized(f); +} +%NeverOptimizeFunction(WarmupAndOptimize); + +function TestBinary(f) { + WarmupAndOptimize(f); + assertThrows(() => { f(1, 1n); }, TypeError); + // Recompile in case the above deopts. 
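+  // (A deopt from the mixed Number/BigInt call above discards the optimized
+  // code, so f is warmed up and optimized again before checking the other
+  // operand order.)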
+ WarmupAndOptimize(f); + assertThrows(() => { f(1n, 1); }, TypeError); +} +%NeverOptimizeFunction(TestBinary); + +function Add(a, b) { + let [c] = [1n]; + let temp = 0n; + temp = a + c; + temp = c + b; + temp = 42n; + result = temp; +} +TestBinary(Add); + +function Subtract(a, b) { + let [c] = [1n]; + let temp = 0n; + temp = a - c; + temp = c - b; + temp = 42n; + result = temp; +} +TestBinary(Subtract); + +function Multiply(a, b) { + let [c] = [1n]; + let temp = 0n; + temp = a * c; + temp = c * b; + temp = 42n; + result = temp; +} +TestBinary(Multiply); + +function Divide(a, b) { + let [c] = [1n]; + let temp = 0n; + temp = a / c; + temp = c / b; + temp = 42n; + result = temp; +} +TestBinary(Divide); + +function BitwiseAnd(a, b) { + let [c] = [1n]; + let temp = 0n; + temp = a & c; + temp = c & b; + temp = 42n; + result = temp; +} +TestBinary(BitwiseAnd); diff --git a/deps/v8/test/mjsunit/compiler/bigint64-add-no-deopt-loop.js b/deps/v8/test/mjsunit/compiler/bigint64-add-no-deopt-loop.js new file mode 100644 index 00000000000000..53f1f106c92495 --- /dev/null +++ b/deps/v8/test/mjsunit/compiler/bigint64-add-no-deopt-loop.js @@ -0,0 +1,61 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --turbofan --no-always-turbofan + +(function OptimizeAndTest() { + function f(x, y) { + return x + y; + } + %PrepareFunctionForOptimization(f); + assertEquals(1n, f(0n, 1n)); + assertEquals(5n, f(2n, 3n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(9n, f(4n, 5n)); + assertOptimized(f); + // CheckBigInt64 should trigger deopt. + assertEquals(-(2n ** 63n), f(-(2n ** 63n), 0n)); + if (%Is64Bit()) { + assertUnoptimized(f); + + %PrepareFunctionForOptimization(f); + assertEquals(1n, f(0n, 1n)); + assertEquals(5n, f(2n, 3n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(9n, f(4n, 5n)); + assertOptimized(f); + // Ensure there is no deopt loop. + assertEquals(-(2n ** 63n), f(-(2n ** 63n), 0n)); + assertOptimized(f); + } +})(); + +(function OptimizeAndTestOverflow() { + function f(x, y) { + return x + y; + } + %PrepareFunctionForOptimization(f); + assertEquals(1n, f(0n, 1n)); + assertEquals(5n, f(2n, 3n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(9n, f(4n, 5n)); + assertOptimized(f); + assertEquals(-(2n ** 63n), f(-(2n ** 62n), -(2n ** 62n))); + assertOptimized(f); + // CheckedBigInt64Add will trigger deopt due to overflow. + assertEquals(-(2n ** 63n) - 1n, f(-(2n ** 62n + 1n), -(2n ** 62n))); + if (%Is64Bit()) { + assertUnoptimized(f); + + %PrepareFunctionForOptimization(f); + assertEquals(1n, f(0n, 1n)); + assertEquals(5n, f(2n, 3n)); + %OptimizeFunctionOnNextCall(f); + assertEquals(9n, f(4n, 5n)); + assertOptimized(f); + // Ensure there is no deopt loop. 
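+    // (The idea is that after the first deopt the collected feedback is
+    // generic enough that the recompiled code no longer speculates on small
+    // BigInts, so the same overflowing inputs must not deopt a second time.)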
+ assertEquals(-(2n ** 63n) - 1n, f(-(2n ** 62n + 1n), -(2n ** 62n))); + assertOptimized(f); + } +})(); diff --git a/deps/v8/test/mjsunit/compiler/fast-api-annotations.js b/deps/v8/test/mjsunit/compiler/fast-api-annotations.js index cb95c9420e9392..3fcfb83835f1c7 100644 --- a/deps/v8/test/mjsunit/compiler/fast-api-annotations.js +++ b/deps/v8/test/mjsunit/compiler/fast-api-annotations.js @@ -40,10 +40,16 @@ assertEquals(limits_result, add_all_annotate_enforce_range(limits_params)); %OptimizeFunctionOnNextCall(add_all_annotate_enforce_range); assertEquals(limits_result, add_all_annotate_enforce_range(limits_params)); +const min_int32 = -(2 ** 31); +const max_int32 = 2 ** 31 - 1; +const min_uint32 = 0; +const max_uint32 = 2 ** 32 - 1; + // ----------- enforce_range_compare ----------- // `enforce_range_compare` has the following signature: -// double enforce_range_compare(bool /*should_fallback*/, -// double, int64_t) +// bool enforce_range_compare(bool /*in_range*/, +// double, integer_type) +// where integer_type = {int32_t, uint32_t, int64_t, uint64_t} // ----------- i32 ----------- function compare_i32(in_range, arg) { @@ -125,3 +131,105 @@ assertThrows(() => compare_u64(false, -1)); assertThrows(() => compare_u64(false, -1.5)); assertThrows(() => compare_u64(false, Number.MIN_SAFE_INTEGER)); assertThrows(() => compare_u64(false, 2 ** 64 + 3.15)); + +// ----------- clamp_compare ----------- +// `clamp_compare` has the following signature: +// void clamp_compare(bool /*in_range*/, +// double, integer_type) +// where integer_type = {int32_t, uint32_t, int64_t, uint64_t} + +// ----------- i32 ----------- +function is_in_range_i32(in_range, arg, expected) { + let result = fast_c_api.clamp_compare_i32(in_range, arg, arg); + + assertEquals(expected, result); +} + +%PrepareFunctionForOptimization(is_in_range_i32); +is_in_range_i32(true, 123, 123); +%OptimizeFunctionOnNextCall(is_in_range_i32); +is_in_range_i32(true, 123, 123); +is_in_range_i32(true, -0.5, 0); +is_in_range_i32(true, 0.5, 0); +is_in_range_i32(true, 1.5, 2); +is_in_range_i32(true, min_int32, min_int32); +is_in_range_i32(true, max_int32, max_int32); +// Slow path doesn't perform clamping. 
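+// (That is: the clamped expectations above only hold while the fast call
+// path is taken; once deoptimized, the regular API callback receives the
+// value unclamped, so the out-of-range checks below are guarded on
+// isOptimized.)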
+if (isOptimized(is_in_range_i32)) { + is_in_range_i32(false, -(2 ** 32), min_int32); + is_in_range_i32(false, -(2 ** 32 + 1), min_int32); + is_in_range_i32(false, 2 ** 32, max_int32); + is_in_range_i32(false, 2 ** 32 + 3.15, max_int32); + is_in_range_i32(false, Number.MIN_SAFE_INTEGER, min_int32); + is_in_range_i32(false, Number.MAX_SAFE_INTEGER, max_int32); +} + +// ----------- u32 ----------- +function is_in_range_u32(in_range, arg, expected) { + let result = fast_c_api.clamp_compare_u32(in_range, arg, arg); + + assertEquals(expected, result); +} + +%PrepareFunctionForOptimization(is_in_range_u32); +is_in_range_u32(true, 123, 123); +%OptimizeFunctionOnNextCall(is_in_range_u32); +is_in_range_u32(true, 123, 123); +is_in_range_u32(true, 0, 0); +is_in_range_u32(true, -0.5, 0); +is_in_range_u32(true, 0.5, 0); +is_in_range_u32(true, 2 ** 32 - 1, max_uint32); +is_in_range_u32(false, -(2 ** 31), min_uint32); +is_in_range_u32(false, 2 ** 32, max_uint32); +is_in_range_u32(false, -1, min_uint32); +is_in_range_u32(false, -1.5, min_uint32); +is_in_range_u32(false, Number.MIN_SAFE_INTEGER, min_uint32); +is_in_range_u32(false, Number.MAX_SAFE_INTEGER, max_uint32); + +// ----------- i64 ----------- +function is_in_range_i64(in_range, arg, expected) { + let result = fast_c_api.clamp_compare_i64(in_range, arg, arg); + assertEquals(expected, result); +} + +%PrepareFunctionForOptimization(is_in_range_i64); +is_in_range_i64(true, 123, 123); +%OptimizeFunctionOnNextCall(is_in_range_i64); +is_in_range_i64(true, 123, 123); +is_in_range_i64(true, -0.5, 0); +is_in_range_i64(true, 0.5, 0); +is_in_range_i64(true, 1.5, 2); +is_in_range_i64(true, Number.MIN_SAFE_INTEGER, Number.MIN_SAFE_INTEGER); +is_in_range_i64(true, Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER); +is_in_range_i64(false, -(2 ** 63), Number.MIN_SAFE_INTEGER); +is_in_range_i64(false, 2 ** 63 - 1024, Number.MAX_SAFE_INTEGER); +is_in_range_i64(false, 2 ** 63, Number.MAX_SAFE_INTEGER); +is_in_range_i64(false, -(2 ** 64), Number.MIN_SAFE_INTEGER); +is_in_range_i64(false, -(2 ** 64 + 1), Number.MIN_SAFE_INTEGER); +is_in_range_i64(false, 2 ** 64, Number.MAX_SAFE_INTEGER); +is_in_range_i64(false, 2 ** 64 + 3.15, Number.MAX_SAFE_INTEGER); + +// ----------- u64 ----------- +function is_in_range_u64(in_range, arg, expected) { + let result = fast_c_api.clamp_compare_u64(in_range, arg, arg); + assertEquals(expected, result); +} + +%PrepareFunctionForOptimization(is_in_range_u64); +is_in_range_u64(true, 123, 123); +%OptimizeFunctionOnNextCall(is_in_range_u64); +is_in_range_u64(true, 123, 123); +is_in_range_u64(true, 0, 0); +is_in_range_u64(true, -0.5, 0); +is_in_range_u64(true, 0.5, 0); +is_in_range_u64(true, 2 ** 32 - 1, 2 ** 32 - 1); +is_in_range_u64(true, Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER); +is_in_range_u64(false, Number.MIN_SAFE_INTEGER, 0); +is_in_range_u64(false, -1, 0); +is_in_range_u64(false, -1.5, 0); +is_in_range_u64(false, 2 ** 64, Number.MAX_SAFE_INTEGER); +is_in_range_u64(false, 2 ** 64 + 3.15, Number.MAX_SAFE_INTEGER); + +// ---------- invalid arguments for clamp_compare --------- +fast_c_api.clamp_compare_i32(true); +fast_c_api.clamp_compare_i32(true, 753801, -2147483650); diff --git a/deps/v8/test/mjsunit/compiler/get-iterator-deopt.js b/deps/v8/test/mjsunit/compiler/get-iterator-deopt.js deleted file mode 100644 index 4d7c08136346ad..00000000000000 --- a/deps/v8/test/mjsunit/compiler/get-iterator-deopt.js +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2022 the V8 project authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// Flags: --allow-natives-syntax - -function throwsRepeated(fn, ErrorType) { - // Collect type feedback. - %PrepareFunctionForOptimization(fn); - for (let i = 0; i < 5; i++) assertThrows(fn, ErrorType); - // Force compilation and run. - %OptimizeFunctionOnNextCall(fn); - assertThrows(fn, ErrorType); - // If the function isn't optimized / turbofan tier not available, - // a deopt happened on the call above. - assertEquals(%IsTurbofanEnabled(), %ActiveTierIsTurbofan(fn)); -} - -function repeated(fn) { - // Collect type feedback. - %PrepareFunctionForOptimization(fn); - for (let i = 0; i < 5; i++) fn(); - // Force compilation and run. - %OptimizeFunctionOnNextCall(fn); - fn(); - // If the function isn't optimized / turbofan tier not available, - // a deopt happened on the call above. - assertEquals(%IsTurbofanEnabled(), %ActiveTierIsTurbofan(fn)); -} - -repeated(() => { for (let p of "abc") { } }); -repeated(() => { for (let p of [1, 2, 3]) { } }); -throwsRepeated(() => { for (let p of {a: 1, b: 2}) { } }, TypeError); -let objWithIterator = { [Symbol.iterator]: function* () { yield 1; } }; -repeated(() => { for (let p of objWithIterator) { } }); -throwsRepeated(() => { for (let p of 5) { } }, TypeError); -throwsRepeated(() => { for (let p of new Number(5)) { } }, TypeError); -throwsRepeated(() => { for (let p of true) { } }, TypeError); -throwsRepeated(() => { for (let p of new BigInt(123)) { } }, TypeError); -throwsRepeated(() => { for (let p of new Symbol("symbol")) { } }, TypeError); -throwsRepeated(function testUndef() { for (let p of undefined) { } }, TypeError); -throwsRepeated(() => { for (let p of null) { } }, TypeError); diff --git a/deps/v8/test/mjsunit/compiler/misc-ensure-no-deopt.js b/deps/v8/test/mjsunit/compiler/misc-ensure-no-deopt.js new file mode 100644 index 00000000000000..5cc13d383f314b --- /dev/null +++ b/deps/v8/test/mjsunit/compiler/misc-ensure-no-deopt.js @@ -0,0 +1,61 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function throwsRepeated(fn, ErrorType, required_compilation_count) { + for (let j = 0; j < (required_compilation_count ?? 1); j++) { + // Collect type feedback. + %PrepareFunctionForOptimization(fn); + for (let i = 0; i < 5; i++) assertThrows(fn, ErrorType); + // Force compilation and run. + %OptimizeFunctionOnNextCall(fn); + assertThrows(fn, ErrorType); + } + // If the function isn't optimized / turbofan tier not available, + // a deopt happened on the call above. + assertEquals(%IsTurbofanEnabled(), %ActiveTierIsTurbofan(fn)); +} + +function repeated(fn) { + // Collect type feedback. + %PrepareFunctionForOptimization(fn); + for (let i = 0; i < 2; i++) fn(); + // Force compilation and run. + %OptimizeFunctionOnNextCall(fn); + fn(); + // If the function isn't optimized / turbofan tier not available, + // a deopt happened on the call above. 
+  assertEquals(%IsTurbofanEnabled(), %ActiveTierIsTurbofan(fn));
+}
+
+repeated(() => { for (let p of "abc") { } });
+repeated(() => { for (let p of [1, 2, 3]) { } });
+throwsRepeated(() => { for (let p of {a: 1, b: 2}) { } }, TypeError);
+let objWithIterator = { [Symbol.iterator]: function* () { yield 1; } };
+repeated(() => { for (let p of objWithIterator) { } });
+throwsRepeated(() => { for (let p of 5) { } }, TypeError);
+throwsRepeated(() => { for (let p of new Number(5)) { } }, TypeError);
+throwsRepeated(() => { for (let p of true) { } }, TypeError);
+throwsRepeated(() => { for (let p of new BigInt(123)) { } }, TypeError);
+throwsRepeated(() => { for (let p of Symbol("symbol")) { } }, TypeError);
+throwsRepeated(() => { for (let p of undefined) { } }, TypeError);
+throwsRepeated(() => { for (let p of null) { } }, TypeError);
+
+throwsRepeated(() => (undefined).val = undefined, TypeError);
+throwsRepeated(() => (undefined)["test"] = undefined, TypeError);
+throwsRepeated(() => (undefined)[Symbol("test")] = undefined, TypeError);
+throwsRepeated(() => (undefined)[null] = undefined, TypeError);
+throwsRepeated(() => (undefined)[undefined] = undefined, TypeError);
+throwsRepeated(() => (undefined)[0] = undefined, TypeError);
+throwsRepeated(() => (undefined)[NaN] = undefined, TypeError);
+throwsRepeated(() => (null)[0] = undefined, TypeError);
+// BigInt.asIntN() deopts once but produces a more suitable compile result
+// on the second compilation, which doesn't deopt any more.
+let compiles = 2;
+throwsRepeated(() => BigInt.asIntN(2, 2), TypeError, compiles);
+throwsRepeated(() => BigInt.asIntN(2, () => {}), SyntaxError, compiles);
+throwsRepeated(() => BigInt.asIntN(2, {some: Object}), SyntaxError, compiles);
+throwsRepeated(() => BigInt.asIntN(2, Symbol("test")), TypeError, compiles);
+throwsRepeated(() => BigInt.asIntN(2, null), TypeError, compiles);
diff --git a/deps/v8/test/mjsunit/compiler/omit-default-ctors-array-iterator.js b/deps/v8/test/mjsunit/compiler/omit-default-ctors-array-iterator.js
new file mode 100644
index 00000000000000..0c52b0e45e4c16
--- /dev/null
+++ b/deps/v8/test/mjsunit/compiler/omit-default-ctors-array-iterator.js
@@ -0,0 +1,33 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --omit-default-ctors --allow-natives-syntax --turbofan
+// Flags: --no-always-turbofan
+
+// This behavior is not spec compliant, see crbug.com/v8/13249.
+(function ArrayIteratorMonkeyPatched() {
+  let iterationCount = 0;
+  const oldIterator = Array.prototype[Symbol.iterator];
+  Array.prototype[Symbol.iterator] =
+      function () { ++iterationCount; return oldIterator.call(this); };
+
+  class A {}
+  class B extends A {}
+  class C extends B {}
+
+  %PrepareFunctionForOptimization(C);
+  new C();
+  %OptimizeFunctionOnNextCall(C);
+
+  // C default ctor doing "...args" and B default ctor doing "...args".
+  assertEquals(2, iterationCount);
+
+  new C();
+
+  // C default ctor doing "...args" and B default ctor doing "...args".
+  assertEquals(4, iterationCount);
+  assertTrue(isTurboFanned(C)); // No deopt.
+ + Array.prototype[Symbol.iterator] = oldIterator; +})(); diff --git a/deps/v8/test/mjsunit/compiler/omit-default-ctors.js b/deps/v8/test/mjsunit/compiler/omit-default-ctors.js new file mode 100644 index 00000000000000..2f1c5c18c0ccd1 --- /dev/null +++ b/deps/v8/test/mjsunit/compiler/omit-default-ctors.js @@ -0,0 +1,781 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --omit-default-ctors --allow-natives-syntax --turbofan +// Flags: --no-always-turbofan + +(function OmitDefaultBaseCtor() { + class A {}; // default base ctor -> will be omitted + class B extends A {}; + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + const o = new B(); + assertSame(B.prototype, o.__proto__); + assertTrue(isTurboFanned(B)); // No deopt. +})(); + +(function OmitDefaultDerivedCtor() { + class A { constructor() {} }; + class B extends A {}; // default derived ctor -> will be omitted + class C extends B {}; + %PrepareFunctionForOptimization(C); + new C(); + %OptimizeFunctionOnNextCall(C); + const o = new C(); + assertSame(C.prototype, o.__proto__); + assertTrue(isTurboFanned(C)); // No deopt. +})(); + +(function OmitDefaultBaseAndDerivedCtor() { + class A {}; // default base ctor -> will be omitted + class B extends A {}; // default derived ctor -> will be omitted + class C extends B {}; + %PrepareFunctionForOptimization(C); + new C(); + %OptimizeFunctionOnNextCall(C); + const o = new C(); + assertSame(C.prototype, o.__proto__); + assertTrue(isTurboFanned(C)); // No deopt. +})(); + +(function OmitDefaultBaseCtorWithExplicitSuper() { + class A {}; // default base ctor -> will be omitted + class B extends A { constructor() { super(); } }; + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + const o = new B(); + assertSame(B.prototype, o.__proto__); + assertTrue(isTurboFanned(B)); // No deopt. +})(); + +(function OmitDefaultDerivedCtorWithExplicitSuper() { + class A { constructor() {} }; + class B extends A {}; // default derived ctor -> will be omitted + class C extends B { constructor() { super(); } }; + %PrepareFunctionForOptimization(C); + new C(); + %OptimizeFunctionOnNextCall(C); + const o = new C(); + assertSame(C.prototype, o.__proto__); + assertTrue(isTurboFanned(C)); // No deopt. +})(); + +(function OmitDefaultBaseAndDerivedCtorWithExplicitSuper() { + class A {}; // default base ctor -> will be omitted + class B extends A {}; // default derived ctor -> will be omitted + class C extends B { constructor() { super(); } }; + %PrepareFunctionForOptimization(C); + new C(); + %OptimizeFunctionOnNextCall(C); + const o = new C(); + assertSame(C.prototype, o.__proto__); + assertTrue(isTurboFanned(C)); // No deopt. +})(); + +(function OmitDefaultBaseCtorWithExplicitSuperAndNonFinalSpread() { + class A {}; // default base ctor -> will be omitted + class B extends A { constructor(...args) { super(1, ...args, 2); } }; + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + const o = new B(3, 4); + assertSame(B.prototype, o.__proto__); + // See https://bugs.chromium.org/p/v8/issues/detail?id=13310 + // assertTrue(isTurboFanned(B)); // No deopt. 
+ // This assert will fail when the above bug is fixed: + assertFalse(isTurboFanned(B)); +})(); + +(function OmitDefaultDerivedCtorWithExplicitSuperAndNonFinalSpread() { + class A { constructor() {} }; + class B extends A {}; // default derived ctor -> will be omitted + class C extends B { constructor(...args) { super(1, ...args, 2); } }; + %PrepareFunctionForOptimization(C); + new C(); + %OptimizeFunctionOnNextCall(C); + const o = new C(3, 4); + assertSame(C.prototype, o.__proto__); + // See https://bugs.chromium.org/p/v8/issues/detail?id=13310 + // assertTrue(isTurboFanned(C)); // No deopt. + // This assert will fail when the above bug is fixed: + assertFalse(isTurboFanned(C)); +})(); + +(function OmitDefaultBaseAndDerivedCtorWithExplicitSuperAndNonFinalSpread() { + class A {}; // default base ctor -> will be omitted + class B extends A {}; // default derived ctor -> will be omitted + class C extends B { constructor(...args) { super(1, ...args, 2); } }; + %PrepareFunctionForOptimization(C); + new C(); + %OptimizeFunctionOnNextCall(C); + const o = new C(3, 4); + assertSame(C.prototype, o.__proto__); + // See https://bugs.chromium.org/p/v8/issues/detail?id=13310 + // assertTrue(isTurboFanned(C)); // No deopt. + // This assert will fail when the above bug is fixed: + assertFalse(isTurboFanned(C)); +})(); + +(function NonDefaultBaseConstructorCalled() { + let ctorCallCount = 0; + let lastArgs; + class Base { + constructor(...args) { + ++ctorCallCount; + this.baseTagged = true; + lastArgs = args; + } + }; + // Nothing will be omitted. + class A extends Base {}; + %PrepareFunctionForOptimization(A); + new A(); + %OptimizeFunctionOnNextCall(A); + const a = new A(1, 2, 3); + assertEquals(2, ctorCallCount); + assertEquals([1, 2, 3], lastArgs); + assertTrue(a.baseTagged); + assertTrue(isTurboFanned(A)); // No deopt. + + // 'A' default ctor will be omitted. + class B1 extends A {}; + %PrepareFunctionForOptimization(B1); + new B1(); + %OptimizeFunctionOnNextCall(B1); + const b1 = new B1(4, 5, 6); + assertEquals(4, ctorCallCount); + assertEquals([4, 5, 6], lastArgs); + assertTrue(b1.baseTagged); + assertTrue(isTurboFanned(B1)); // No deopt. + + // The same test with non-final spread; 'A' default ctor will be omitted. + class B2 extends A { + constructor(...args) { super(1, ...args, 2); } + }; + %PrepareFunctionForOptimization(B2); + new B2(); + %OptimizeFunctionOnNextCall(B2); + const b2 = new B2(4, 5, 6); + assertEquals(6, ctorCallCount); + assertEquals([1, 4, 5, 6, 2], lastArgs); + assertTrue(b2.baseTagged); + // See https://bugs.chromium.org/p/v8/issues/detail?id=13310 + // assertTrue(isTurboFanned(B2)); // No deopt. + // This assert will fail when the above bug is fixed: + assertFalse(isTurboFanned(B2)); // No deopt. +})(); + +(function NonDefaultDerivedConstructorCalled() { + let ctorCallCount = 0; + let lastArgs; + class Base {}; + class Derived extends Base { + constructor(...args) { + super(); + ++ctorCallCount; + this.derivedTagged = true; + lastArgs = args; + } + }; + // Nothing will be omitted. + class A extends Derived {}; + %PrepareFunctionForOptimization(A); + new A(); + %OptimizeFunctionOnNextCall(A); + const a = new A(1, 2, 3); + assertEquals(2, ctorCallCount); + assertEquals([1, 2, 3], lastArgs); + assertTrue(a.derivedTagged); + assertTrue(isTurboFanned(A)); // No deopt. + + // 'A' default ctor will be omitted. 
+ class B1 extends A {}; + %PrepareFunctionForOptimization(B1); + new B1(); + %OptimizeFunctionOnNextCall(B1); + const b1 = new B1(4, 5, 6); + assertEquals(4, ctorCallCount); + assertEquals([4, 5, 6], lastArgs); + assertTrue(b1.derivedTagged); + assertTrue(isTurboFanned(B1)); // No deopt. + + // The same test with non-final spread. 'A' default ctor will be omitted. + class B2 extends A { + constructor(...args) { super(1, ...args, 2); } + }; + %PrepareFunctionForOptimization(B2); + new B2(); + %OptimizeFunctionOnNextCall(B2); + const b2 = new B2(4, 5, 6); + assertEquals(6, ctorCallCount); + assertEquals([1, 4, 5, 6, 2], lastArgs); + assertTrue(b2.derivedTagged); + // See https://bugs.chromium.org/p/v8/issues/detail?id=13310 + // assertTrue(isTurboFanned(B2)); // No deopt. + // This assert will fail when the above bug is fixed: + assertFalse(isTurboFanned(B2)); // No deopt. +})(); + +(function BaseFunctionCalled() { + let baseFunctionCallCount = 0; + function BaseFunction() { + ++baseFunctionCallCount; + this.baseTagged = true; + } + + class A1 extends BaseFunction {}; + %PrepareFunctionForOptimization(A1); + new A1(); + %OptimizeFunctionOnNextCall(A1); + const a1 = new A1(); + assertEquals(2, baseFunctionCallCount); + assertTrue(a1.baseTagged); + assertTrue(isTurboFanned(A1)); // No deopt. + + class A2 extends BaseFunction { + constructor(...args) { super(1, ...args, 2); } + }; + %PrepareFunctionForOptimization(A2); + new A2(); + %OptimizeFunctionOnNextCall(A2); + const a2 = new A2(); + assertEquals(4, baseFunctionCallCount); + assertTrue(a2.baseTagged); + assertTrue(isTurboFanned(A2)); // No deopt. +})(); + +(function NonSuperclassCtor() { + class A {}; + class B extends A {}; + class C extends B {}; + class D1 extends C {}; + class D2 extends C { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(C); + %PrepareFunctionForOptimization(D1); + %PrepareFunctionForOptimization(D2); + new C(); + new D1(); + new D2(); + %OptimizeFunctionOnNextCall(C); + %OptimizeFunctionOnNextCall(D1); + %OptimizeFunctionOnNextCall(D2); + + // Install an object which is not a constructor into the class hierarchy. + C.__proto__ = {}; + + assertThrows(() => { new C(); }, TypeError); + assertThrows(() => { new D1(); }, TypeError); + assertThrows(() => { new D2(); }, TypeError); +})(); + +(function ArgumentsEvaluatedBeforeNonSuperclassCtorDetected() { + class A {}; + class B extends A {}; + class C extends B {}; + class D1 extends C {}; + class D2 extends C { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(C); + %PrepareFunctionForOptimization(D1); + %PrepareFunctionForOptimization(D2); + new C(); + new D1(); + new D2(); + %OptimizeFunctionOnNextCall(C); + %OptimizeFunctionOnNextCall(D1); + %OptimizeFunctionOnNextCall(D2); + + // Install an object which is not a constructor into the class hierarchy. 
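+  // After this, the implicit super() in the omitted default ctors would have
+  // to call a non-constructor, so every construction below must throw.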
+ C.__proto__ = {}; + + let callCount = 0; + function foo() { + ++callCount; + } + + assertThrows(() => { new C(foo()); }, TypeError); + assertEquals(1, callCount); + + assertThrows(() => { new D1(foo()); }, TypeError); + assertEquals(2, callCount); + + assertThrows(() => { new D2(foo()); }, TypeError); + assertEquals(3, callCount); +})(); + +(function ArgumentsEvaluatedBeforeNonSuperclassCtorDetected2() { + class A {}; + class B extends A {}; + class C extends B {}; + class D1 extends C { + constructor() { + super(foo()); + } + }; + + class D2 extends C { + constructor(...args) { + super(...args, foo()); + } + }; + + let callCount = 0; + function foo() { + ++callCount; + } + + %PrepareFunctionForOptimization(D1); + %PrepareFunctionForOptimization(D2); + new D1(); + new D2(); + %OptimizeFunctionOnNextCall(D1); + %OptimizeFunctionOnNextCall(D2); + assertEquals(2, callCount); + + // Install an object which is not a constructor into the class hierarchy. + C.__proto__ = {}; + + assertThrows(() => { new D1(); }, TypeError); + assertEquals(3, callCount); + + assertThrows(() => { new D2(); }, TypeError); + assertEquals(4, callCount); +})(); + +(function EvaluatingArgumentsChangesClassHierarchy() { + let ctorCallCount = 0; + class A {}; + class B extends A { constructor() { + super(); + ++ctorCallCount; + }}; + class C extends B {}; + class D extends C { + constructor() { + super(foo()); + } + }; + + let fooCallCount = 0; + let changeHierarchy = false; + function foo() { + if (changeHierarchy) { + C.__proto__ = A; + C.prototype.__proto__ = A.prototype; + } + ++fooCallCount; + } + + %PrepareFunctionForOptimization(D); + new D(); + assertEquals(1, fooCallCount); + assertEquals(1, ctorCallCount); + %OptimizeFunctionOnNextCall(D); + changeHierarchy = true; + + new D(); + assertEquals(2, fooCallCount); + assertEquals(1, ctorCallCount); + assertFalse(isTurboFanned(D)); // Deopt. +})(); + +// The same test as the previous one, but with a ctor with a non-final spread. +(function EvaluatingArgumentsChangesClassHierarchyThisTimeWithNonFinalSpread() { + let ctorCallCount = 0; + class A {}; + class B extends A { constructor() { + super(); + ++ctorCallCount; + }}; + class C extends B {}; + class D extends C { + constructor(...args) { + super(...args, foo()); + } + }; + + let fooCallCount = 0; + let changeHierarchy = false; + function foo() { + if (changeHierarchy) { + C.__proto__ = A; + C.prototype.__proto__ = A.prototype; + } + ++fooCallCount; + } + + %PrepareFunctionForOptimization(D); + new D(); + assertEquals(1, fooCallCount); + assertEquals(1, ctorCallCount); + %OptimizeFunctionOnNextCall(D); + changeHierarchy = true; + + new D(); + assertEquals(2, fooCallCount); + assertEquals(1, ctorCallCount); + assertFalse(isTurboFanned(D)); // Deopt. +})(); + +(function BasePrivateField() { + class A { + #aBrand = true; + isA() { + return #aBrand in this; + } + }; + class B extends A {}; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + + const b = new B(); + assertTrue(b.isA()); + assertTrue(isTurboFanned(B)); // No deopt. + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + assertTrue(c1.isA()); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + assertTrue(c2.isA()); + assertTrue(isTurboFanned(C2)); // No deopt. 
+})(); + +(function DerivedPrivateField() { + class A {}; + class B extends A { + #bBrand = true; + isB() { + return #bBrand in this; + } + }; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + assertTrue(c1.isB()); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + assertTrue(c2.isB()); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function BasePrivateMethod() { + class A { + #m() { return 'private'; } + callPrivate() { + return this.#m(); + } + }; + class B extends A {}; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + + const b = new B(); + assertEquals('private', b.callPrivate()); + assertTrue(isTurboFanned(B)); // No deopt. + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + assertEquals('private', c1.callPrivate()); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + assertEquals('private', c2.callPrivate()); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function DerivedPrivateMethod() { + class A {}; + class B extends A { + #m() { return 'private'; } + callPrivate() { + return this.#m(); + } + }; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + assertEquals('private', c1.callPrivate()); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + assertEquals('private', c2.callPrivate()); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function BasePrivateGetter() { + class A { + get #p() { return 'private'; } + getPrivate() { + return this.#p; + } + }; + class B extends A {}; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + + const b = new B(); + assertEquals('private', b.getPrivate()); + assertTrue(isTurboFanned(B)); // No deopt. + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + assertEquals('private', c1.getPrivate()); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + assertEquals('private', c2.getPrivate()); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function DerivedPrivateGetter() { + class A {}; + class B extends A { + get #p() { return 'private'; } + getPrivate() { + return this.#p; + } + }; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + assertEquals('private', c1.getPrivate()); + assertTrue(isTurboFanned(C1)); // No deopt. 
+ + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + assertEquals('private', c2.getPrivate()); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function BasePrivateSetter() { + class A { + set #p(value) { this.secret = value; } + setPrivate() { + this.#p = 'private'; + } + }; + class B extends A {}; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + + const b = new B(); + b.setPrivate(); + assertEquals('private', b.secret); + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + c1.setPrivate(); + assertEquals('private', c1.secret); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + c2.setPrivate(); + assertEquals('private', c2.secret); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function DerivedPrivateSetter() { + class A {}; + class B extends A { + set #p(value) { this.secret = value; } + setPrivate() { + this.#p = 'private'; + } + }; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + c1.setPrivate(); + assertEquals('private', c1.secret); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + c2.setPrivate(); + assertEquals('private', c2.secret); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function BaseClassFields() { + class A { + aField = true; + }; + class B extends A {}; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + + const b = new B(); + assertTrue(b.aField); + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + assertTrue(c1.aField); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + assertTrue(c2.aField); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function DerivedClassFields() { + class A {}; + class B extends A { + bField = true; + }; + class C1 extends B {}; + class C2 extends B { constructor(...args) { super(1, ...args, 2); }}; + + %PrepareFunctionForOptimization(C1); + new C1(); + %OptimizeFunctionOnNextCall(C1); + + const c1 = new C1(); + assertTrue(c1.bField); + assertTrue(isTurboFanned(C1)); // No deopt. + + %PrepareFunctionForOptimization(C2); + new C2(); + %OptimizeFunctionOnNextCall(C2); + + const c2 = new C2(); + assertTrue(c2.bField); + assertTrue(isTurboFanned(C2)); // No deopt. +})(); + +(function SuperInTryCatchDefaultCtor() { + class A {}; + class B extends A { + constructor() { + try { + super(); + } catch { + assertUnreachable(); + } + } + }; + + %PrepareFunctionForOptimization(B); + new B(); + %OptimizeFunctionOnNextCall(B); + + const b = new B(); + assertSame(B.prototype, b.__proto__); + assertTrue(isTurboFanned(B)); // No deopt. 
+})();
+
+(function SuperInTryCatchNonDefaultCtor() {
+  class A { constructor() {} };
+  class B extends A {
+    constructor() {
+      try {
+        super();
+      } catch {
+        assertUnreachable();
+      }
+    }
+  };
+
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeFunctionOnNextCall(B);
+
+  const b = new B();
+  assertSame(B.prototype, b.__proto__);
+  assertTrue(isTurboFanned(B)); // No deopt.
+})();
diff --git a/deps/v8/test/mjsunit/d8/performance-mark.js b/deps/v8/test/mjsunit/d8/performance-mark.js
new file mode 100644
index 00000000000000..52f8262140d68e
--- /dev/null
+++ b/deps/v8/test/mjsunit/d8/performance-mark.js
@@ -0,0 +1,23 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+const mark = performance.mark("a mark");
+assertEquals("mark", mark.entryType);
+assertEquals("a mark", mark.name);
+assertTrue(typeof mark.startTime == "number");
+assertEquals(0, mark.duration);
+
+const measure = performance.measure("a measure");
+assertEquals("measure", measure.entryType);
+assertEquals("a measure", measure.name);
+assertEquals(0, measure.startTime);
+assertTrue(typeof measure.duration == "number");
+assertTrue(mark.startTime <= measure.duration);
+
+const range_measure = performance.measure("a range measure", mark);
+assertEquals("measure", range_measure.entryType);
+assertEquals("a range measure", range_measure.name);
+assertEquals(mark.startTime, range_measure.startTime);
+assertTrue(typeof range_measure.duration == "number");
+assertTrue(0 <= range_measure.duration);
diff --git a/deps/v8/test/mjsunit/harmony/array-to-reversed-big.js b/deps/v8/test/mjsunit/harmony/array-to-reversed-big.js
new file mode 100644
index 00000000000000..62d9730484d835
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/array-to-reversed-big.js
@@ -0,0 +1,14 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+ +// Flags: --harmony-change-array-by-copy + +(function TestBigPacked() { + let a = []; + for (let i = 0; i < 50000; i++) a.push(i); + let r = a.toReversed(); + for (let i = 0; i < 50000; i++) { + assertEquals(r[i], a.at(-(i+1))); + } +})(); diff --git a/deps/v8/test/mjsunit/harmony/array-to-reversed.js b/deps/v8/test/mjsunit/harmony/array-to-reversed.js index adb1f23f29699e..caae4079ab088d 100644 --- a/deps/v8/test/mjsunit/harmony/array-to-reversed.js +++ b/deps/v8/test/mjsunit/harmony/array-to-reversed.js @@ -57,6 +57,10 @@ assertEquals("toReversed", Array.prototype.toReversed.name); assertEquals(["1st","2nd","3rd","4th"], order); })(); +(function TestEmpty() { + assertEquals([], [].toReversed()); +})(); + (function TestTooBig() { let a = { length: Math.pow(2, 32) }; assertThrows(() => Array.prototype.toReversed.call(a), RangeError); diff --git a/deps/v8/test/mjsunit/harmony/array-to-sorted.js b/deps/v8/test/mjsunit/harmony/array-to-sorted.js index 6df02586183a41..e5ea813fb82de5 100644 --- a/deps/v8/test/mjsunit/harmony/array-to-sorted.js +++ b/deps/v8/test/mjsunit/harmony/array-to-sorted.js @@ -56,25 +56,25 @@ function TestToSortedBasicBehaviorHelper(input) { } // Smi packed -AssertToSortedAndSortSameResult([1,3,2,4]); +TestToSortedBasicBehaviorHelper([1,3,2,4]); // Double packed -AssertToSortedAndSortSameResult([1.1,3.3,2.2,4.4]); +TestToSortedBasicBehaviorHelper([1.1,3.3,2.2,4.4]); // Packed -AssertToSortedAndSortSameResult([true,false,1,42.42,null,"foo"]); +TestToSortedBasicBehaviorHelper([true,false,1,42.42,null,"foo"]); // Smi holey -AssertToSortedAndSortSameResult([1,,3,,2,,4,,]); +TestToSortedBasicBehaviorHelper([1,,3,,2,,4,,]); // Double holey -AssertToSortedAndSortSameResult([1.1,,3.3,,2.2,,4.4,,]); +TestToSortedBasicBehaviorHelper([1.1,,3.3,,2.2,,4.4,,]); // Holey -AssertToSortedAndSortSameResult([true,,false,,1,,42.42,,null,,"foo",,]); +TestToSortedBasicBehaviorHelper([true,,false,,1,,42.42,,null,,"foo",,]); // Generic -AssertToSortedAndSortSameResult({ length: 4, +TestToSortedBasicBehaviorHelper({ length: 4, get "0"() { return "hello"; }, get "1"() { return "cursed"; }, get "2"() { return "java"; }, @@ -94,6 +94,12 @@ AssertToSortedAndSortSameResult({ length: 4, assertEquals(0, a.length); })(); +(function TestBig() { + const a = []; + a[50001] = 42.42; + a.toSorted(); +})(); + (function TestTooBig() { const a = { length: Math.pow(2, 32) }; assertThrows(() => Array.prototype.toSorted.call(a), RangeError); diff --git a/deps/v8/test/mjsunit/harmony/array-to-spliced-big.js b/deps/v8/test/mjsunit/harmony/array-to-spliced-big.js new file mode 100644 index 00000000000000..84e81056953630 --- /dev/null +++ b/deps/v8/test/mjsunit/harmony/array-to-spliced-big.js @@ -0,0 +1,11 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+// Flags: --harmony-change-array-by-copy
+
+(function TestBigPacked() {
+  let a = [];
+  for (let i = 0; i < 50000; i++) a.push(i);
+  let r = a.toSpliced();
+})();
diff --git a/deps/v8/test/mjsunit/harmony/array-to-spliced.js b/deps/v8/test/mjsunit/harmony/array-to-spliced.js
index ad72eea5694d43..f76a71c4db9792 100644
--- a/deps/v8/test/mjsunit/harmony/array-to-spliced.js
+++ b/deps/v8/test/mjsunit/harmony/array-to-spliced.js
@@ -101,6 +101,24 @@ TestToSplicedBasicBehaviorHelper({ length: 4,
   assertEquals(Array, (new MyArray()).toSpliced().constructor);
 })();
 
+(function TestEmpty() {
+  assertEquals([], [].toSpliced());
+})();
+
+function TestFastSourceEmpty(input, itemsToInsert) {
+  // Create an empty input Array of the same ElementsKind with splice().
+  TestToSplicedBasicBehaviorHelper(input.splice(), itemsToInsert);
+}
+
+// Packed
+TestFastSourceEmpty([1,2,3,4], [5,6]);
+
+// Double packed
+TestFastSourceEmpty([1.1,2.2,3.3,4.4], [5.5,6.6]);
+
+// Packed
+TestFastSourceEmpty([true,false,1,42.42], [null,"foo"]);
+
 // All tests after this have an invalidated elements-on-prototype protector.
 (function TestNoHoles() {
   const a = [,,,,];
diff --git a/deps/v8/test/mjsunit/harmony/arraybuffer-transfer.js b/deps/v8/test/mjsunit/harmony/arraybuffer-transfer.js
index 2efe530d03088e..85b3fd417736cd 100644
--- a/deps/v8/test/mjsunit/harmony/arraybuffer-transfer.js
+++ b/deps/v8/test/mjsunit/harmony/arraybuffer-transfer.js
@@ -125,6 +125,11 @@ TestNonGrow(0, { maxByteLength: 2048 });
 }
 })();
 
+(function TestEmptySourceStore() {
+  let ab = new ArrayBuffer();
+  let xfer = ab.transfer().transfer(1024);
+})();
+
 if (typeof WebAssembly !== 'undefined') {
   // WebAssembly buffers cannot be detached.
   const memory = new WebAssembly.Memory({ initial: 1 });
diff --git a/deps/v8/test/mjsunit/harmony/json-parse-with-source.js b/deps/v8/test/mjsunit/harmony/json-parse-with-source.js
new file mode 100644
index 00000000000000..96990825299d49
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/json-parse-with-source.js
@@ -0,0 +1,287 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --harmony-json-parse-with-source
+
+(function TestBigInt() {
+  const tooBigForNumber = BigInt(Number.MAX_SAFE_INTEGER) + 2n;
+  const intToBigInt = (key, val, { source }) =>
+      typeof val === 'number' && val % 1 === 0 ? BigInt(source) : val;
+  const roundTripped = JSON.parse(String(tooBigForNumber), intToBigInt);
+  assertEquals(tooBigForNumber, roundTripped);
+
+  const bigIntToRawJSON = (key, val) =>
+      typeof val === 'bigint' ? JSON.rawJSON(val) : val;
+  const embedded = JSON.stringify({ tooBigForNumber }, bigIntToRawJSON);
+  assertEquals('{"tooBigForNumber":9007199254740993}', embedded);
+})();
+
+function GenerateParseReviverFunction(texts) {
+  let i = 0;
+  return function (key, value, context) {
+    assertTrue(typeof context === 'object');
+    assertEquals(Object.prototype, Object.getPrototypeOf(context));
+    // The json value is a primitive value, its context only has a source property.
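+    // (E.g. JSON.parse('1', fn) passes fn a context of { source: '1' },
+    // whereas the reviver call for an array or object value receives a
+    // context object with no own properties; both cases are checked below.)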
+    if (texts[i] !== undefined) {
+      const descriptor = Object.getOwnPropertyDescriptor(context, 'source');
+      assertTrue(descriptor.configurable);
+      assertTrue(descriptor.enumerable);
+      assertTrue(descriptor.writable);
+      assertEquals(undefined, descriptor.get);
+      assertEquals(undefined, descriptor.set);
+      assertEquals(texts[i++], descriptor.value);
+
+      assertEquals(['source'], Object.getOwnPropertyNames(context));
+      assertEquals([], Object.getOwnPropertySymbols(context));
+    } else {
+      // The json value is JSArray or JSObject, its context has no property.
+      assertFalse(Object.hasOwn(context, 'source'));
+      assertEquals([], Object.getOwnPropertyNames(context));
+      assertEquals([], Object.getOwnPropertySymbols(context));
+      i++;
+    }
+    return value;
+  };
+}
+
+(function TestNumber() {
+  assertEquals(1, JSON.parse('1', GenerateParseReviverFunction(['1'])));
+  assertEquals(1.1, JSON.parse('1.1', GenerateParseReviverFunction(['1.1'])));
+  assertEquals(-1, JSON.parse('-1', GenerateParseReviverFunction(['-1'])));
+  assertEquals(
+    -1.1,
+    JSON.parse('-1.1', GenerateParseReviverFunction(['-1.1']))
+  );
+  assertEquals(
+    11,
+    JSON.parse('1.1e1', GenerateParseReviverFunction(['1.1e1']))
+  );
+  assertEquals(
+    11,
+    JSON.parse('1.1e+1', GenerateParseReviverFunction(['1.1e+1']))
+  );
+  assertEquals(
+    0.11,
+    JSON.parse('1.1e-1', GenerateParseReviverFunction(['1.1e-1']))
+  );
+  assertEquals(
+    11,
+    JSON.parse('1.1E1', GenerateParseReviverFunction(['1.1E1']))
+  );
+  assertEquals(
+    11,
+    JSON.parse('1.1E+1', GenerateParseReviverFunction(['1.1E+1']))
+  );
+  assertEquals(
+    0.11,
+    JSON.parse('1.1E-1', GenerateParseReviverFunction(['1.1E-1']))
+  );
+
+  assertEquals('1', JSON.stringify(JSON.rawJSON(1)));
+  assertEquals('1.1', JSON.stringify(JSON.rawJSON(1.1)));
+  assertEquals('-1', JSON.stringify(JSON.rawJSON(-1)));
+  assertEquals('-1.1', JSON.stringify(JSON.rawJSON(-1.1)));
+  assertEquals('11', JSON.stringify(JSON.rawJSON(1.1e1)));
+  assertEquals('0.11', JSON.stringify(JSON.rawJSON(1.1e-1)));
+})();
+
+(function TestBasic() {
+  assertEquals(
+    null,
+    JSON.parse('null', GenerateParseReviverFunction(['null']))
+  );
+  assertEquals(
+    true,
+    JSON.parse('true', GenerateParseReviverFunction(['true']))
+  );
+  assertEquals(
+    false,
+    JSON.parse('false', GenerateParseReviverFunction(['false']))
+  );
+  assertEquals(
+    'foo',
+    JSON.parse('"foo"', GenerateParseReviverFunction(['"foo"']))
+  );
+
+  assertEquals('null', JSON.stringify(JSON.rawJSON(null)));
+  assertEquals('true', JSON.stringify(JSON.rawJSON(true)));
+  assertEquals('false', JSON.stringify(JSON.rawJSON(false)));
+  assertEquals('"foo"', JSON.stringify(JSON.rawJSON('"foo"')));
+})();
+
+(function TestObject() {
+  assertEquals(
+    {},
+    JSON.parse('{}', GenerateParseReviverFunction([]))
+  );
+  assertEquals(
+    { 42: 37 },
+    JSON.parse('{"42":37}', GenerateParseReviverFunction(['37']))
+  );
+  assertEquals(
+    { x: 1, y: 2 },
+    JSON.parse('{"x": 1, "y": 2}', GenerateParseReviverFunction(['1', '2']))
+  );
+  // undefined means the json value is JSObject or JSArray and the passed
+  // context to the reviver function has no source property.
+ assertEquals( + { x: [1, 2], y: [2, 3] }, + JSON.parse( + '{"x": [1,2], "y": [2,3]}', + GenerateParseReviverFunction(['1', '2', undefined, '2', '3', undefined]) + ) + ); + assertEquals( + { x: { x: 1, y: 2 } }, + JSON.parse( + '{"x": {"x": 1, "y": 2}}', + GenerateParseReviverFunction(['1', '2', undefined, undefined]) + ) + ); + + assertEquals('{"42":37}', JSON.stringify({ 42: JSON.rawJSON(37) })); + assertEquals( + '{"x":1,"y":2}', + JSON.stringify({ x: JSON.rawJSON(1), y: JSON.rawJSON(2) }) + ); + assertEquals( + '{"x":{"x":1,"y":2}}', + JSON.stringify({ x: { x: JSON.rawJSON(1), y: JSON.rawJSON(2) } }) + ); +})(); + +(function TestArray() { + assertEquals([1], JSON.parse('[1.0]', GenerateParseReviverFunction(['1.0']))); + assertEquals( + [1.1], + JSON.parse('[1.1]', GenerateParseReviverFunction(['1.1'])) + ); + assertEquals([], JSON.parse('[]', GenerateParseReviverFunction([]))); + assertEquals( + [1, '2', true, null, { x: 1, y: 1 }], + JSON.parse( + '[1, "2", true, null, {"x": 1, "y": 1}]', + GenerateParseReviverFunction(['1', '"2"', 'true', 'null', '1', '1']) + ) + ); + + assertEquals('[1,1.1]', JSON.stringify([JSON.rawJSON(1), JSON.rawJSON(1.1)])); + assertEquals( + '["1",true,null,false]', + JSON.stringify([ + JSON.rawJSON('"1"'), + JSON.rawJSON(true), + JSON.rawJSON(null), + JSON.rawJSON(false), + ]) + ); + assertEquals( + '[{"x":1,"y":1}]', + JSON.stringify([{ x: JSON.rawJSON(1), y: JSON.rawJSON(1) }]) + ); +})(); + +function assertIsRawJson(rawJson, expectedRawJsonValue) { + assertEquals(null, Object.getPrototypeOf(rawJson)); + assertTrue(Object.hasOwn(rawJson, 'rawJSON')); + assertEquals(['rawJSON'], Object.getOwnPropertyNames(rawJson)); + assertEquals([], Object.getOwnPropertySymbols(rawJson)); + assertEquals(expectedRawJsonValue, rawJson.rawJSON); +} + +(function TestRawJson() { + assertIsRawJson(JSON.rawJSON(1), '1'); + assertIsRawJson(JSON.rawJSON(null), 'null'); + assertIsRawJson(JSON.rawJSON(true), 'true'); + assertIsRawJson(JSON.rawJSON(false), 'false'); + assertIsRawJson(JSON.rawJSON('"foo"'), '"foo"'); + + assertThrows(() => { + JSON.rawJSON(Symbol('123')); + }, TypeError); + + assertThrows(() => { + JSON.rawJSON(undefined); + }, SyntaxError); + + assertThrows(() => { + JSON.rawJSON({}); + }, SyntaxError); + + assertThrows(() => { + JSON.rawJSON([]); + }, SyntaxError); + + const ILLEGAL_END_CHARS = ['\n', '\t', '\r', ' ']; + for (const char of ILLEGAL_END_CHARS) { + assertThrows(() => { + JSON.rawJSON(`${char}123`); + }, SyntaxError); + assertThrows(() => { + JSON.rawJSON(`123${char}`); + }, SyntaxError); + } + + assertThrows(() => { + JSON.rawJSON(''); + }, SyntaxError); + + const values = [1, 1.1, null, false, true, '123']; + for (const value of values) { + assertFalse(JSON.isRawJSON(value)); + assertTrue(JSON.isRawJSON(JSON.rawJSON(value))); + } + assertFalse(JSON.isRawJSON(undefined)); + assertFalse(JSON.isRawJSON(Symbol('123'))); + assertFalse(JSON.isRawJSON([])); + assertFalse(JSON.isRawJSON({ rawJSON: '123' })); +})(); + +(function TestReviverModifyJsonValue() { + { + let reviverCallIndex = 0; + const expectedKeys = ['a', 'b', 'c', '']; + const reviver = function(key, value, {source}) { + assertEquals(expectedKeys[reviverCallIndex++], key); + if (key == 'a') { + this.b = 2; + assertEquals('0', source); + } else if (key == 'b') { + this.c = 3; + assertEquals(2, value); + assertEquals('1', source); + } else if (key == 'c') { + assertEquals(3, value); + assertEquals(undefined, source); + } + return value; + } + assertEquals({a: 0, b: 2, c: 3}, JSON.parse('{"a": 0, 
"b": 1, "c": [1, 2]}', reviver)); + } + { + let reviverCallIndex = 0; + const expectedKeys = ['0', '1', '2', '3', '']; + const reviver = function(key, value, {source}) { + assertEquals(expectedKeys[reviverCallIndex++], key); + if (key == '0') { + this[1] = 3; + assertEquals(1, value); + assertEquals('1', source); + } else if (key == '1') { + this[2] = 4; + assertEquals(3, value); + assertEquals('2', source); + } else if(key == '2') { + this[3] = 5; + assertEquals(4, value); + assertEquals('3', source); + } else if(key == '5'){ + assertEquals(5, value); + assertEquals(undefined, source); + } + return value; + } + assertEquals([1, 3, 4, 5], JSON.parse('[1, 2, 3, {"a": 1}]', reviver)); + } +})(); diff --git a/deps/v8/test/mjsunit/harmony/regexp-unicode-sets.js b/deps/v8/test/mjsunit/harmony/regexp-unicode-sets.js new file mode 100644 index 00000000000000..19f95bbffbdf24 --- /dev/null +++ b/deps/v8/test/mjsunit/harmony/regexp-unicode-sets.js @@ -0,0 +1,143 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-regexp-unicode-sets + +// u and v are not allowed together. +assertEarlyError('/./uv'); +assertThrowsAtRuntime("new RegExp('.','uv')", SyntaxError); + +assertEquals('v', /./v.flags); +assertTrue(/./v.unicodeSets); + +// Characters that require escaping within a character class in /v mode +assertEarlyError('/[(]/v'); +assertEarlyError('/[)]/v'); +assertEarlyError('/[[]/v'); +assertEarlyError('/[]]/v'); +assertEarlyError('/[{]/v'); +assertEarlyError('/[}]/v'); +assertEarlyError('/[/]/v'); +assertEarlyError('/[-]/v'); +// Need to escape the backslash, as assertEarlyError uses eval(). +assertEarlyError('/[\\]/v'); +assertEarlyError('/[|]/v'); + +assertEarlyError('/[&&]/v'); +assertEarlyError('/[!!]/v'); +assertEarlyError('/[##]/v'); +assertEarlyError('/[$$]/v'); +assertEarlyError('/[%%]/v'); +assertEarlyError('/[**]/v'); +assertEarlyError('/[++]/v'); +assertEarlyError('/[,,]/v'); +assertEarlyError('/[..]/v'); +assertEarlyError('/[::]/v'); +assertEarlyError('/[;;]/v'); +assertEarlyError('/[<<]/v'); +assertEarlyError('/[==]/v'); +assertEarlyError('/[>>]/v'); +assertEarlyError('/[??]/v'); +assertEarlyError('/[@@]/v'); +// The first ^ negates the class. The following two are not valid. +assertEarlyError('/[^^^]/v'); +assertEarlyError('/[``]/v'); +assertEarlyError('/[~~]/v'); + +assertEarlyError('/[a&&&]/v'); +assertEarlyError('/[&&&a]/v'); + +const allAscii = Array.from( + {length: 127}, (v, i) => { return String.fromCharCode(i); }); + +function check(re, expectMatch, expectNoMatch) { + if (expectNoMatch === undefined) { + const expectSet = new Set(expectMatch.map(val => { + return (typeof val == 'number') ? String(val) : val; })); + expectNoMatch = allAscii.filter(val => !expectSet.has(val)); + } + for (const match of expectMatch) { + assertTrue(re.test(match), `${re}.test(${match})`); + } + for (const noMatch of expectNoMatch) { + assertFalse(re.test(noMatch), `${re}.test(${noMatch})`); + } + // Nest the current RegExp in a negated class and check expectations are + // inversed. 
+  const inverted = new RegExp(`[^${re.source}]`, re.flags);
+  for (const match of expectMatch) {
+    assertFalse(inverted.test(match), `${inverted}.test(${match})`);
+  }
+  for (const noMatch of expectNoMatch) {
+    assertTrue(inverted.test(noMatch), `${inverted}.test(${noMatch})`);
+  }
+}
+
+// Union with nested class
+check(
+    /[\da-f[xy][^[^z]]]/v, Array.from('0123456789abcdefxyz'),
+    Array.from('ghijklmnopqrstuv!?'));
+
+// Intersections
+check(/[\d&&[0-9]]/v, Array.from('0123456789'), []);
+check(/[\d&&0]/v, [0], Array.from('123456789'));
+check(/[\d&&9]/v, [9], Array.from('012345678'));
+check(/[\d&&[02468]]/v, Array.from('02468'), Array.from('13579'));
+check(/[\d&&[13579]]/v, Array.from('13579'), Array.from('02468'));
+check(
+    /[\w&&[^a-zA-Z_]]/v, Array.from('0123456789'),
+    Array.from('abcdxyzABCDXYZ_!?'));
+check(
+    /[^\w&&[a-zA-Z_]]/v, Array.from('0123456789!?'),
+    Array.from('abcdxyzABCDXYZ_'));
+
+// Subtractions
+check(/[\d--[!-%]]/v, Array.from('0123456789'));
+check(/[\d--[A-Z]]/v, Array.from('0123456789'));
+check(/[\d--[0-9]]/v, []);
+check(/[\d--[\w]]/v, []);
+check(/[\d--0]/v, Array.from('123456789'));
+check(/[\d--9]/v, Array.from('012345678'));
+check(/[[\d[a-c]]--9]/v, Array.from('012345678abc'));
+check(/[\d--[02468]]/v, Array.from('13579'));
+check(/[\d--[13579]]/v, Array.from('02468'));
+check(/[[3-7]--[0-9]]/v, []);
+check(/[[3-7]--[0-7]]/v, []);
+check(/[[3-7]--[3-9]]/v, []);
+check(/[[3-79]--[0-7]]/v, [9]);
+check(/[[3-79]--[3-9]]/v, []);
+check(/[[3-7]--[0-3]]/v, Array.from('4567'));
+check(/[[3-7]--[0-5]]/v, Array.from('67'));
+check(/[[3-7]--[7-9]]/v, Array.from('3456'));
+check(/[[3-7]--[5-9]]/v, Array.from('34'));
+check(/[[3-7a-c]--[0-3]]/v, Array.from('4567abc'));
+check(/[[3-7a-c]--[0-5]]/v, Array.from('67abc'));
+check(/[[3-7a-c]--[7-9]]/v, Array.from('3456abc'));
+check(/[[3-7a-c]--[5-9]]/v, Array.from('34abc'));
+check(/[[2-8]--[0-3]--5--[7-9]]/v, Array.from('46'));
+check(/[[2-57-8]--[0-3]--[5-7]]/v, Array.from('48'));
+check(/[[0-57-8]--[1-34]--[5-7]]/v, Array.from('08'));
+check(/[\d--[^02468]]/v, Array.from('02468'));
+check(/[\d--[^13579]]/v, Array.from('13579'));
+
+// Ignore-Case
+check(/[Ā-č]/v, Array.from('ĀāĂ㥹Ćć'), Array.from('abc'));
+check(/[ĀĂĄĆ]/vi, Array.from('ĀāĂ㥹Ćć'), Array.from('abc'));
+check(/[āăąć]/vi, Array.from('ĀāĂ㥹Ćć'), Array.from('abc'));
+
+// Some more sophisticated tests taken from
+// https://v8.dev/features/regexp-v-flag
+assertFalse(/[\p{Script_Extensions=Greek}--π]/v.test('π'));
+assertFalse(/[\p{Script_Extensions=Greek}--[αβγ]]/v.test('α'));
+assertFalse(/[\p{Script_Extensions=Greek}--[α-γ]]/v.test('β'));
+assertTrue(/[\p{Decimal_Number}--[0-9]]/v.test('𑜹'));
+assertFalse(/[\p{Decimal_Number}--[0-9]]/v.test('4'));
+assertTrue(/[\p{Script_Extensions=Greek}&&\p{Letter}]/v.test('π'));
+assertFalse(/[\p{Script_Extensions=Greek}&&\p{Letter}]/v.test('𐆊'));
+assertTrue(/[\p{White_Space}&&\p{ASCII}]/v.test('\n'));
+assertFalse(/[\p{White_Space}&&\p{ASCII}]/v.test('\u2028'));
+assertTrue(/[\p{Script_Extensions=Mongolian}&&\p{Number}]/v.test('᠗'));
+assertFalse(/[\p{Script_Extensions=Mongolian}&&\p{Number}]/v.test('ᠴ'));
+assertEquals('XXXXXX4#', 'aAbBcC4#'.replaceAll(/\p{Lowercase_Letter}/giv, 'X'));
+assertEquals('XXXXXX4#', 'aAbBcC4#'.replaceAll(/[^\P{Lowercase_Letter}]/giv, 'X'));
diff --git a/deps/v8/test/mjsunit/harmony/regress/regress-crbug-1367133.js b/deps/v8/test/mjsunit/harmony/regress/regress-crbug-1367133.js
new file mode 100644
index 00000000000000..a80d9b0be904ca
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/regress/regress-crbug-1367133.js
@@ -0,0 +1,28 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --harmony-change-array-by-copy
+// Flags: --allow-natives-syntax --stress-concurrent-inlining
+
+(function TestArray() {
+  function doCall(a, method, ...args) { a[method](); }
+  function callOnArray(a) { doCall(a, 'with'); a.keys(); }
+
+  %PrepareFunctionForOptimization(callOnArray);
+  callOnArray([1]);
+  doCall({}, 'valueOf', "foo");
+  %OptimizeFunctionOnNextCall(callOnArray);
+  callOnArray([{},]);
+})();
+
+(function TestTypedArray() {
+  function doCall(a, method, ...args) { a[method](); }
+  function callOnArray(a) { doCall(a, 'with'); a.keys(); }
+
+  %PrepareFunctionForOptimization(callOnArray);
+  callOnArray(new Uint8Array(32));
+  doCall({}, 'valueOf', "foo");
+  %OptimizeFunctionOnNextCall(callOnArray);
+  callOnArray(new Float64Array(8));
+})();
diff --git a/deps/v8/test/mjsunit/harmony/regress/regress-crbug-1372500.js b/deps/v8/test/mjsunit/harmony/regress/regress-crbug-1372500.js
new file mode 100644
index 00000000000000..6264570fdd3bd7
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/regress/regress-crbug-1372500.js
@@ -0,0 +1,14 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --harmony-symbol-as-weakmap-key --expose-gc
+
+// Register an object in a FinalizationRegistry with a Symbol as the unregister
+// token.
+let fr = new FinalizationRegistry(function () {});
+(function register() {
+  fr.register({}, "holdings", Symbol('unregisterToken'));
+})();
+// The unregister token should be dead; trigger its collection.
+gc();
diff --git a/deps/v8/test/mjsunit/harmony/symbol-as-weakmap-key.js b/deps/v8/test/mjsunit/harmony/symbol-as-weakmap-key.js
new file mode 100644
index 00000000000000..284e78b30196b7
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/symbol-as-weakmap-key.js
@@ -0,0 +1,108 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
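// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch: a hedged summary of the rule the
// regression test above and the tests below exercise. Under
// --harmony-symbol-as-weakmap-key, a non-registered symbol may serve as a
// FinalizationRegistry unregister token, while a registered (Symbol.for)
// symbol may not. Names here are illustrative only.
const sketchRegistry = new FinalizationRegistry(() => {});
const sketchToken = Symbol('token');  // non-registered symbol: allowed
sketchRegistry.register({}, 'holdings', sketchToken);
sketchRegistry.unregister(sketchToken);
// sketchRegistry.register({}, 'holdings', Symbol.for('t'));  // TypeError
// ---------------------------------------------------------------------------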
+
+// Flags: --harmony-symbol-as-weakmap-key --expose-gc --allow-natives-syntax --noincremental-marking
+
+(function TestWeakMapWithNonRegisteredSymbolKey() {
+  const key = Symbol('123');
+  const value = 1;
+  const map = new WeakMap();
+  assertFalse(map.has(key));
+  assertSame(undefined, map.get(key));
+  assertFalse(map.delete(key));
+  assertSame(map, map.set(key, value));
+  assertSame(value, map.get(key));
+  assertTrue(map.has(key));
+  assertTrue(map.delete(key));
+  assertFalse(map.has(key));
+  assertSame(undefined, map.get(key));
+  assertFalse(map.delete(key));
+  assertFalse(map.has(key));
+  assertSame(undefined, map.get(key));
+})();
+
+(function TestWeakMapWithNonRegisteredSymbolKeyGC() {
+  const map = new WeakMap();
+
+  const outerKey = Symbol('234');
+  const outerValue = 1;
+  map.set(outerKey, outerValue);
+  (function () {
+    const innerKey = Symbol('123');
+    const innerValue = 1;
+    map.set(innerKey, innerValue);
+    assertTrue(map.has(innerKey));
+    assertSame(innerValue, map.get(innerKey));
+  })();
+  gc();
+  assertTrue(map.has(outerKey));
+  assertSame(outerValue, map.get(outerKey));
+  assertEquals(1, %GetWeakCollectionSize(map));
+})();
+
+(function TestWeakMapWithRegisteredSymbolKey() {
+  const key = Symbol.for('123');
+  const value = 1;
+  const map = new WeakMap();
+  assertFalse(map.has(key));
+  assertSame(undefined, map.get(key));
+  assertFalse(map.delete(key));
+  assertThrows(() => {
+    map.set(key, value);
+  }, TypeError, 'Invalid value used as weak map key');
+  assertFalse(map.has(key));
+  assertSame(undefined, map.get(key));
+  assertFalse(map.delete(key));
+  assertFalse(map.has(key));
+  assertSame(undefined, map.get(key));
+})();
+
+(function TestWeakSetWithNonRegisteredSymbolKey() {
+  const key = Symbol('123');
+  const set = new WeakSet();
+  assertFalse(set.has(key));
+  assertFalse(set.delete(key));
+  assertSame(set, set.add(key));
+  assertTrue(set.has(key));
+  assertTrue(set.delete(key));
+  assertFalse(set.has(key));
+  assertFalse(set.delete(key));
+  assertFalse(set.has(key));
+})();
+
+(function TestWeakSetWithNonRegisteredSymbolKeyGC() {
+  const set = new WeakSet();
+  const outerKey = Symbol('234');
+  set.add(outerKey);
+  (function () {
+    const innerKey = Symbol('123');
+    set.add(innerKey);
+    assertTrue(set.has(innerKey));
+  })();
+  assertTrue(set.has(outerKey));
+  gc();
+  assertEquals(1, %GetWeakCollectionSize(set));
+})();
+
+(function TestWeakSetWithRegisteredSymbolKey() {
+  const key = Symbol.for('123');
+  const set = new WeakSet();
+  assertFalse(set.has(key));
+  assertFalse(set.delete(key));
+
+  assertThrows(() => {
+    assertSame(set, set.add(key));
+  }, TypeError, 'Invalid value used in weak set');
+
+  assertFalse(set.has(key));
+  assertFalse(set.delete(key));
+  assertFalse(set.has(key));
+})();
+
+(function TestFinalizationRegistryUnregister() {
+  const fr = new FinalizationRegistry(function() {});
+  const key = {};
+  fr.register(Symbol('foo'), "holdings", key);
+  fr.unregister(key);
+})();
diff --git a/deps/v8/test/mjsunit/harmony/typed-array-to-sorted.js b/deps/v8/test/mjsunit/harmony/typed-array-to-sorted.js
new file mode 100644
index 00000000000000..71eac41dcb783d
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/typed-array-to-sorted.js
@@ -0,0 +1,299 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
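// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch: the WeakMap rule the tests above
// assert, in plain form. Only non-registered symbols are valid weak keys;
// registered (Symbol.for) symbols throw a TypeError.
const sketchMap = new WeakMap();
const sketchKey = Symbol('desc');       // non-registered symbol: allowed
sketchMap.set(sketchKey, 'value');
console.log(sketchMap.get(sketchKey));  // 'value'
try {
  sketchMap.set(Symbol.for('desc'), 'value');  // registered symbol
} catch (e) {
  console.log(e instanceof TypeError);  // true
}
// ---------------------------------------------------------------------------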
+
+// Flags: --harmony-change-array-by-copy --harmony-rab-gsab
+// Flags: --allow-natives-syntax
+
+d8.file.execute('test/mjsunit/typedarray-helpers.js');
+
+const TAProto = Object.getPrototypeOf(Int8Array.prototype);
+
+function AssertToSortedAndSortSameResult(input, ...args) {
+  const orig = input.slice();
+  const s = TAProto.toSorted.apply(input, args);
+  const copy = input.slice();
+  TAProto.sort.apply(copy, args);
+
+  // The in-place sorted version should be pairwise equal to the toSorted
+  // version.
+  assertEquals(copy, s);
+
+  // The original input should be unchanged.
+  assertEquals(orig, input);
+
+  // The result of toSorted() is a copy.
+  assertFalse(s === input);
+}
+
+function TestToSortedBasicBehaviorHelper(input) {
+  // No custom comparator.
+  AssertToSortedAndSortSameResult(input);
+  // Custom comparator.
+  AssertToSortedAndSortSameResult(input, (x, y) => {
+    if (x < y) return -1;
+    if (x > y) return 1;
+    return 0;
+  });
+}
+
+(function TestSurface() {
+  for (let TA of ctors) {
+    assertEquals(1, TA.prototype.toSorted.length);
+    assertEquals("toSorted", TA.prototype.toSorted.name);
+  }
+})();
+
+(function TestBasic() {
+  for (let TA of ctors) {
+    let a = new TA(4);
+    for (let i = 0; i < 4; i++) {
+      WriteToTypedArray(a, i, (Math.random() * 100)|0);
+    }
+    TestToSortedBasicBehaviorHelper(a);
+  }
+})();
+
+(function TestResizableBuffer() {
+  for (let TA of ctors) {
+    const rab = CreateResizableArrayBuffer(4 * TA.BYTES_PER_ELEMENT,
+                                           8 * TA.BYTES_PER_ELEMENT);
+    const fixedLength = new TA(rab, 0, 4);
+    const fixedLengthWithOffset = new TA(rab, 2 * TA.BYTES_PER_ELEMENT, 2);
+    const lengthTracking = new TA(rab, 0);
+    const lengthTrackingWithOffset = new TA(rab, 2 * TA.BYTES_PER_ELEMENT);
+
+    // Write some data into the array.
+    const taWrite = new TA(rab);
+    for (let i = 0; i < 4; i++) {
+      WriteToTypedArray(taWrite, i, (Math.random() * 100)|0);
+    }
+
+    // a, b, c, d, below represent random values.
+    //
+    // Orig. array: [a, b, c, d]
+    //              [a, b, c, d] << fixedLength
+    //                    [c, d] << fixedLengthWithOffset
+    //              [a, b, c, d, ...] << lengthTracking
+    //                    [c, d, ...] << lengthTrackingWithOffset
+
+    TestToSortedBasicBehaviorHelper(fixedLength);
+    TestToSortedBasicBehaviorHelper(fixedLengthWithOffset);
+    TestToSortedBasicBehaviorHelper(lengthTracking);
+    TestToSortedBasicBehaviorHelper(lengthTrackingWithOffset);
+
+    // Shrink so that the TAs with offset go out of bounds.
+    rab.resize(1 * TA.BYTES_PER_ELEMENT);
+    WriteToTypedArray(taWrite, 0, 0);
+
+    assertThrows(() => { fixedLength.toSorted(); }, TypeError);
+    assertThrows(() => { fixedLengthWithOffset.toSorted(); }, TypeError);
+    TestToSortedBasicBehaviorHelper(lengthTracking);
+    assertThrows(() => { lengthTrackingWithOffset.toSorted(); }, TypeError);
+
+    // Shrink to zero.
+    rab.resize(0);
+
+    assertThrows(() => { fixedLength.toSorted(); }, TypeError);
+    assertThrows(() => { fixedLengthWithOffset.toSorted(); }, TypeError);
+    TestToSortedBasicBehaviorHelper(lengthTracking);
+    assertThrows(() => { lengthTrackingWithOffset.toSorted(); }, TypeError);
+
+    // Grow so that all TAs are back in-bounds.
+    rab.resize(6 * TA.BYTES_PER_ELEMENT);
+    for (let i = 0; i < 6; ++i) {
+      WriteToTypedArray(taWrite, i, (Math.random() * 100)|0);
+    }
+
+    // Orig. array: [a, b, c, d, e, f]
+    //              [a, b, c, d] << fixedLength
+    //                    [c, d] << fixedLengthWithOffset
+    //              [a, b, c, d, e, f, ...] << lengthTracking
+    //                    [c, d, e, f, ...] << lengthTrackingWithOffset
+
+    TestToSortedBasicBehaviorHelper(fixedLength);
+    TestToSortedBasicBehaviorHelper(fixedLengthWithOffset);
+    TestToSortedBasicBehaviorHelper(lengthTracking);
+    TestToSortedBasicBehaviorHelper(lengthTrackingWithOffset);
+  }
+})();
+
+(function TestComparatorShrinks() {
+  for (let TA of ctors) {
+    const rab = CreateResizableArrayBuffer(4 * TA.BYTES_PER_ELEMENT,
+                                           8 * TA.BYTES_PER_ELEMENT);
+    const lengthTracking = new TA(rab, 0);
+    for (let i = 0; i < 4; i++) {
+      WriteToTypedArray(lengthTracking, i, (Math.random() * 100)|0)
+    }
+
+    let resized = false;
+    const evilComparator = (x, y) => {
+      if (!resized) {
+        resized = true;
+        rab.resize(2 * TA.BYTES_PER_ELEMENT);
+      }
+      if (x < y) return -1;
+      if (x > y) return 1;
+      return 0;
+    };
+
+    // Shrinks don't affect toSorted because sorting is done on a snapshot taken
+    // at the beginning.
+    let s = lengthTracking.toSorted(evilComparator);
+    assertEquals(4, s.length);
+    // Source shrunk.
+    assertEquals(2, lengthTracking.length);
+  }
+})();
+
+(function TestComparatorGrows() {
+  for (let TA of ctors) {
+    const rab = CreateResizableArrayBuffer(4 * TA.BYTES_PER_ELEMENT,
+                                           8 * TA.BYTES_PER_ELEMENT);
+    const lengthTracking = new TA(rab, 0);
+    for (let i = 0; i < 4; i++) {
+      WriteToTypedArray(lengthTracking, i, (Math.random() * 100)|0)
+    }
+
+    let resized = false;
+    const evilComparator = (x, y) => {
+      if (!resized) {
+        resized = true;
+        rab.resize(6 * TA.BYTES_PER_ELEMENT);
+      }
+      if (x < y) return -1;
+      if (x > y) return 1;
+      return 0;
+    };
+
+    // Grows also don't affect toSorted because sorting is done on a snapshot
+    // taken at the beginning.
+    let s = lengthTracking.toSorted(evilComparator);
+    assertEquals(4, s.length);
+    // Source grew.
+    assertEquals(6, lengthTracking.length);
+  }
+})();
+
+(function TestComparatorDetaches() {
+  for (let TA of ctors) {
+    const rab = CreateResizableArrayBuffer(4 * TA.BYTES_PER_ELEMENT,
+                                           8 * TA.BYTES_PER_ELEMENT);
+    const lengthTracking = new TA(rab, 0);
+    for (let i = 0; i < 4; i++) {
+      WriteToTypedArray(lengthTracking, i, (Math.random() * 100)|0)
+    }
+
+    let detached = false;
+    const evilComparator = (x, y) => {
+      if (!detached) {
+        detached = true;
+        %ArrayBufferDetach(rab);
+      }
+      if (x < y) return -1;
+      if (x > y) return 1;
+      return 0;
+    };
+
+    // Detaching also doesn't affect toSorted because sorting is done on a
+    // snapshot taken at the beginning.
+    let s = lengthTracking.toSorted(evilComparator);
+    assertEquals(4, s.length);
+    // Source is detached.
+    assertEquals(0, lengthTracking.length);
+  }
+})();
+
+(function TestGrowableSAB() {
+  for (let TA of ctors) {
+    const gsab = CreateGrowableSharedArrayBuffer(4 * TA.BYTES_PER_ELEMENT,
+                                                 8 * TA.BYTES_PER_ELEMENT);
+    const fixedLength = new TA(gsab, 0, 4);
+    const fixedLengthWithOffset = new TA(gsab, 2 * TA.BYTES_PER_ELEMENT, 2);
+    const lengthTracking = new TA(gsab, 0);
+    const lengthTrackingWithOffset = new TA(gsab, 2 * TA.BYTES_PER_ELEMENT);
+
+    // Write some data into the array.
+    const taWrite = new TA(gsab);
+    for (let i = 0; i < 4; i++) {
+      WriteToTypedArray(taWrite, i, (Math.random() * 100)|0);
+    }
+
+    // Orig. array: [a, b, c, d]
+    //              [a, b, c, d] << fixedLength
+    //                    [c, d] << fixedLengthWithOffset
+    //              [a, b, c, d, ...] << lengthTracking
+    //                    [c, d, ...] << lengthTrackingWithOffset
+    TestToSortedBasicBehaviorHelper(fixedLength);
+    TestToSortedBasicBehaviorHelper(fixedLengthWithOffset);
+    TestToSortedBasicBehaviorHelper(lengthTracking);
+    TestToSortedBasicBehaviorHelper(lengthTrackingWithOffset);
+
+    // Grow.
+    gsab.grow(6 * TA.BYTES_PER_ELEMENT);
+    for (let i = 0; i < 6; ++i) {
+      WriteToTypedArray(taWrite, i, (Math.random() * 100)|0);
+    }
+
+    // Orig. array: [a, b, c, d, e, f]
+    //              [a, b, c, d] << fixedLength
+    //                    [c, d] << fixedLengthWithOffset
+    //              [a, b, c, d, e, f, ...] << lengthTracking
+    //                    [c, d, e, f, ...] << lengthTrackingWithOffset
+    TestToSortedBasicBehaviorHelper(fixedLength);
+    TestToSortedBasicBehaviorHelper(fixedLengthWithOffset);
+    TestToSortedBasicBehaviorHelper(lengthTracking);
+    TestToSortedBasicBehaviorHelper(lengthTrackingWithOffset);
+  }
+})();
+
+(function TestComparatorGrows() {
+  for (let TA of ctors) {
+    const gsab = CreateGrowableSharedArrayBuffer(4 * TA.BYTES_PER_ELEMENT,
+                                                 8 * TA.BYTES_PER_ELEMENT);
+    const lengthTracking = new TA(gsab, 0);
+    for (let i = 0; i < 4; i++) {
+      WriteToTypedArray(lengthTracking, i, (Math.random() * 100)|0)
+    }
+
+    let resized = false;
+    const evilComparator = (x, y) => {
+      if (!resized) {
+        resized = true;
+        gsab.grow(6 * TA.BYTES_PER_ELEMENT);
+      }
+      if (x < y) return -1;
+      if (x > y) return 1;
+      return 0;
+    };
+
+    // Grows also don't affect toSorted because sorting is done on a snapshot
+    // taken at the beginning.
+    let s = lengthTracking.toSorted(evilComparator);
+    assertEquals(4, s.length);
+    // Source grew.
+    assertEquals(6, lengthTracking.length);
+  }
+})();
+
+(function TestNonTypedArray() {
+  for (let TA of ctors) {
+    assertThrows(() => { TA.prototype.toSorted.call([1,2,3,4]); }, TypeError);
+  }
+})();
+
+(function TestDetached() {
+  for (let TA of ctors) {
+    let a = new TA(4);
+    %ArrayBufferDetach(a.buffer);
+    assertThrows(() => { a.toSorted(); }, TypeError);
+  }
+})();
+
+(function TestNoSpecies() {
+  class MyUint8Array extends Uint8Array {
+    static get [Symbol.species]() { return MyUint8Array; }
+  }
+  assertEquals(Uint8Array, (new MyUint8Array()).toSorted().constructor);
+})();
diff --git a/deps/v8/test/mjsunit/harmony/weakrefs/basics.js b/deps/v8/test/mjsunit/harmony/weakrefs/basics.js
index f879df9a2a63b0..7628c641bc7984 100644
--- a/deps/v8/test/mjsunit/harmony/weakrefs/basics.js
+++ b/deps/v8/test/mjsunit/harmony/weakrefs/basics.js
@@ -47,7 +47,7 @@
 
 (function TestRegisterWithNonObjectTarget() {
   let fg = new FinalizationRegistry(() => {});
-  let message = "FinalizationRegistry.prototype.register: target must be an object";
+  let message = "FinalizationRegistry.prototype.register: invalid target";
   assertThrows(() => fg.register(1, "holdings"), TypeError, message);
   assertThrows(() => fg.register(false, "holdings"), TypeError, message);
   assertThrows(() => fg.register("foo", "holdings"), TypeError, message);
@@ -116,7 +116,7 @@
 })();
 
 (function TestWeakRefConstructorWithNonObject() {
-  let message = "WeakRef: target must be an object";
+  let message = "WeakRef: invalid target";
   assertThrows(() => new WeakRef(), TypeError, message);
   assertThrows(() => new WeakRef(1), TypeError, message);
   assertThrows(() => new WeakRef(false), TypeError, message);
diff --git a/deps/v8/test/mjsunit/harmony/weakrefs/symbol-as-weakref-target-gc.js b/deps/v8/test/mjsunit/harmony/weakrefs/symbol-as-weakref-target-gc.js
new file mode 100644
index 00000000000000..561bf4f058bdd7
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/weakrefs/symbol-as-weakref-target-gc.js
@@ -0,0 +1,39 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
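// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch: the snapshot semantics the
// TestComparator* cases above rely on. toSorted() copies the elements before
// sorting, so a comparator that resizes the backing buffer cannot change the
// result's length. This uses the standard resizable-ArrayBuffer API; names
// are illustrative only.
const sketchRab = new ArrayBuffer(4, {maxByteLength: 8});
const sketchTa = new Uint8Array(sketchRab);  // length-tracking view
sketchTa.set([3, 1, 2, 0]);
const sketchSorted = sketchTa.toSorted((a, b) => {
  sketchRab.resize(2);  // shrink mid-sort
  return a - b;
});
console.log(sketchSorted.length);  // 4 -- the sorted snapshot is unaffected
console.log(sketchTa.length);      // 2 -- the source view did shrink
// ---------------------------------------------------------------------------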
+
+// Flags: --harmony-symbol-as-weakmap-key --expose-gc --noincremental-marking
+
+(function TestWeakRefWithSymbolGC() {
+  let weakRef;
+  {
+    const innerKey = Symbol('123');
+    weakRef = new WeakRef(innerKey);
+  }
+  // Since the WeakRef was created during this turn, it is not cleared by GC.
+  gc();
+  assertNotEquals(undefined, weakRef.deref());
+  // Next task.
+  setTimeout(() => {
+    gc();
+    assertEquals(undefined, weakRef.deref());
+  }, 0);
+})();
+
+(function TestFinalizationRegistryWithSymbolGC() {
+  let cleanUpCalled = false;
+  const fg = new FinalizationRegistry((target) => {
+    assertEquals('123', target);
+    cleanUpCalled = true;
+  });
+  (function () {
+    const innerKey = Symbol('123');
+    fg.register(innerKey, '123');
+  })();
+  gc();
+  assertFalse(cleanUpCalled);
+  // Check that cleanup callback was called in a follow up task.
+  setTimeout(() => {
+    assertTrue(cleanUpCalled);
+  }, 0);
+})();
diff --git a/deps/v8/test/mjsunit/harmony/weakrefs/symbol-as-weakref-target.js b/deps/v8/test/mjsunit/harmony/weakrefs/symbol-as-weakref-target.js
new file mode 100644
index 00000000000000..1dc874ed83b3da
--- /dev/null
+++ b/deps/v8/test/mjsunit/harmony/weakrefs/symbol-as-weakref-target.js
@@ -0,0 +1,41 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --harmony-symbol-as-weakmap-key
+
+(function TestRegisterWithSymbolTarget() {
+  const fg = new FinalizationRegistry(() => { });
+  fg.register(Symbol('123'), 'holdings');
+  // Registered symbols cannot be the target.
+  assertThrows(() => fg.register(Symbol.for('123'), 'holdings'), TypeError);
+})();
+
+(function TestRegisterWithSymbolUnregisterToken() {
+  const fg = new FinalizationRegistry(() => { });
+  fg.register({}, 'holdings', Symbol('123'));
+  // Registered symbols cannot be the unregister token.
+  assertThrows(() => fg.register({}, 'holdings', Symbol.for('123')), TypeError);
+})();
+
+(function TestRegisterSymbolAndHoldingsSameValue() {
+  const fg = new FinalizationRegistry(() => {});
+  const obj = Symbol('123');
+  // SameValue(target, holdings) not ok.
+  assertThrows(() => fg.register(obj, obj), TypeError);
+  const holdings = {a: 1};
+  fg.register(obj, holdings);
+})();
+
+(function TestUnregisterWithSymbolUnregisterToken() {
+  const fg = new FinalizationRegistry(() => {});
+  fg.unregister(Symbol('123'));
+  // Registered symbols cannot be the unregister token.
+  assertThrows(() => fg.unregister(Symbol.for('123')), TypeError);
+})();
+
+(function TestWeakRefConstructorWithSymbol() {
+  new WeakRef(Symbol('123'));
+  // Registered symbols cannot be the WeakRef target.
+  assertThrows(() => new WeakRef(Symbol.for('123')), TypeError);
+})();
diff --git a/deps/v8/test/mjsunit/json.js b/deps/v8/test/mjsunit/json.js
index 37d427aa83b339..9732f08bfe73a5 100644
--- a/deps/v8/test/mjsunit/json.js
+++ b/deps/v8/test/mjsunit/json.js
@@ -489,6 +489,9 @@ assertTrue(Object.prototype.isPrototypeOf(o2));
 var json = '{"stuff before slash\\\\stuff after slash":"whatever"}';
 TestStringify(json, JSON.parse(json));
+// TODO(v8:12955): JSON parse with source access will fail an assert when the
+// reviver modifies the json value like this. See
+// https://github.com/tc39/proposal-json-parse-with-source/issues/35.
 // https://bugs.chromium.org/p/v8/issues/detail?id=3139
diff --git a/deps/v8/test/mjsunit/maglev/omit-default-ctors-array-iterator.js b/deps/v8/test/mjsunit/maglev/omit-default-ctors-array-iterator.js
new file mode 100644
index 00000000000000..7131912abcec26
--- /dev/null
+++ b/deps/v8/test/mjsunit/maglev/omit-default-ctors-array-iterator.js
@@ -0,0 +1,32 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --omit-default-ctors --allow-natives-syntax --maglev
+
+// This behavior is not spec compliant, see crbug.com/v8/13249.
+(function ArrayIteratorMonkeyPatched() {
+  let iterationCount = 0;
+  const oldIterator = Array.prototype[Symbol.iterator];
+  Array.prototype[Symbol.iterator] =
+      function () { ++iterationCount; return oldIterator.call(this); };
+
+  class A {}
+  class B extends A {}
+  class C extends B {}
+
+  %PrepareFunctionForOptimization(C);
+  new C();
+  %OptimizeMaglevOnNextCall(C);
+
+  // C default ctor doing "...args" and B default ctor doing "...args".
+  assertEquals(2, iterationCount);
+
+  new C();
+
+  // C default ctor doing "...args" and B default ctor doing "...args".
+  assertEquals(4, iterationCount);
+  assertTrue(isMaglevved(C));  // No deopt.
+
+  Array.prototype[Symbol.iterator] = oldIterator;
+})();
diff --git a/deps/v8/test/mjsunit/maglev/omit-default-ctors.js b/deps/v8/test/mjsunit/maglev/omit-default-ctors.js
new file mode 100644
index 00000000000000..430a1118b8d5e2
--- /dev/null
+++ b/deps/v8/test/mjsunit/maglev/omit-default-ctors.js
@@ -0,0 +1,740 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --omit-default-ctors --allow-natives-syntax --maglev
+
+(function OmitDefaultBaseCtor() {
+  class A {};  // default base ctor -> will be omitted
+  class B extends A {};
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeMaglevOnNextCall(B);
+  const o = new B();
+  assertSame(B.prototype, o.__proto__);
+  assertTrue(isMaglevved(B));  // No deopt.
+})();
+
+(function OmitDefaultDerivedCtor() {
+  class A { constructor() {} };
+  class B extends A {};  // default derived ctor -> will be omitted
+  class C extends B {};
+  %PrepareFunctionForOptimization(C);
+  new C();
+  %OptimizeMaglevOnNextCall(C);
+  const o = new C();
+  assertSame(C.prototype, o.__proto__);
+  assertTrue(isMaglevved(C));  // No deopt.
+})();
+
+(function OmitDefaultBaseAndDerivedCtor() {
+  class A {};  // default base ctor -> will be omitted
+  class B extends A {};  // default derived ctor -> will be omitted
+  class C extends B {};
+  %PrepareFunctionForOptimization(C);
+  new C();
+  %OptimizeMaglevOnNextCall(C);
+  const o = new C();
+  assertSame(C.prototype, o.__proto__);
+  assertTrue(isMaglevved(C));  // No deopt.
+})();
+
+(function OmitDefaultBaseCtorWithExplicitSuper() {
+  class A {};  // default base ctor -> will be omitted
+  class B extends A { constructor() { super(); } };
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeMaglevOnNextCall(B);
+  const o = new B();
+  assertSame(B.prototype, o.__proto__);
+  assertTrue(isMaglevved(B));  // No deopt.
+})();
+
+(function OmitDefaultDerivedCtorWithExplicitSuper() {
+  class A { constructor() {} };
+  class B extends A {};  // default derived ctor -> will be omitted
+  class C extends B { constructor() { super(); } };
+  %PrepareFunctionForOptimization(C);
+  new C();
+  %OptimizeMaglevOnNextCall(C);
+  const o = new C();
+  assertSame(C.prototype, o.__proto__);
+  assertTrue(isMaglevved(C));  // No deopt.
+})();
+
+(function OmitDefaultBaseAndDerivedCtorWithExplicitSuper() {
+  class A {};  // default base ctor -> will be omitted
+  class B extends A {};  // default derived ctor -> will be omitted
+  class C extends B { constructor() { super(); } };
+  %PrepareFunctionForOptimization(C);
+  new C();
+  %OptimizeMaglevOnNextCall(C);
+  const o = new C();
+  assertSame(C.prototype, o.__proto__);
+  assertTrue(isMaglevved(C));  // No deopt.
+})();
+
+(function OmitDefaultBaseCtorWithExplicitSuperAndNonFinalSpread() {
+  class A {};  // default base ctor -> will be omitted
+  class B extends A { constructor(...args) { super(1, ...args, 2); } };
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeMaglevOnNextCall(B);
+  const o = new B(3, 4);
+  assertSame(B.prototype, o.__proto__);
+  // See https://bugs.chromium.org/p/v8/issues/detail?id=13337
+  // assertTrue(isMaglevved(B));  // No deopt.
+  // This assert will fail when the above bug is fixed:
+  assertFalse(isMaglevved(B));
+})();
+
+(function OmitDefaultDerivedCtorWithExplicitSuperAndNonFinalSpread() {
+  class A { constructor() {} };
+  class B extends A {};  // default derived ctor -> will be omitted
+  class C extends B { constructor(...args) { super(1, ...args, 2); } };
+  %PrepareFunctionForOptimization(C);
+  new C();
+  %OptimizeMaglevOnNextCall(C);
+  const o = new C(3, 4);
+  assertSame(C.prototype, o.__proto__);
+  // See https://bugs.chromium.org/p/v8/issues/detail?id=13337
+  // assertTrue(isMaglevved(C));  // No deopt.
+  // This assert will fail when the above bug is fixed:
+  assertFalse(isMaglevved(C));
+})();
+
+(function OmitDefaultBaseAndDerivedCtorWithExplicitSuperAndNonFinalSpread() {
+  class A {};  // default base ctor -> will be omitted
+  class B extends A {};  // default derived ctor -> will be omitted
+  class C extends B { constructor(...args) { super(1, ...args, 2); } };
+  %PrepareFunctionForOptimization(C);
+  new C();
+  %OptimizeMaglevOnNextCall(C);
+  const o = new C(3, 4);
+  assertSame(C.prototype, o.__proto__);
+  // See https://bugs.chromium.org/p/v8/issues/detail?id=13337
+  // assertTrue(isMaglevved(C));  // No deopt.
+  // This assert will fail when the above bug is fixed:
+  assertFalse(isMaglevved(C));
+})();
+
+(function NonDefaultBaseConstructorCalled() {
+  let ctorCallCount = 0;
+  let lastArgs;
+  class Base {
+    constructor(...args) {
+      ++ctorCallCount;
+      this.baseTagged = true;
+      lastArgs = args;
+    }
+  };
+  // Nothing will be omitted.
+  class A extends Base {};
+  %PrepareFunctionForOptimization(A);
+  new A();
+  %OptimizeMaglevOnNextCall(A);
+  const a = new A(1, 2, 3);
+  assertEquals(2, ctorCallCount);
+  assertEquals([1, 2, 3], lastArgs);
+  assertTrue(a.baseTagged);
+  assertTrue(isMaglevved(A));  // No deopt.
+
+  // 'A' default ctor will be omitted.
+  class B1 extends A {};
+  %PrepareFunctionForOptimization(B1);
+  new B1();
+  %OptimizeMaglevOnNextCall(B1);
+  const b1 = new B1(4, 5, 6);
+  assertEquals(4, ctorCallCount);
+  assertEquals([4, 5, 6], lastArgs);
+  assertTrue(b1.baseTagged);
+  assertTrue(isMaglevved(B1));  // No deopt.
+
+  // The same test with non-final spread; 'A' default ctor will be omitted.
+  class B2 extends A {
+    constructor(...args) { super(1, ...args, 2); }
+  };
+  %PrepareFunctionForOptimization(B2);
+  new B2();
+  %OptimizeMaglevOnNextCall(B2);
+  const b2 = new B2(4, 5, 6);
+  assertEquals(6, ctorCallCount);
+  assertEquals([1, 4, 5, 6, 2], lastArgs);
+  assertTrue(b2.baseTagged);
+  // See https://bugs.chromium.org/p/v8/issues/detail?id=13337
+  // assertTrue(isMaglevved(B2));  // No deopt.
+  // This assert will fail when the above bug is fixed:
+  assertFalse(isMaglevved(B2));  // No deopt.
+})();
+
+(function NonDefaultDerivedConstructorCalled() {
+  let ctorCallCount = 0;
+  let lastArgs;
+  class Base {};
+  class Derived extends Base {
+    constructor(...args) {
+      super();
+      ++ctorCallCount;
+      this.derivedTagged = true;
+      lastArgs = args;
+    }
+  };
+  // Nothing will be omitted.
+  class A extends Derived {};
+  %PrepareFunctionForOptimization(A);
+  new A();
+  %OptimizeMaglevOnNextCall(A);
+  const a = new A(1, 2, 3);
+  assertEquals(2, ctorCallCount);
+  assertEquals([1, 2, 3], lastArgs);
+  assertTrue(a.derivedTagged);
+  assertTrue(isMaglevved(A));  // No deopt.
+
+  // 'A' default ctor will be omitted.
+  class B1 extends A {};
+  %PrepareFunctionForOptimization(B1);
+  new B1();
+  %OptimizeMaglevOnNextCall(B1);
+  const b1 = new B1(4, 5, 6);
+  assertEquals(4, ctorCallCount);
+  assertEquals([4, 5, 6], lastArgs);
+  assertTrue(b1.derivedTagged);
+  assertTrue(isMaglevved(B1));  // No deopt.
+
+  // The same test with non-final spread. 'A' default ctor will be omitted.
+  class B2 extends A {
+    constructor(...args) { super(1, ...args, 2); }
+  };
+  %PrepareFunctionForOptimization(B2);
+  new B2();
+  %OptimizeMaglevOnNextCall(B2);
+  const b2 = new B2(4, 5, 6);
+  assertEquals(6, ctorCallCount);
+  assertEquals([1, 4, 5, 6, 2], lastArgs);
+  assertTrue(b2.derivedTagged);
+  // See https://bugs.chromium.org/p/v8/issues/detail?id=13337
+  // assertTrue(isMaglevved(B2));  // No deopt.
+  // This assert will fail when the above bug is fixed:
+  assertFalse(isMaglevved(B2));  // No deopt.
+})();
+
+(function BaseFunctionCalled() {
+  let baseFunctionCallCount = 0;
+  function BaseFunction() {
+    ++baseFunctionCallCount;
+    this.baseTagged = true;
+  }
+
+  class A1 extends BaseFunction {};
+  %PrepareFunctionForOptimization(A1);
+  new A1();
+  %OptimizeMaglevOnNextCall(A1);
+  const a1 = new A1();
+  assertEquals(2, baseFunctionCallCount);
+  assertTrue(a1.baseTagged);
+  assertTrue(isMaglevved(A1));  // No deopt.
+
+  class A2 extends BaseFunction {
+    constructor(...args) { super(1, ...args, 2); }
+  };
+  %PrepareFunctionForOptimization(A2);
+  new A2();
+  %OptimizeMaglevOnNextCall(A2);
+  const a2 = new A2();
+  assertEquals(4, baseFunctionCallCount);
+  assertTrue(a2.baseTagged);
+  assertTrue(isMaglevved(A2));  // No deopt.
+})();
+
+(function NonSuperclassCtor() {
+  class A {};
+  class B extends A {};
+  class C extends B {};
+  class D1 extends C {};
+  class D2 extends C { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(C);
+  %PrepareFunctionForOptimization(D1);
+  %PrepareFunctionForOptimization(D2);
+  new C();
+  new D1();
+  new D2();
+  %OptimizeMaglevOnNextCall(C);
+  %OptimizeMaglevOnNextCall(D1);
+  %OptimizeMaglevOnNextCall(D2);
+
+  // Install an object which is not a constructor into the class hierarchy.
+  C.__proto__ = {};
+
+  assertThrows(() => { new C(); }, TypeError);
+  assertThrows(() => { new D1(); }, TypeError);
+  assertThrows(() => { new D2(); }, TypeError);
+})();
+
+(function ArgumentsEvaluatedBeforeNonSuperclassCtorDetected() {
+  class A {};
+  class B extends A {};
+  class C extends B {};
+  class D1 extends C {};
+  class D2 extends C { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(C);
+  %PrepareFunctionForOptimization(D1);
+  %PrepareFunctionForOptimization(D2);
+  new C();
+  new D1();
+  new D2();
+  %OptimizeMaglevOnNextCall(C);
+  %OptimizeMaglevOnNextCall(D1);
+  %OptimizeMaglevOnNextCall(D2);
+
+  // Install an object which is not a constructor into the class hierarchy.
+  C.__proto__ = {};
+
+  let callCount = 0;
+  function foo() {
+    ++callCount;
+  }
+
+  assertThrows(() => { new C(foo()); }, TypeError);
+  assertEquals(1, callCount);
+
+  assertThrows(() => { new D1(foo()); }, TypeError);
+  assertEquals(2, callCount);
+
+  assertThrows(() => { new D2(foo()); }, TypeError);
+  assertEquals(3, callCount);
+})();
+
+(function ArgumentsEvaluatedBeforeNonSuperclassCtorDetected2() {
+  class A {};
+  class B extends A {};
+  class C extends B {};
+  class D1 extends C {
+    constructor() {
+      super(foo());
+    }
+  };
+
+  class D2 extends C {
+    constructor(...args) {
+      super(...args, foo());
+    }
+  };
+
+  let callCount = 0;
+  function foo() {
+    ++callCount;
+  }
+
+  %PrepareFunctionForOptimization(D1);
+  %PrepareFunctionForOptimization(D2);
+  new D1();
+  new D2();
+  %OptimizeMaglevOnNextCall(D1);
+  %OptimizeMaglevOnNextCall(D2);
+  assertEquals(2, callCount);
+
+  // Install an object which is not a constructor into the class hierarchy.
+  C.__proto__ = {};
+
+  assertThrows(() => { new D1(); }, TypeError);
+  assertEquals(3, callCount);
+
+  assertThrows(() => { new D2(); }, TypeError);
+  assertEquals(4, callCount);
+})();
+
+(function EvaluatingArgumentsChangesClassHierarchy() {
+  let ctorCallCount = 0;
+  class A {};
+  class B extends A { constructor() {
+    super();
+    ++ctorCallCount;
+  }};
+  class C extends B {};
+  class D extends C {
+    constructor() {
+      super(foo());
+    }
+  };
+
+  let fooCallCount = 0;
+  let changeHierarchy = false;
+  function foo() {
+    if (changeHierarchy) {
+      C.__proto__ = A;
+      C.prototype.__proto__ = A.prototype;
+    }
+    ++fooCallCount;
+  }
+
+  %PrepareFunctionForOptimization(D);
+  new D();
+  assertEquals(1, fooCallCount);
+  assertEquals(1, ctorCallCount);
+  %OptimizeMaglevOnNextCall(D);
+  changeHierarchy = true;
+
+  new D();
+  assertEquals(2, fooCallCount);
+  assertEquals(1, ctorCallCount);
+  // No deopt (Maglev doesn't depend on the prototype chain not being mutated).
+  assertTrue(isMaglevved(D));
+})();
+
+// The same test as the previous one, but with a ctor with a non-final spread.
+(function EvaluatingArgumentsChangesClassHierarchyThisTimeWithNonFinalSpread() {
+  let ctorCallCount = 0;
+  class A {};
+  class B extends A { constructor() {
+    super();
+    ++ctorCallCount;
+  }};
+  class C extends B {};
+  class D extends C {
+    constructor(...args) {
+      super(...args, foo());
+    }
+  };
+
+  let fooCallCount = 0;
+  let changeHierarchy = false;
+  function foo() {
+    if (changeHierarchy) {
+      C.__proto__ = A;
+      C.prototype.__proto__ = A.prototype;
+    }
+    ++fooCallCount;
+  }
+
+  %PrepareFunctionForOptimization(D);
+  new D();
+  assertEquals(1, fooCallCount);
+  assertEquals(1, ctorCallCount);
+  %OptimizeMaglevOnNextCall(D);
+  changeHierarchy = true;
+
+  new D();
+  assertEquals(2, fooCallCount);
+  assertEquals(1, ctorCallCount);
+  // No deopt (Maglev doesn't depend on the prototype chain not being mutated).
+  assertTrue(isMaglevved(D));
+})();
+
+(function BasePrivateField() {
+  class A {
+    #aBrand = true;
+    isA() {
+      return #aBrand in this;
+    }
+  };
+  class B extends A {};
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeMaglevOnNextCall(B);
+
+  const b = new B();
+  assertTrue(b.isA());
+  assertTrue(isMaglevved(B));  // No deopt.
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  assertTrue(c1.isA());
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  assertTrue(c2.isA());
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function DerivedPrivateField() {
+  class A {};
+  class B extends A {
+    #bBrand = true;
+    isB() {
+      return #bBrand in this;
+    }
+  };
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  assertTrue(c1.isB());
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  assertTrue(c2.isB());
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function BasePrivateMethod() {
+  class A {
+    #m() { return 'private'; }
+    callPrivate() {
+      return this.#m();
+    }
+  };
+  class B extends A {};
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeMaglevOnNextCall(B);
+
+  const b = new B();
+  assertEquals('private', b.callPrivate());
+  assertTrue(isMaglevved(B));  // No deopt.
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  assertEquals('private', c1.callPrivate());
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  assertEquals('private', c2.callPrivate());
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function DerivedPrivateMethod() {
+  class A {};
+  class B extends A {
+    #m() { return 'private'; }
+    callPrivate() {
+      return this.#m();
+    }
+  };
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  assertEquals('private', c1.callPrivate());
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  assertEquals('private', c2.callPrivate());
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function BasePrivateGetter() {
+  class A {
+    get #p() { return 'private'; }
+    getPrivate() {
+      return this.#p;
+    }
+  };
+  class B extends A {};
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeMaglevOnNextCall(B);
+
+  const b = new B();
+  assertEquals('private', b.getPrivate());
+  assertTrue(isMaglevved(B));  // No deopt.
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  assertEquals('private', c1.getPrivate());
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  assertEquals('private', c2.getPrivate());
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function DerivedPrivateGetter() {
+  class A {};
+  class B extends A {
+    get #p() { return 'private'; }
+    getPrivate() {
+      return this.#p;
+    }
+  };
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  assertEquals('private', c1.getPrivate());
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  assertEquals('private', c2.getPrivate());
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function BasePrivateSetter() {
+  class A {
+    set #p(value) { this.secret = value; }
+    setPrivate() {
+      this.#p = 'private';
+    }
+  };
+  class B extends A {};
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeMaglevOnNextCall(B);
+
+  const b = new B();
+  b.setPrivate();
+  assertEquals('private', b.secret);
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  c1.setPrivate();
+  assertEquals('private', c1.secret);
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  c2.setPrivate();
+  assertEquals('private', c2.secret);
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function DerivedPrivateSetter() {
+  class A {};
+  class B extends A {
+    set #p(value) { this.secret = value; }
+    setPrivate() {
+      this.#p = 'private';
+    }
+  };
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  c1.setPrivate();
+  assertEquals('private', c1.secret);
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  c2.setPrivate();
+  assertEquals('private', c2.secret);
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function BaseClassFields() {
+  class A {
+    aField = true;
+  };
+  class B extends A {};
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(B);
+  new B();
+  %OptimizeMaglevOnNextCall(B);
+
+  const b = new B();
+  assertTrue(b.aField);
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  assertTrue(c1.aField);
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  assertTrue(c2.aField);
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
+
+(function DerivedClassFields() {
+  class A {};
+  class B extends A {
+    bField = true;
+  };
+  class C1 extends B {};
+  class C2 extends B { constructor(...args) { super(1, ...args, 2); }};
+
+  %PrepareFunctionForOptimization(C1);
+  new C1();
+  %OptimizeMaglevOnNextCall(C1);
+
+  const c1 = new C1();
+  assertTrue(c1.bField);
+  assertTrue(isMaglevved(C1));  // No deopt.
+
+  %PrepareFunctionForOptimization(C2);
+  new C2();
+  %OptimizeMaglevOnNextCall(C2);
+
+  const c2 = new C2();
+  assertTrue(c2.bField);
+  assertTrue(isMaglevved(C2));  // No deopt.
+})();
diff --git a/deps/v8/test/mjsunit/maglev/osr-to-tf.js b/deps/v8/test/mjsunit/maglev/osr-to-tf.js
index d810226c0e1c73..9b8b998ee9b2a2 100644
--- a/deps/v8/test/mjsunit/maglev/osr-to-tf.js
+++ b/deps/v8/test/mjsunit/maglev/osr-to-tf.js
@@ -5,13 +5,25 @@
 // Flags: --allow-natives-syntax --maglev --no-stress-opt
 // Flags: --no-baseline-batch-compilation --use-osr --turbofan
 
-let keep_going = 100000;  // A counter to avoid test hangs on failure.
+let keep_going = 10000000;  // A counter to avoid test hangs on failure.
 
 function f() {
   let reached_tf = false;
+  let prev_status = 0;
   while (!reached_tf && --keep_going) {
     // This loop should trigger OSR.
     reached_tf = %CurrentFrameIsTurbofan();
+    let status = %GetOptimizationStatus(f);
+    if (status !== prev_status) {
+      let p = []
+      for (let k in V8OptimizationStatus) {
+        if (V8OptimizationStatus[k] & status) {
+          p.push(k);
+        }
+      }
+      print(p.join(","));
+      prev_status = status;
+    }
   }
 }
diff --git a/deps/v8/test/mjsunit/maglev/regress/regress-1359723.js b/deps/v8/test/mjsunit/maglev/regress/regress-1359723.js
new file mode 100644
index 00000000000000..ce42eb11fe2e34
--- /dev/null
+++ b/deps/v8/test/mjsunit/maglev/regress/regress-1359723.js
@@ -0,0 +1,21 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Flags: --allow-natives-syntax
+
+function f(__v_51, __v_52, __v_53) {
+  var a = false;
+  var b = a;
+  try {
+    var c = false + false;
+  } catch {}
+  try {
+    var d = false - (null == true);
+  } catch {}
+  return a + b - c + d;
+}
+%PrepareFunctionForOptimization(f);
+assertEquals(0, f());
+%OptimizeMaglevOnNextCall(f);
+assertEquals(0, f());
diff --git a/deps/v8/test/mjsunit/maglev/regress/regress-1363450.js b/deps/v8/test/mjsunit/maglev/regress/regress-1363450.js
new file mode 100644
index 00000000000000..0bc2ddf5471e37
--- /dev/null
+++ b/deps/v8/test/mjsunit/maglev/regress/regress-1363450.js
@@ -0,0 +1,22 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
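// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch: what --omit-default-ctors elides.
// A default derived constructor behaves like
// constructor(...args) { super(...args); }, which is why omitting it also
// skips the observable Array-iterator use that the
// omit-default-ctors-array-iterator.js test above monkey-patches.
class SketchBase { constructor(x) { this.x = x; } }
class SketchDerived extends SketchBase {}  // implicit ctor forwards ...args
console.log(new SketchDerived(42).x);  // 42 -- the base ctor still runs
// ---------------------------------------------------------------------------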
+//
+// Flags: --allow-natives-syntax
+
+class C extends (class {}) {
+  constructor() {
+    var f = () => {
+      try { C.__proto__ = null; } catch {}
+      try { super(); } catch {}
+    };
+    %PrepareFunctionForOptimization(f);
+    f();
+    %OptimizeMaglevOnNextCall(f);
+  }
+}
+try { new C(); } catch {}
+// The next 2 calls deopt before reaching relevant bits.
+try { new C(); } catch {}
+try { new C(); } catch {}
+try { new C(); } catch {}
diff --git a/deps/v8/test/mjsunit/maglev/regress/regress-1364074.js b/deps/v8/test/mjsunit/maglev/regress/regress-1364074.js
new file mode 100644
index 00000000000000..bdf96aa3f26f60
--- /dev/null
+++ b/deps/v8/test/mjsunit/maglev/regress/regress-1364074.js
@@ -0,0 +1,24 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Flags: --maglev
+
+class Base {
+}
+let Class = class extends Base {
+  constructor() {
+    super();
+  }
+};
+for (let i = 0; i < 100; i++) {
+  Class = class extends Class {
+    constructor() {
+      try {
+        super();
+        super();
+      } catch (e) {}
+    }
+  };
+}
+let instance = new Class();
diff --git a/deps/v8/test/mjsunit/maglev/regress/regress-1368046.js b/deps/v8/test/mjsunit/maglev/regress/regress-1368046.js
new file mode 100644
index 00000000000000..4cff62a54bb803
--- /dev/null
+++ b/deps/v8/test/mjsunit/maglev/regress/regress-1368046.js
@@ -0,0 +1,19 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Flags: --maglev --allow-natives-syntax
+
+function f(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9, arg10, arg11) {
+  for (let i = 0; i < 0; i++) {}
+  try {
+    throw 42;
+  } catch (e) {
+  }
+}
+
+%PrepareFunctionForOptimization(f);
+f(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42);
+f(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42);
+%OptimizeMaglevOnNextCall(f);
+f(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42);
diff --git a/deps/v8/test/mjsunit/maglev/regress/regress-v8-13289.js b/deps/v8/test/mjsunit/maglev/regress/regress-v8-13289.js
new file mode 100644
index 00000000000000..c400303ce618fa
--- /dev/null
+++ b/deps/v8/test/mjsunit/maglev/regress/regress-v8-13289.js
@@ -0,0 +1,24 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Flags: --allow-natives-syntax
+
+function Thingy() {}
+Thingy.prototype = {
+  foo: function() { return 42; }
+};
+
+const x = new Thingy();
+
+function f(o) {
+  return o.foo();
+}
+
+%PrepareFunctionForOptimization(f);
+assertEquals(42, f(x));
+%OptimizeMaglevOnNextCall(f);
+assertEquals(42, f(x));
+
+Thingy.prototype.foo = function() { return 56; }
+assertEquals(56, f(x));
diff --git a/deps/v8/test/mjsunit/mjsunit.js b/deps/v8/test/mjsunit/mjsunit.js
index 9a70951758efd5..9cf8e4d6f3bd5e 100644
--- a/deps/v8/test/mjsunit/mjsunit.js
+++ b/deps/v8/test/mjsunit/mjsunit.js
@@ -136,6 +136,14 @@ var assertThrowsAsync;
 // Assert that the passed function or eval code does not throw an exception.
 var assertDoesNotThrow;
 
+// Assert that the passed code throws an early error (i.e. throws a SyntaxError
+// at parse time).
+var assertEarlyError;
+
+// Assert that the passed code throws an exception when executed.
+// Fails if the passed code throws an exception at parse time.
+var assertThrowsAtRuntime;
+
 // Asserts that the found value is an instance of the constructor passed
 // as the second argument.
 var assertInstanceof;
@@ -586,6 +594,25 @@ var prettyPrinted;
         e => checkException(e, type_opt, cause_opt));
   };
 
+  assertEarlyError = function assertEarlyError(code) {
+    try {
+      new Function(code);
+    } catch (e) {
+      checkException(e, SyntaxError);
+      return;
+    }
+    failWithMessage('Did not throw exception while parsing');
+  }
+
+  assertThrowsAtRuntime = function assertThrowsAtRuntime(code, type_opt) {
+    const f = new Function(code);
+    if (arguments.length > 1 && type_opt !== undefined) {
+      assertThrows(f, type_opt);
+    } else {
+      assertThrows(f);
+    }
+  }
+
   assertInstanceof = function assertInstanceof(obj, type) {
     if (!(obj instanceof type)) {
       var actualTypeName = null;
diff --git a/deps/v8/test/mjsunit/mjsunit.status b/deps/v8/test/mjsunit/mjsunit.status
index 7c70ab8184f00d..8f7a4f7a59452a 100644
--- a/deps/v8/test/mjsunit/mjsunit.status
+++ b/deps/v8/test/mjsunit/mjsunit.status
@@ -34,6 +34,8 @@
   'harmony/shadowrealm-skip*': [SKIP],
   'regress/modules-skip*': [SKIP],
   'wasm/exceptions-utils': [SKIP],
+  'wasm/gc-js-interop-helpers': [SKIP],
+  'wasm/gc-js-interop-export': [SKIP],
   'wasm/wasm-module-builder': [SKIP],
   'compiler/fast-api-helpers': [SKIP],
   'typedarray-helpers': [SKIP],
@@ -432,6 +434,10 @@
   'regress/regress-1262423': [PASS,FAIL],
   'regress/regress-793588': [PASS,FAIL],
 
+  # RegExp unicode tests rely on ICU for property classes and
+  # case-insensitive unicode patterns.
+  'harmony/regexp-unicode-sets': [PASS,FAIL],
+
   # The noi18n build cannot parse characters in supplementary plane.
   'harmony/regexp-named-captures': [FAIL],
   'regress/regress-v8-10384': [FAIL],
@@ -1073,6 +1079,9 @@
   'compiler/regress-1224277': [SKIP],
   'regress/regress-1220974': [SKIP],
   'regress/regress-992389': [SKIP],
+
+  # BUG(v8:13331) Skipped until issue is fixed to reduce noise on alerts.
+  'harmony/regress/regress-crbug-1367133': [SKIP],
 }],  # gc_fuzzer or deopt_fuzzer or interrupt_fuzzer
 
 ##############################################################################
@@ -1207,6 +1216,10 @@
 
   # Baseline tests don't make sense with optimization stressing.
   'baseline/*': [SKIP],
+
+  # This test uses --wasm-speculative-inlining which is incompatible with
+  # stressing.
+  'regress/wasm/regress-1364036': [SKIP],
 }],  # variant == stress
 
 ##############################################################################
@@ -1404,6 +1417,12 @@
   # it. In the slow path, this results in one runtime call per element, which
   # takes several minutes overall.
   'regress/wasm/regress-9017': [SKIP],
+
+  # These test Array#toReversed and Array#toSpliced on a big packed array, which
+  # is created by repeated calls to Array#push. In the slow path this is very
+  # slow.
+  'harmony/array-to-reversed-big': [SKIP],
+  'harmony/array-to-spliced-big': [SKIP],
 }],  # variant == slow_path
 
 ['((arch == mips64el or arch == mips64) and not simd_mips) or (arch in [ppc64])', {
@@ -1548,8 +1567,7 @@
   'es6/collections-constructor-*': [SKIP],
   'es6/map-constructor-entry-side-effect*': [SKIP],
 
-  'shared-memory/shared-string-promotion-minor': [SKIP],
-  'shared-memory/shared-string-promotion-major': [SKIP],
+  'shared-memory/*': [SKIP],
 }],  # single_generation
 
 ################################################################################
diff --git a/deps/v8/test/mjsunit/omit-default-ctors-array-iterator.js b/deps/v8/test/mjsunit/omit-default-ctors-array-iterator.js
index d67cff70d9b82f..57588681541818 100644
--- a/deps/v8/test/mjsunit/omit-default-ctors-array-iterator.js
+++ b/deps/v8/test/mjsunit/omit-default-ctors-array-iterator.js
@@ -2,9 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-// Flags: --omit-default-ctors --no-turbofan --no-always-turbofan
-
-// TODO(v8:13091): Enable TurboFan.
+// Flags: --omit-default-ctors
 
 // This behavior is not spec compliant, see crbug.com/v8/13249.
 (function ArrayIteratorMonkeyPatched() {
diff --git a/deps/v8/test/mjsunit/omit-default-ctors.js b/deps/v8/test/mjsunit/omit-default-ctors.js
index cbd70a925982e6..e06ca3964b8522 100644
--- a/deps/v8/test/mjsunit/omit-default-ctors.js
+++ b/deps/v8/test/mjsunit/omit-default-ctors.js
@@ -2,9 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-// Flags: --omit-default-ctors --no-turbofan --no-always-turbofan
-
-// TODO(v8:13091): Enable TurboFan.
+// Flags: --omit-default-ctors
 
 (function OmitDefaultBaseCtor() {
   class A {}  // default base ctor -> will be omitted
diff --git a/deps/v8/test/mjsunit/regexp-tier-up-multiple.js b/deps/v8/test/mjsunit/regexp-tier-up-multiple.js
index 3d161c40e8deb9..13f4eaabb32116 100644
--- a/deps/v8/test/mjsunit/regexp-tier-up-multiple.js
+++ b/deps/v8/test/mjsunit/regexp-tier-up-multiple.js
@@ -7,6 +7,10 @@
 // Flags: --regexp-tier-up --regexp-tier-up-ticks=5
 // Flags: --allow-natives-syntax --no-force-slow-path --no-regexp-interpret-all
 // Flags: --no-enable-experimental-regexp-engine
+//
+// Concurrent compiles can trigger interrupts which would cause regexp
+// re-execution and thus mess with test expectations below.
+// Flags: --no-concurrent-recompilation
 
 const kLatin1 = true;
 const kUnicode = false;
diff --git a/deps/v8/test/mjsunit/regress-crbug-1374232.js b/deps/v8/test/mjsunit/regress-crbug-1374232.js
new file mode 100644
index 00000000000000..984faf9c9892a6
--- /dev/null
+++ b/deps/v8/test/mjsunit/regress-crbug-1374232.js
@@ -0,0 +1,6 @@
+// Copyright 2022 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// '[' within a character class is not special without /v.
+/\1[[]()/ diff --git a/deps/v8/test/mjsunit/regress/regress-1359230.js b/deps/v8/test/mjsunit/regress/regress-1359230.js index bbb50ee6441a2e..ce0f958e7b9e45 100644 --- a/deps/v8/test/mjsunit/regress/regress-1359230.js +++ b/deps/v8/test/mjsunit/regress/regress-1359230.js @@ -14,17 +14,24 @@ try { } catch(e) { } -const str = /\dei7/sgiuy; -const obj = {"a":str, "length":9007199254740991}; -const increment = 2061353130; -let n = increment * 21; -for (let i = 0; i < 52; i++) { - n += increment; - try { - const v9 = d8.serializer.serialize(obj); - const v10 = new Uint8Array(v9); - v10[6] = n; - const v11 = d8.serializer.deserialize(v9); - } catch(v12) { +function test(length) { + const str = /\dei7/sgiuy; + const obj = {"a":str, "length":length}; + const increment = 2061353130; + let n = increment * 21; + for (let i = 0; i < 52; i++) { + n += increment; + try { + const v9 = d8.serializer.serialize(obj); + const v10 = new Uint8Array(v9); + v10[6] = n; + const v11 = d8.serializer.deserialize(v9); + } catch(v12) { + } } + } + + +test(9007199254740991); +test(0xFFFFFFFFFFFF3F43); diff --git a/deps/v8/test/mjsunit/regress/regress-1364319.js b/deps/v8/test/mjsunit/regress/regress-1364319.js new file mode 100644 index 00000000000000..c195afeaee36b2 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-1364319.js @@ -0,0 +1,15 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Flags: --allow-natives-syntax + +function f() { + return "abcd".charCodeAt(BigInt.asUintN(0, -1307n)); +} + +%PrepareFunctionForOptimization(f); +try { f(); } catch(e) {} +try { f(); } catch(e) {} +%OptimizeFunctionOnNextCall(f); +assertThrows(f, TypeError); diff --git a/deps/v8/test/mjsunit/regress/regress-1364400.js b/deps/v8/test/mjsunit/regress/regress-1364400.js new file mode 100644 index 00000000000000..b7726817433101 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-1364400.js @@ -0,0 +1,22 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax + +function foo(deopt, x) { + x = x >>> 0; + return deopt ? Math.max(x) : x; +} + +function bar(deopt) { + return foo(deopt, 4294967295); +}; + +%PrepareFunctionForOptimization(bar); +%PrepareFunctionForOptimization(foo); +bar(false); +%OptimizeFunctionOnNextCall(bar); +// bar will bail out because of insufficient type feedback for generic named +// access. The HeapNumber should be correctly rematerialized in the deoptimizer. +assertEquals(4294967295, bar(true)); diff --git a/deps/v8/test/mjsunit/regress/regress-1370398.js b/deps/v8/test/mjsunit/regress/regress-1370398.js new file mode 100644 index 00000000000000..72e4303df96436 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-1370398.js @@ -0,0 +1,18 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file.
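A side note on regress-1364400 above (a sketch of the underlying arithmetic, assuming standard JS semantics): x >>> 0 reinterprets the value as an unsigned 32-bit integer, and 4294967295 exceeds V8's signed small-integer (Smi) range, which is why the deoptimizer has to rematerialize the result as a heap-allocated HeapNumber.

// 2**32 - 1 is the largest uint32; it does not fit in a signed 31/32-bit Smi.
console.log(-1 >>> 0);     // 4294967295
console.log(2 ** 31 - 1);  // 2147483647, the largest signed 32-bit value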
+ +// Flags: --allow-natives-syntax --turbofan + +function f() { + let [x] = [1n]; + y = x; + x = 1n - y; + x = 1n - y; + y = x; +} + +%PrepareFunctionForOptimization(f); +f(); +%OptimizeFunctionOnNextCall(f); +f(); diff --git a/deps/v8/test/mjsunit/regress/regress-1371935.js b/deps/v8/test/mjsunit/regress/regress-1371935.js new file mode 100644 index 00000000000000..8ecd5880965cc6 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-1371935.js @@ -0,0 +1,24 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --turbofan --no-always-turbofan + +function f(a, b, c) { + // CheckBigInt64 is required if the type of input is UnsignedBigInt64 + // because its value can be out of the range of SignedBigInt64. + let t = BigInt.asUintN(64, a + b); + // The addition is speculated as CheckedBigInt64Add and triggers the deopt + // for the large value coming in through <t>. + return t + c; +} + +%PrepareFunctionForOptimization(f); +assertEquals(12n, f(9n, 2n, 1n)); +%OptimizeFunctionOnNextCall(f); +assertEquals(12n, f(9n, 2n, 1n)); +assertOptimized(f); +assertEquals(2n ** 64n, f(1n, -2n, 1n)); +if (%Is64Bit()) { + assertUnoptimized(f); +} diff --git a/deps/v8/test/mjsunit/regress/regress-1376930.js b/deps/v8/test/mjsunit/regress/regress-1376930.js new file mode 100644 index 00000000000000..1c3c4b6b94a456 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-1376930.js @@ -0,0 +1,20 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --turbofan --no-always-turbofan + +function f() { + let a = 42n; + // JSDecrement should be typed as BigInt. + let b = a--; + let c = -42n && 42n; + // JSDecrement was typed as Numeric instead of BigInt so the node could not + // be eliminated because of possible deoptimization. + let d = c & a; +}; + +%PrepareFunctionForOptimization(f); +f(); +%OptimizeFunctionOnNextCall(f); +f(); diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-1357318.js b/deps/v8/test/mjsunit/regress/regress-crbug-1357318.js new file mode 100644 index 00000000000000..c13e929a717685 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-crbug-1357318.js @@ -0,0 +1,102 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + + +(function array_iterator() { + let count = 0; + [].values().__proto__.return = function(value) { + ++count; + return {value: value, done: true}; + }; + + let array = [1, 2, 3, 4, 5, 6, 7, 8]; + + // Aborted iteration in a builtin. + try { + new WeakSet(array); + } catch (e) {} + assertEquals(count, 1); + + // Aborted iteration via for..of. + let i = array.length / 2; + for (c of array) { + if (--i == 0) break; + } + assertEquals(count, 2); +})(); + +(function set_iterator() { + let count = 0; + new Set().values().__proto__.return = function(value) { + ++count; + return {value: value, done: true}; + }; + + let set = new Set(); + for (let i = 0; i < 26; i++) { + set.add("item" + i); + } + + // Aborted iteration in a builtin. + try { + new WeakSet(set); + } catch (e) {} + assertEquals(count, 1); + + // Aborted iteration via for..of. 
+ let i = set.size / 2; + for (c of set.values()) { + if (--i == 0) break; + } + assertEquals(count, 2); +})(); + +(function map_iterator() { + let count = 0; + new Map().values().__proto__.return = function(value) { + ++count; + return {value: value, done: true}; + }; + + let map = new Map(); + for (let i = 0; i < 26; i++) { + map.set(String.fromCharCode(97 + i), i); + } + + // Aborted iteration in a builtin. + try { + new WeakMap(map); + } catch (e) {} + assertEquals(count, 1); + + // Aborted iteration via for..of. + let i = map.size / 2; + for (c of map.keys()) { + if (--i == 0) break; + } + assertEquals(count, 2); +})(); + +(function string_iterator() { + let count = 0; + let str = "some long string"; + let iterator = str[Symbol.iterator](); + iterator.__proto__.return = function(value) { + ++count; + return {value: value, done: true}; + }; + + // Aborted iteration in a builtin. + try { + new WeakSet(iterator); + } catch (e) {} + assertEquals(count, 1); + + // Aborted iteration via for..of. + let i = str.length / 2; + for (c of iterator) { + if (--i == 0) break; + } + assertEquals(count, 2); +})(); diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-1362487.js b/deps/v8/test/mjsunit/regress/regress-crbug-1362487.js new file mode 100644 index 00000000000000..388df981588898 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-crbug-1362487.js @@ -0,0 +1,16 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --harmony-rab-gsab + +const rab1 = new ArrayBuffer(2000, {'maxByteLength': 4000}); +class MyInt8Array extends Int8Array { + constructor() { + super(rab1); + } +}; +const rab2 = new ArrayBuffer(1000, {'maxByteLength': 4000}); +const ta = new Int8Array(rab2); +ta.constructor = MyInt8Array; +ta.slice(); diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-1373770.js b/deps/v8/test/mjsunit/regress/regress-crbug-1373770.js new file mode 100644 index 00000000000000..d3e8de2ab5ba04 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-crbug-1373770.js @@ -0,0 +1,10 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +JSON.parse('[0,0]', function (a, b, c) { + console.log(a); + console.log(b); + console.log(c); + quit(); +}); diff --git a/deps/v8/test/mjsunit/regress/wasm/regress-12945.js b/deps/v8/test/mjsunit/regress/wasm/regress-12945.js index c2505f32461b0b..a681465db3c9a4 100644 --- a/deps/v8/test/mjsunit/regress/wasm/regress-12945.js +++ b/deps/v8/test/mjsunit/regress/wasm/regress-12945.js @@ -15,7 +15,7 @@ let sig = makeSig([wasmRefNullType(supertype)], [kWasmI32]); let callee = builder.addFunction("callee", sig).addBody([ kExprLocalGet, 0, - kGCPrefix, kExprRefTest, sub1, + kGCPrefix, kExprRefTestDeprecated, sub1, kExprIf, kWasmVoid, kExprLocalGet, 0, kGCPrefix, kExprRefCast, sub1, diff --git a/deps/v8/test/mjsunit/regress/wasm/regress-1364036.js b/deps/v8/test/mjsunit/regress/wasm/regress-1364036.js new file mode 100644 index 00000000000000..a880a77555b5a8 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/wasm/regress-1364036.js @@ -0,0 +1,17 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
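The regress-crbug-1357318 cases above all hinge on the iterator-close protocol: exiting a for..of loop early (or an abrupt completion inside a built-in such as the WeakSet constructor) invokes the iterator's return() method when one is present. A minimal standalone sketch of that protocol:

let closed = 0;
const iter = {
  i: 0,
  next() { return {value: this.i++, done: false}; },
  return(v) { closed++; return {value: v, done: true}; },  // called on early exit
  [Symbol.iterator]() { return this; }
};
for (const x of iter) { if (x === 2) break; }  // break closes the iterator
console.log(closed);  // 1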
+ +// Flags: --wasm-speculative-inlining --experimental-wasm-typed-funcref + +d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); + +const builder = new WasmModuleBuilder(); +builder.addType(kSig_i_i); +builder.addFunction("main", kSig_i_i) + .addBody([kExprI32Const, 0x00, kExprRefNull, 0x01, kExprCallRef, 0x01]) + .exportFunc(); + +let instance = builder.instantiate(); + +assertTraps(WebAssembly.RuntimeError, () => instance.exports.main()); diff --git a/deps/v8/test/mjsunit/shared-memory/shared-external-string-dictionary-lookup.js b/deps/v8/test/mjsunit/shared-memory/shared-external-string-dictionary-lookup.js new file mode 100644 index 00000000000000..bf942cdf120d6b --- /dev/null +++ b/deps/v8/test/mjsunit/shared-memory/shared-external-string-dictionary-lookup.js @@ -0,0 +1,38 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Flags: --expose-externalize-string --shared-string-table + +const long_key = 'key1234567890abcdefg'; +const substr_key = long_key.substring(3,17); +const consstr_key = 'key' + 1234567890 + 'abcdefg'; +const integer_index = long_key.substring(3,8); + +{ + let obj = []; + for (let i = 0; i < 100; ++i) { + obj[i] = i; + obj['XXX' + i] = 'XXX' + i; + } + + obj['key1234567890abcdefg'] = 'long_key_value'; + obj['1234567890abcd'] = 'substr_value'; + obj[12345] = 'integer_index'; + + try { + externalizeString(long_key); + externalizeString(substr_key); + externalizeString(consstr_key); + externalizeString(integer_index); + } catch {} + + (function exerciseICs() { + for (let i = 0; i < 10; i++) { + assertEquals('long_key_value', obj[long_key]); + assertEquals('substr_value', obj[substr_key]); + assertEquals('long_key_value', obj[consstr_key]); + assertEquals('integer_index', obj[integer_index]); + } + })(); +} diff --git a/deps/v8/test/mjsunit/shared-memory/shared-external-string-megamorphic-ic.js b/deps/v8/test/mjsunit/shared-memory/shared-external-string-megamorphic-ic.js new file mode 100644 index 00000000000000..19d27a7cf858fc --- /dev/null +++ b/deps/v8/test/mjsunit/shared-memory/shared-external-string-megamorphic-ic.js @@ -0,0 +1,37 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Flags: --expose-externalize-string --shared-string-table +// Flags: --allow-natives-syntax + +function set(o, ext_key) { + o[ext_key] = "bar"; +} +function get(o, ext_key) { + o[ext_key]; +} + +%PrepareFunctionForOptimization(set); +%OptimizeFunctionOnNextCall(set); +%PrepareFunctionForOptimization(get); +%OptimizeFunctionOnNextCall(get); + +(function test() { + let ext_key = "AAAAAAAAAAAAAAAAAAAAAA"; + externalizeString(ext_key); + + set({a:1}, ext_key); + set({b:2}, ext_key); + set({c:3}, ext_key); + set({d:4}, ext_key); + set({e:5}, ext_key); + set({f:6}, ext_key); + + get({a:1}, ext_key); + get({b:2}, ext_key); + get({c:3}, ext_key); + get({d:4}, ext_key); + get({e:5}, ext_key); + get({f:6}, ext_key); +})(); diff --git a/deps/v8/test/mjsunit/temporal/duration-to-json-boundary-cases.js b/deps/v8/test/mjsunit/temporal/duration-to-json-boundary-cases.js new file mode 100644 index 00000000000000..027945a4c8504d --- /dev/null +++ b/deps/v8/test/mjsunit/temporal/duration-to-json-boundary-cases.js @@ -0,0 +1,297 @@ +// Copyright 2022 the V8 project authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// Flags: --harmony-temporal + +// Test Temporal.Duration toJSON with extreme cases. +// +// Test Number.MAX_VALUE +// +// This is out of the range of Number.MAX_SAFE_INTEGER so the specification +// does not mandate the precision. But we should still check certain properties of +// the result. +// Number.MAX_VALUE is 1.7976931348623157e+308 so the first 16 characters should +// be "P179769313486231" which is 15 digits and only requires 50 bits, so it +// should be precise in 64-bit floating point. +// There are 309 digits in total, so it should be 179769313486231 followed by +// another 294 digits (309 - 15 = 294) +assertMatches(/P179769313486231\d{294}Y/, + (new Temporal.Duration(Number.MAX_VALUE)).toJSON()); +assertMatches(/-P179769313486231\d{294}Y/, + (new Temporal.Duration(-Number.MAX_VALUE)).toJSON()); + +assertMatches(/P179769313486231\d{294}M/, + (new Temporal.Duration(0, Number.MAX_VALUE)).toJSON()); +assertMatches(/-P179769313486231\d{294}M/, + (new Temporal.Duration(0, -Number.MAX_VALUE)).toJSON()); + +assertMatches(/P179769313486231\d{294}W/, + (new Temporal.Duration(0, 0, Number.MAX_VALUE)).toJSON()); +assertMatches(/-P179769313486231\d{294}W/, + (new Temporal.Duration(0, 0, -Number.MAX_VALUE)).toJSON()); + +assertMatches(/P179769313486231\d{294}D/, + (new Temporal.Duration(0, 0, 0, Number.MAX_VALUE)).toJSON()); +assertMatches(/-P179769313486231\d{294}D/, + (new Temporal.Duration(0, 0, 0, -Number.MAX_VALUE)).toJSON()); + +assertMatches(/PT179769313486231\d{294}H/, + (new Temporal.Duration(0, 0, 0, 0, Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179769313486231\d{294}H/, + (new Temporal.Duration(0, 0, 0, 0, -Number.MAX_VALUE)).toJSON()); + +assertMatches(/PT179769313486231\d{294}M/, + (new Temporal.Duration(0, 0, 0, 0, 0, Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179769313486231\d{294}M/, + (new Temporal.Duration(0, 0, 0, 0, 0, -Number.MAX_VALUE)).toJSON()); + +assertMatches(/PT179769313486231\d{294}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179769313486231\d{294}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, -Number.MAX_VALUE)).toJSON()); + +// For milliseconds, we should have 179769313486231 with another 291 +// (309 - 15 - 3 = 291) digits, a '.', and then 3 digits +assertMatches(/PT179769313486231\d{291}[.]\d{3}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179769313486231\d{291}[.]\d{3}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, -Number.MAX_VALUE)).toJSON()); + +// For microseconds, we should have 179769313486231 with another 288 +// (309 - 15 - 6 = 288) digits, a '.', and then 6 digits +assertMatches(/PT179769313486231\d{288}[.]\d{6}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, Number.MAX_VALUE)) + .toJSON()); +assertMatches(/-PT179769313486231\d{288}[.]\d{6}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, -Number.MAX_VALUE)) + .toJSON()); + +// For nanoseconds, we should have 179769313486231 with another 285 +// (309 - 15 - 9 = 285) digits, a '.', and then 9 digits +assertMatches(/PT179769313486231\d{285}[.]\d{9}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, 0, Number.MAX_VALUE)) + .toJSON()); +assertMatches(/-PT179769313486231\d{285}[.]\d{9}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, 0, -Number.MAX_VALUE)) + .toJSON()); + +// Test seconds + milliseconds +// Number.MAX_VALUE + Number.MAX_VALUE / 1000 = 1.7994908279971777e+308 +// So
the first 17 characters should be "PT179949082799717" which is 15 digits +// and only requires 50 bits, so it should be precise in 64-bit floating point. +// For seconds and milliseconds, we should have 179949082799717 with another 294 +// digits, a '.', and then 3 digits +assertMatches(/PT179949082799717\d{294}[.]\d{3}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, Number.MAX_VALUE, + Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179949082799717\d{294}[.]\d{3}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, -Number.MAX_VALUE, + -Number.MAX_VALUE)).toJSON()); + +// Test milliseconds + microseconds +// For milliseconds and microseconds, we should have 179949082799717 with +// another 291 (309 - 15 - 3 = 291) digits, a '.', and then 6 digits. +assertMatches(/PT179949082799717\d{291}[.]\d{6}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, Number.MAX_VALUE, + Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179949082799717\d{291}[.]\d{6}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, -Number.MAX_VALUE, + -Number.MAX_VALUE)).toJSON()); + +// Test microseconds + nanoseconds +// For microseconds and nanoseconds, we should have 179949082799717 with another +// 288 (309 - 15 - 6 = 288) digits, a '.', and then 9 digits. +assertMatches(/PT179949082799717\d{288}[.]\d{9}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, Number.MAX_VALUE, + Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179949082799717\d{288}[.]\d{9}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, -Number.MAX_VALUE, + -Number.MAX_VALUE)).toJSON()); + +// Test seconds + milliseconds + microseconds +// Number.MAX_VALUE + Number.MAX_VALUE / 1000 + Number.MAX_VALUE / 1000000 = +// 1.7994926256903124e+308 +// So the first 17 characters should be "PT179949262569031" which is 15 digits +// and only requires 50 bits, so it should be precise in 64-bit floating point. +// For seconds, milliseconds, and microseconds, we should have 179949262569031 +// with another 294 digits, a '.', and then 6 digits. +assertMatches(/PT179949262569031\d{294}[.]\d{6}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, Number.MAX_VALUE, + Number.MAX_VALUE, Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179949262569031\d{294}[.]\d{6}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, -Number.MAX_VALUE, + -Number.MAX_VALUE, -Number.MAX_VALUE)).toJSON()); + +// Test milliseconds + microseconds + nanoseconds +// For milliseconds, microseconds, and nanoseconds, we should have +// 179949262569031 with another 291 digits, a '.', and then 9 digits. +assertMatches(/PT179949262569031\d{291}[.]\d{9}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, Number.MAX_VALUE, + Number.MAX_VALUE, Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179949262569031\d{291}[.]\d{9}S/, + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, -Number.MAX_VALUE, + -Number.MAX_VALUE, -Number.MAX_VALUE)).toJSON()); + +// Test seconds + milliseconds + microseconds + nanoseconds +// Number.MAX_VALUE + Number.MAX_VALUE / 1000 + Number.MAX_VALUE / 1000000 + +// Number.MAX_VALUE / 1000000000 = 1.7994926274880055e+308 +// So the first 17 characters should be "PT179949262748800" which is 15 digits +// and only requires 50 bits, so it should be precise in 64-bit floating point. +// For seconds, milliseconds, microseconds, and nanoseconds, we should +// have 179949262748800 with another 294 digits, a '.', and then 9 digits.
+assertMatches(/PT179949262748800\d{294}[.]\d{9}S/, + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, Number.MAX_VALUE, Number.MAX_VALUE, Number.MAX_VALUE, + Number.MAX_VALUE)).toJSON()); +assertMatches(/-PT179949262748800\d{294}[.]\d{9}S/, + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, -Number.MAX_VALUE, -Number.MAX_VALUE, + -Number.MAX_VALUE, -Number.MAX_VALUE)).toJSON()); + +// Test Number.MAX_SAFE_INTEGER +// For MAX_SAFE_INTEGER, we need to test that the result comes out exact, not just +// close. +let maxIntString = String(Number.MAX_SAFE_INTEGER); + +assertEquals("P" + maxIntString + "Y", + (new Temporal.Duration(Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-P" + maxIntString + "Y", + (new Temporal.Duration(-Number.MAX_SAFE_INTEGER)).toJSON()); + +assertEquals("P" + maxIntString + "M", + (new Temporal.Duration(0, Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-P" + maxIntString + "M", + (new Temporal.Duration(0, -Number.MAX_SAFE_INTEGER)).toJSON()); + +assertEquals("P" + maxIntString + "W", + (new Temporal.Duration(0, 0, Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-P" + maxIntString + "W", + (new Temporal.Duration(0, 0, -Number.MAX_SAFE_INTEGER)).toJSON()); + +assertEquals("P" + maxIntString + "D", + (new Temporal.Duration(0, 0, 0, Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-P" + maxIntString + "D", + (new Temporal.Duration(0, 0, 0, -Number.MAX_SAFE_INTEGER)).toJSON()); + +assertEquals("PT" + maxIntString + "H", + (new Temporal.Duration(0, 0, 0, 0, Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-PT" + maxIntString + "H", + (new Temporal.Duration(0, 0, 0, 0, -Number.MAX_SAFE_INTEGER)).toJSON()); + +assertEquals("PT" + maxIntString + "M", + (new Temporal.Duration(0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-PT" + maxIntString + "M", + (new Temporal.Duration(0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER)).toJSON()); + +assertEquals("PT" + maxIntString + "S", + (new Temporal.Duration(0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER)) + .toJSON()); +assertEquals("-PT" + maxIntString + "S", + (new Temporal.Duration(0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER)) + .toJSON()); + +const insertDotFromRight = (str, pos) => + `${str.slice(0, str.length-pos)}.${str.slice(str.length-pos)}`; + +// For milliseconds, microseconds, and nanoseconds +assertEquals("PT" + insertDotFromRight(maxIntString, 3) + "S", + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER)) + .toJSON()); +assertEquals("-PT" + insertDotFromRight(maxIntString, 3) + "S", + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER)) + .toJSON()); + +assertEquals("PT" + insertDotFromRight(maxIntString, 6) + "S", + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER)) + .toJSON()); +assertEquals("-PT" + insertDotFromRight(maxIntString, 6) + "S", + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER)) + .toJSON()); + +assertEquals("PT" + insertDotFromRight(maxIntString, 9) + "S", + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER)) + .toJSON()); +assertEquals("-PT" + insertDotFromRight(maxIntString, 9) + "S", + (new Temporal.Duration(0, 0, 0, 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER)) + .toJSON()); + +// Test seconds + milliseconds +// Number.MAX_SAFE_INTEGER: 9007199254740991 +// 9007199254740991 +// 9007199254740.991 +//_+_____________________ +// 9016206453995731.991 +let twoMaxString = "9016206453995731991"; +assertEquals("PT" + insertDotFromRight(twoMaxString, 3) + "S", + (new Temporal.Duration( + 0, 0,
0, 0, 0, 0, Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER)) + .toJSON()); +assertEquals("-PT" + insertDotFromRight(twoMaxString, 3) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER, -Number.MAX_SAFE_INTEGER)) + .toJSON()); + +// Test milliseconds + microseconds +assertEquals("PT" + insertDotFromRight(twoMaxString, 6) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER)) + .toJSON()); +assertEquals("-PT" + insertDotFromRight(twoMaxString, 6) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER, -Number.MAX_SAFE_INTEGER) + ).toJSON()); + +// Test microseconds + nanoseconds +assertEquals("PT" + insertDotFromRight(twoMaxString, 9) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER, + Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-PT" + insertDotFromRight(twoMaxString, 9) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER, + -Number.MAX_SAFE_INTEGER)).toJSON()); + +// Test seconds + milliseconds + microseconds +// Number.MAX_SAFE_INTEGER: 9007199254740991 +// 9007199254740991 +// 9007199254740.991 +// 9007199254.740991 +//_+_____________________ +// 9016215461194986.731991 +let threeMaxString = "9016215461194986731991"; +assertEquals("PT" + insertDotFromRight(threeMaxString, 6) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, + Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-PT" + insertDotFromRight(threeMaxString, 6) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER, -Number.MAX_SAFE_INTEGER, + -Number.MAX_SAFE_INTEGER)).toJSON()); + +// Test milliseconds + microseconds + nanoseconds +assertEquals("PT" + insertDotFromRight(threeMaxString, 9) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, + Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-PT" + insertDotFromRight(threeMaxString, 9) + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER, -Number.MAX_SAFE_INTEGER, + -Number.MAX_SAFE_INTEGER)).toJSON()); + +// Test seconds + milliseconds + microseconds + nanoseconds +// Number.MAX_SAFE_INTEGER: 9007199254740991 +// 9007199254740991 +// 9007199254740.991 +// 9007199254.740991 +// 9007199.254740991 +//_+____________________________ +// 9016215470202185.986731991 +let fourMaxString = "9016215470202185.986731991"; +assertEquals("PT" + fourMaxString + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER, + Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER)).toJSON()); +assertEquals("-PT" + fourMaxString + "S", + (new Temporal.Duration( + 0, 0, 0, 0, 0, 0, -Number.MAX_SAFE_INTEGER, -Number.MAX_SAFE_INTEGER, + -Number.MAX_SAFE_INTEGER, -Number.MAX_SAFE_INTEGER)).toJSON()); diff --git a/deps/v8/test/mjsunit/tools/foozzie.js b/deps/v8/test/mjsunit/tools/foozzie.js index fa3cc5a84a84db..4d7ca4cc1292f0 100644 --- a/deps/v8/test/mjsunit/tools/foozzie.js +++ b/deps/v8/test/mjsunit/tools/foozzie.js @@ -31,6 +31,8 @@ if (this.Intl) { // Dummy performance methods. assertEquals(1.2, performance.now()); +assertEquals(undefined, performance.mark("a mark")); +assertEquals(undefined, performance.measure("a measure")); assertEquals([], performance.measureMemory()); // Worker messages follow a predefined deterministic pattern. 
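A quick cross-check of the digit counts relied on by duration-to-json-boundary-cases.js above (a sketch; any engine with BigInt will do):

const digits = BigInt(Number.MAX_VALUE).toString();  // exact: MAX_VALUE is an integer
console.log(digits.length);        // 309 digits in total
console.log(digits.slice(0, 15));  // "179769313486231", the exactly-representable prefix
console.log(309 - 15, 309 - 15 - 3, 309 - 15 - 6, 309 - 15 - 9);  // 294, 291, 288, 285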
diff --git a/deps/v8/test/mjsunit/wasm/bigint-rematerialize.js b/deps/v8/test/mjsunit/wasm/bigint-rematerialize.js new file mode 100644 index 00000000000000..dd40297ecface1 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/bigint-rematerialize.js @@ -0,0 +1,43 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --allow-natives-syntax --turbofan --no-always-turbofan --turbo-inline-js-wasm-calls + +d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); + +let builder = new WasmModuleBuilder(); + +builder + .addFunction("f", kSig_l_v) // () -> i64 + .addBody([ + kExprI64Const, 0, + kExprI64Const, 1, + kExprI64Sub, // -1 + ]) + .exportFunc(); + +let module = builder.instantiate(); + +function f(x) { + let y = module.exports.f(); + try { + return x + y; + } catch(_) { + return y; + } +} + +%PrepareFunctionForOptimization(f); +assertEquals(0n, f(1n)); +assertEquals(1n, f(2n)); +%OptimizeFunctionOnNextCall(f); +assertEquals(0n, f(1n)); +assertOptimized(f); +// After optimization, the result of the JS-to-Wasm call is stored in word64 and +// passed to StateValues without conversion. Rematerialization will happen +// in the deoptimizer. +assertEquals(-1n, f(0)); +if (%Is64Bit()) { + assertUnoptimized(f); +} diff --git a/deps/v8/test/mjsunit/wasm/externref.js b/deps/v8/test/mjsunit/wasm/externref.js index cf052cdd754e47..11e6a6f837e781 100644 --- a/deps/v8/test/mjsunit/wasm/externref.js +++ b/deps/v8/test/mjsunit/wasm/externref.js @@ -336,10 +336,12 @@ d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); (function MultiReturnRefTest() { print("MultiReturnTest"); let builder = new WasmModuleBuilder(); + let gc_sig = builder.addType(kSig_v_v); let sig = makeSig([kWasmExternRef], [kWasmExternRef, kWasmExternRef, kWasmExternRef, kWasmExternRef]); - builder.addFunction("callee", sig) + let gc_index = builder.addImport('q', 'gc', gc_sig); + let callee = builder.addFunction("callee", sig) .addBody([ kExprLocalGet, 0, kExprLocalGet, 0, @@ -349,11 +351,13 @@ d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); builder.addFunction("main", sig) .addBody([ kExprLocalGet, 0, - kExprCallFunction, 0 + kExprCallFunction, callee.index, + kExprCallFunction, gc_index, ]) .exportAs("main"); - let module = new WebAssembly.Module(builder.toBuffer()); - let instance = new WebAssembly.Instance(module); + let instance = builder.instantiate({ + q: { gc: () => gc() } + }); assertEquals(instance.exports.main(null), [null, null, null, null]); })(); diff --git a/deps/v8/test/mjsunit/wasm/gc-casts-from-any.js b/deps/v8/test/mjsunit/wasm/gc-casts-from-any.js new file mode 100644 index 00000000000000..234ddb7cd1c185 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-casts-from-any.js @@ -0,0 +1,137 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file.
+ +// Flags: --experimental-wasm-gc + +d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); + +(function TestRefTest() { + var builder = new WasmModuleBuilder(); + let structSuper = builder.addStruct([makeField(kWasmI32, true)]); + let structSub = builder.addStruct([makeField(kWasmI32, true)], structSuper); + let array = builder.addArray(kWasmI32); + + let fct = + builder.addFunction('createStructSuper', + makeSig([kWasmI32], [kWasmExternRef])) + .addBody([ + kExprLocalGet, 0, + kGCPrefix, kExprStructNew, structSuper, + kGCPrefix, kExprExternExternalize, + ]).exportFunc(); + builder.addFunction('createStructSub', makeSig([kWasmI32], [kWasmExternRef])) + .addBody([ + kExprLocalGet, 0, + kGCPrefix, kExprStructNew, structSub, + kGCPrefix, kExprExternExternalize, + ]).exportFunc(); + builder.addFunction('createArray', makeSig([kWasmI32], [kWasmExternRef])) + .addBody([ + kExprLocalGet, 0, + kGCPrefix, kExprArrayNewFixed, array, 1, + kGCPrefix, kExprExternExternalize, + ]).exportFunc(); + builder.addFunction('createFuncRef', makeSig([], [kWasmFuncRef])) + .addBody([ + kExprRefFunc, fct.index, + ]).exportFunc(); + + [ + ["StructSuper", structSuper], + ["StructSub", structSub], + ["Array", array], + ["I31", kI31RefCode], + ["AnyArray", kArrayRefCode], + ["Data", kDataRefCode], + ["Eq", kEqRefCode], + // 'ref.test any' is semantically the same as '!ref.is_null' here. + ["Any", kAnyRefCode], + ].forEach(([typeName, typeCode]) => { + builder.addFunction(`refTest${typeName}`, + makeSig([kWasmExternRef], [kWasmI32, kWasmI32])) + .addBody([ + kExprLocalGet, 0, + kGCPrefix, kExprExternInternalize, + kGCPrefix, kExprRefTest, typeCode, + kExprLocalGet, 0, + kGCPrefix, kExprExternInternalize, + kGCPrefix, kExprRefTestNull, typeCode, + ]).exportFunc(); + }); + + var instance = builder.instantiate(); + let wasm = instance.exports; + // result: [ref.test, ref.test null] + assertEquals([0, 1], wasm.refTestStructSuper(null)); + assertEquals([0, 0], wasm.refTestStructSuper(undefined)); + assertEquals([1, 1], wasm.refTestStructSuper(wasm.createStructSuper())); + assertEquals([1, 1], wasm.refTestStructSuper(wasm.createStructSub())); + assertEquals([0, 0], wasm.refTestStructSuper(wasm.createArray())); + assertEquals([0, 0], wasm.refTestStructSuper(wasm.createFuncRef())); + assertEquals([0, 0], wasm.refTestStructSuper(1)); + assertEquals([0, 0], wasm.refTestStructSuper({'JavaScript': 'Object'})); + + assertEquals([0, 1], wasm.refTestStructSub(null)); + assertEquals([0, 0], wasm.refTestStructSub(undefined)); + assertEquals([0, 0], wasm.refTestStructSub(wasm.createStructSuper())); + assertEquals([1, 1], wasm.refTestStructSub(wasm.createStructSub())); + assertEquals([0, 0], wasm.refTestStructSub(wasm.createArray())); + assertEquals([0, 0], wasm.refTestStructSub(wasm.createFuncRef())); + assertEquals([0, 0], wasm.refTestStructSub(1)); + assertEquals([0, 0], wasm.refTestStructSub({'JavaScript': 'Object'})); + + assertEquals([0, 1], wasm.refTestArray(null)); + assertEquals([0, 0], wasm.refTestArray(undefined)); + assertEquals([0, 0], wasm.refTestArray(wasm.createStructSuper())); + assertEquals([0, 0], wasm.refTestArray(wasm.createStructSub())); + assertEquals([1, 1], wasm.refTestArray(wasm.createArray())); + assertEquals([0, 0], wasm.refTestArray(wasm.createFuncRef())); + assertEquals([0, 0], wasm.refTestArray(1)); + assertEquals([0, 0], wasm.refTestArray({'JavaScript': 'Object'})); + + assertEquals([0, 1], wasm.refTestI31(null)); + assertEquals([0, 0], wasm.refTestI31(undefined)); + assertEquals([0, 0], 
wasm.refTestI31(wasm.createStructSuper())); + assertEquals([0, 0], wasm.refTestI31(wasm.createStructSub())); + assertEquals([0, 0], wasm.refTestI31(wasm.createArray())); + assertEquals([0, 0], wasm.refTestI31(wasm.createFuncRef())); + assertEquals([1, 1], wasm.refTestI31(1)); + assertEquals([0, 0], wasm.refTestI31({'JavaScript': 'Object'})); + + assertEquals([0, 1], wasm.refTestAnyArray(null)); + assertEquals([0, 0], wasm.refTestAnyArray(undefined)); + assertEquals([0, 0], wasm.refTestAnyArray(wasm.createStructSuper())); + assertEquals([0, 0], wasm.refTestAnyArray(wasm.createStructSub())); + assertEquals([1, 1], wasm.refTestAnyArray(wasm.createArray())); + assertEquals([0, 0], wasm.refTestAnyArray(wasm.createFuncRef())); + assertEquals([0, 0], wasm.refTestAnyArray(1)); + assertEquals([0, 0], wasm.refTestAnyArray({'JavaScript': 'Object'})); + + assertEquals([0, 1], wasm.refTestData(null)); + assertEquals([0, 0], wasm.refTestData(undefined)); + assertEquals([1, 1], wasm.refTestData(wasm.createStructSuper())); + assertEquals([1, 1], wasm.refTestData(wasm.createStructSub())); + assertEquals([1, 1], wasm.refTestData(wasm.createArray())); + assertEquals([0, 0], wasm.refTestData(wasm.createFuncRef())); + assertEquals([0, 0], wasm.refTestData(1)); + assertEquals([0, 0], wasm.refTestData({'JavaScript': 'Object'})); + + assertEquals([0, 1], wasm.refTestEq(null)); + assertEquals([0, 0], wasm.refTestEq(undefined)); + assertEquals([1, 1], wasm.refTestEq(wasm.createStructSuper())); + assertEquals([1, 1], wasm.refTestEq(wasm.createStructSub())); + assertEquals([1, 1], wasm.refTestEq(wasm.createArray())); + assertEquals([0, 0], wasm.refTestEq(wasm.createFuncRef())); + assertEquals([1, 1], wasm.refTestEq(1)); // ref.i31 + assertEquals([0, 0], wasm.refTestEq({'JavaScript': 'Object'})); + + assertEquals([0, 1], wasm.refTestAny(null)); + assertEquals([1, 1], wasm.refTestAny(undefined)); + assertEquals([1, 1], wasm.refTestAny(wasm.createStructSuper())); + assertEquals([1, 1], wasm.refTestAny(wasm.createStructSub())); + assertEquals([1, 1], wasm.refTestAny(wasm.createArray())); + assertEquals([1, 1], wasm.refTestAny(wasm.createFuncRef())); + assertEquals([1, 1], wasm.refTestAny(1)); // ref.i31 + assertEquals([1, 1], wasm.refTestAny({'JavaScript': 'Object'})); +})(); diff --git a/deps/v8/test/mjsunit/wasm/gc-casts-invalid.js b/deps/v8/test/mjsunit/wasm/gc-casts-invalid.js new file mode 100644 index 00000000000000..236d6148bf7437 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-casts-invalid.js @@ -0,0 +1,47 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// Flags: --experimental-wasm-gc + +d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); + +(function TestRefTestInvalid() { + let struct = 0; + let array = 1; + let sig = 2; + let types = [ + // source value type |target heap type + [kWasmI32, kAnyRefCode], + [kWasmNullExternRef, struct], + [wasmRefType(struct), kNullFuncRefCode], + [wasmRefType(array), kFuncRefCode], + [wasmRefType(struct), sig], + [wasmRefType(sig), struct], + [wasmRefType(sig), kExternRefCode], + [kWasmAnyRef, kExternRefCode], + [kWasmAnyRef, kFuncRefCode], + ]; + let casts = [ + kExprRefTest, + kExprRefTestNull, + ]; + + for (let [source_type, target_type_imm] of types) { + for (let cast of casts) { + let builder = new WasmModuleBuilder(); + assertEquals(struct, builder.addStruct([makeField(kWasmI32, true)])); + assertEquals(array, builder.addArray(kWasmI32)); + assertEquals(sig, builder.addType(makeSig([kWasmI32], []))); + builder.addFunction('refTest', makeSig([kWasmI32], [source_type])) + .addBody([ + kExprLocalGet, 0, + kGCPrefix, cast, target_type_imm, + ]); + + assertThrows(() => builder.instantiate(), + WebAssembly.CompileError, + /has to be in the same reference type hierarchy/); + } + } +})(); diff --git a/deps/v8/test/mjsunit/wasm/gc-casts-subtypes.js b/deps/v8/test/mjsunit/wasm/gc-casts-subtypes.js new file mode 100644 index 00000000000000..249c70483ef9e9 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-casts-subtypes.js @@ -0,0 +1,248 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc --experimental-wasm-type-reflection + +d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); + +// Test casting null from one type to another using ref.test. +(function RefTestNull() { + print(arguments.callee.name); + let builder = new WasmModuleBuilder(); + let structSuper = builder.addStruct([makeField(kWasmI32, true)]); + let structSub = builder.addStruct([makeField(kWasmI32, true)], structSuper); + let array = builder.addArray(kWasmI32); + + // Note: Casting between unrelated types is allowed as long as the types + // belong to the same type hierarchy (func / any / extern). In these cases the + // check will always fail. 
+ let tests = [ + [kWasmAnyRef, kWasmAnyRef, 'AnyToAny'], + [kWasmFuncRef, kWasmFuncRef, 'FuncToFunc'], + [kWasmExternRef, kWasmExternRef, 'ExternToExtern'], + [kWasmNullFuncRef, kWasmNullFuncRef, 'NullFuncToNullFunc'], + [kWasmNullExternRef, kWasmNullExternRef, 'NullExternToNullExtern'], + [structSub, array, 'StructToArray'], + [kWasmFuncRef, kWasmNullFuncRef, 'FuncToNullFunc'], + [kWasmNullFuncRef, kWasmFuncRef, 'NullFuncToFunc'], + [kWasmExternRef, kWasmNullExternRef, 'ExternToNullExtern'], + [kWasmNullExternRef, kWasmExternRef, 'NullExternToExtern'], + [kWasmNullRef, kWasmAnyRef, 'NullToAny'], + [kWasmI31Ref, structSub, 'I31ToStruct'], + [kWasmEqRef, kWasmI31Ref, 'EqToI31'], + [structSuper, structSub, 'StructSuperToStructSub'], + [structSub, structSuper, 'StructSubToStructSuper'], + ]; + + for (let [sourceType, targetType, testName] of tests) { + builder.addFunction('testNull' + testName, + makeSig([], [kWasmI32])) + .addLocals(wasmRefNullType(sourceType), 1) + .addBody([ + kExprLocalGet, 0, + kGCPrefix, kExprRefTest, targetType & kLeb128Mask, + ]).exportFunc(); + } + + let instance = builder.instantiate(); + let wasm = instance.exports; + + for (let [sourceType, targetType, testName] of tests) { + assertEquals(0, wasm['testNull' + testName]()); + } +})(); + +(function RefTestFuncRef() { + print(arguments.callee.name); + let builder = new WasmModuleBuilder(); + let sigSuper = builder.addType(makeSig([kWasmI32], [])); + let sigSub = builder.addType(makeSig([kWasmI32], []), sigSuper); + + builder.addFunction('fctSuper', sigSuper).addBody([]).exportFunc(); + builder.addFunction('fctSub', sigSub).addBody([]).exportFunc(); + builder.addFunction('testFromFuncRef', + makeSig([kWasmFuncRef], [kWasmI32, kWasmI32, kWasmI32, kWasmI32])) + .addBody([ + kExprLocalGet, 0, kGCPrefix, kExprRefTest, kFuncRefCode, + kExprLocalGet, 0, kGCPrefix, kExprRefTest, kNullFuncRefCode, + kExprLocalGet, 0, kGCPrefix, kExprRefTest, sigSuper, + kExprLocalGet, 0, kGCPrefix, kExprRefTest, sigSub, + ]).exportFunc(); + + let instance = builder.instantiate(); + let wasm = instance.exports; + let jsFct = new WebAssembly.Function( + {parameters:['i32', 'i32'], results: ['i32']}, + function mul(a, b) { return a * b; }); + assertEquals([1, 0, 0, 0], wasm.testFromFuncRef(jsFct)); + assertEquals([1, 0, 1, 0], wasm.testFromFuncRef(wasm.fctSuper)); + assertEquals([1, 0, 1, 1], wasm.testFromFuncRef(wasm.fctSub)); +})(); + +(function RefTestExternRef() { + print(arguments.callee.name); + let builder = new WasmModuleBuilder(); + + builder.addFunction('testExternRef', + makeSig([kWasmExternRef], [kWasmI32, kWasmI32,])) + .addBody([ + kExprLocalGet, 0, kGCPrefix, kExprRefTest, kExternRefCode, + kExprLocalGet, 0, kGCPrefix, kExprRefTest, kNullExternRefCode, + ]).exportFunc(); + + let instance = builder.instantiate(); + let wasm = instance.exports; + assertEquals([0, 0], wasm.testExternRef(null)); + assertEquals([1, 0], wasm.testExternRef(undefined)); + assertEquals([1, 0], wasm.testExternRef(1)); + assertEquals([1, 0], wasm.testExternRef({})); + assertEquals([1, 0], wasm.testExternRef(wasm.testExternRef)); +})(); + +(function RefTestAnyRefHierarchy() { + print(arguments.callee.name); + let builder = new WasmModuleBuilder(); + let structSuper = builder.addStruct([makeField(kWasmI32, true)]); + let structSub = builder.addStruct([makeField(kWasmI32, true)], structSuper); + let array = builder.addArray(kWasmI32); + + let types = { + any: kWasmAnyRef, + eq: kWasmEqRef, + data: kWasmDataRef, + anyArray: kWasmArrayRef, + array: 
wasmRefNullType(array), + structSuper: wasmRefNullType(structSuper), + structSub: wasmRefNullType(structSub), + }; + + let createBodies = { + nullref: [kExprRefNull, kNullRefCode], + i31ref: [kExprI32Const, 42, kGCPrefix, kExprI31New], + structSuper: [kExprI32Const, 42, kGCPrefix, kExprStructNew, structSuper], + structSub: [kExprI32Const, 42, kGCPrefix, kExprStructNew, structSub], + array: [kExprI32Const, 42, kGCPrefix, kExprArrayNewFixed, array, 1], + }; + + // Each test lists the following: + // source => The static type of the source value. + // values => All actual values that are subtypes of the static type. + // targets => A list of types for ref.test. For each type the values are + // listed for which ref.test should return 1 (i.e. the ref.test + // should succeed). + let tests = [ + { + source: 'any', + values: ['nullref', 'i31ref', 'structSuper', 'structSub', 'array'], + targets: { + eq: ['i31ref', 'structSuper', 'structSub', 'array'], + data: ['structSuper', 'structSub', 'array'], + anyArray: ['array'], + array: ['array'], + structSuper: ['structSuper', 'structSub'], + structSub: ['structSub'], + } + }, + { + source: 'eq', + values: ['nullref', 'i31ref', 'structSuper', 'structSub', 'array'], + targets: { + eq: ['i31ref', 'structSuper', 'structSub', 'array'], + data: ['structSuper', 'structSub', 'array'], + anyArray: ['array'], + array: ['array'], + structSuper: ['structSuper', 'structSub'], + structSub: ['structSub'], + } + }, + { + source: 'data', + values: ['nullref', 'structSuper', 'structSub', 'array'], + targets: { + eq: ['structSuper', 'structSub', 'array'], + data: ['structSuper', 'structSub', 'array'], + anyArray: ['array'], + array: ['array'], + structSuper: ['structSuper', 'structSub'], + structSub: ['structSub'], + } + }, + { + source: 'anyArray', + values: ['nullref', 'array'], + targets: { + eq: ['array'], + data: ['array'], + anyArray: ['array'], + array: ['array'], + structSuper: [], + structSub: [], + } + }, + { + source: 'structSuper', + values: ['nullref', 'structSuper', 'structSub'], + targets: { + eq: ['structSuper', 'structSub'], + data: ['structSuper', 'structSub'], + anyArray: [], + array: [], + structSuper: ['structSuper', 'structSub'], + structSub: ['structSub'], + } + }, + ]; + + for (let test of tests) { + let sourceType = types[test.source]; + // Add creator functions. + let creatorSig = makeSig([], [sourceType]); + let creatorType = builder.addType(creatorSig); + for (let value of test.values) { + builder.addFunction(`create_${test.source}_${value}`, creatorType) + .addBody(createBodies[value]).exportFunc(); + } + // Add ref.test tester functions. + // The functions take the creator functions as callbacks to prevent the + // compiler from deriving the actual type of the value, so they can only + // use the static source type. + for (let target in test.targets) { + // Get the heap type for concrete types or apply the Leb128 mask to the + // abstract type. + let heapType = types[target].heap_type ??
(types[target] & kLeb128Mask); + builder.addFunction(`test_${test.source}_to_${target}`, + makeSig([wasmRefType(creatorType)], [kWasmI32])) + .addBody([ + kExprLocalGet, 0, + kExprCallRef, creatorType, + kGCPrefix, kExprRefTest, heapType, + ]).exportFunc(); + builder.addFunction(`test_null_${test.source}_to_${target}`, + makeSig([wasmRefType(creatorType)], [kWasmI32])) + .addBody([ + kExprLocalGet, 0, + kExprCallRef, creatorType, + kGCPrefix, kExprRefTestNull, heapType, + ]).exportFunc(); + } + } + + let instance = builder.instantiate(); + let wasm = instance.exports; + + for (let test of tests) { + for (let [target, validValues] of Object.entries(test.targets)) { + for (let value of test.values) { + print(`Test ref.test: ${test.source}(${value}) -> ${target}`); + let create_value = wasm[`create_${test.source}_${value}`]; + let res = wasm[`test_${test.source}_to_${target}`](create_value); + assertEquals(validValues.includes(value) ? 1 : 0, res); + print(`Test ref.test null: ${test.source}(${value}) -> ${target}`); + res = wasm[`test_null_${test.source}_to_${target}`](create_value); + assertEquals( + (validValues.includes(value) || value == "nullref") ? 1 : 0, res); + } + } + } +})(); diff --git a/deps/v8/test/mjsunit/wasm/gc-experimental-string-conversions.js b/deps/v8/test/mjsunit/wasm/gc-experimental-string-conversions.js index f8356455ecb269..c1bdcb448c4583 100644 --- a/deps/v8/test/mjsunit/wasm/gc-experimental-string-conversions.js +++ b/deps/v8/test/mjsunit/wasm/gc-experimental-string-conversions.js @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// Flags: --experimental-wasm-gc --wasm-gc-js-interop +// Flags: --experimental-wasm-gc --allow-natives-syntax d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); @@ -45,3 +45,24 @@ const array = for (let i = 0; i < string.length; ++i) { assertEquals(getChar(array, i), string.charCodeAt(i)); } + +// Test calling built-ins with different numbers of (invalid) arguments. +function arrayToString() { + WebAssembly.experimentalConvertArrayToString(...arguments); +} +function stringToArray() { + WebAssembly.experimentalConvertStringToArray(...arguments); +} + +let args = []; +for (let i = 1; i <= 5; ++i) { + assertThrows(() => arrayToString(...args)); + assertThrows(() => stringToArray(...args)); + %PrepareFunctionForOptimization(arrayToString); + %PrepareFunctionForOptimization(stringToArray); + %OptimizeFunctionOnNextCall(arrayToString); + %OptimizeFunctionOnNextCall(stringToArray); + assertThrows(() => arrayToString(...args)); + assertThrows(() => stringToArray(...args)); + args.push(i); +} diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-async.js b/deps/v8/test/mjsunit/wasm/gc-js-interop-async.js new file mode 100644 index 00000000000000..110e908b40de10 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-async.js @@ -0,0 +1,68 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file.
+ +// Flags: --experimental-wasm-gc --wasm-gc-js-interop --allow-natives-syntax + +d8.file.execute('test/mjsunit/wasm/gc-js-interop-helpers.js'); + +let {struct, array} = CreateWasmObjects(); +for (const wasm_obj of [struct, array]) { + + repeated(() => assertThrowsAsync(Promise.all(wasm_obj), TypeError)); + repeated(() => Promise.all([wasm_obj])); + repeated(() => assertThrowsAsync(Promise.allSettled(wasm_obj), TypeError)); + repeated( + () => Promise.allSettled([wasm_obj]) + .then((info) => assertEquals('fulfilled', info[0].status))); + repeated(() => assertThrowsAsync(Promise.any(wasm_obj), TypeError)); + repeated(() => Promise.any([wasm_obj])); + repeated(() => assertThrowsAsync(Promise.race(wasm_obj), TypeError)); + repeated(() => Promise.race([wasm_obj])); + // Using wasm objects in Promise.resolve and Promise.reject should work as + // for any other object. + repeated( + () => (new Promise((resolve, reject) => resolve(wasm_obj))) + .then((v) => assertSame(wasm_obj, v))); + repeated( + () => (new Promise((resolve, reject) => reject(wasm_obj))) + .then(() => assertUnreachable()) + .catch((v) => assertSame(wasm_obj, v))); + // Wasm objects can also be passed as a result in a then chain. + repeated( + () => (new Promise((resolve) => resolve({}))) + .then(() => wasm_obj) + .then((v) => assertSame(wasm_obj, v))); + // If the `then` argument isn't a callback, it will simply be replaced with + // an identity function (x) => x. + repeated( + () => (new Promise((resolve) => resolve({}))) + .then(wasm_obj) // The value itself doesn't have any impact. + .then((v) => assertEquals({}, v), () => assertUnreachable())); + // If the `catch` argument isn't a callback, it will be replaced with a + // thrower function (x) => { throw x; }. + repeated( + () => (new Promise((resolve, reject) => reject({}))) + .then(() => null) + .catch(wasm_obj) // The value itself doesn't have any impact. + .then(() => assertUnreachable(), (v) => assertEquals({}, v))); + // `finally(wasm_obj)` behaves just like `then(wasm_obj, wasm_obj)` + repeated( + () => (new Promise((resolve, reject) => resolve({}))) + .finally(wasm_obj) + .then((v) => assertEquals({}, v), () => assertUnreachable())); + repeated( + () => (new Promise((resolve, reject) => reject({}))) + .finally(wasm_obj) + .then(() => assertUnreachable(), (v) => assertEquals({}, v))); + + // Ensure no statement re-assigned wasm_obj by accident. + assertTrue(wasm_obj == struct || wasm_obj == array); +} + +repeated(async function testAsync() { + for (let wasm_obj of [struct, array]) { + let async_wasm_obj = await wasm_obj; + assertSame(wasm_obj, async_wasm_obj); + } +}); diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-collections.js b/deps/v8/test/mjsunit/wasm/gc-js-interop-collections.js new file mode 100644 index 00000000000000..aea0ce7300b20e --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-collections.js @@ -0,0 +1,159 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc --wasm-gc-js-interop --allow-natives-syntax + +d8.file.execute('test/mjsunit/wasm/gc-js-interop-helpers.js'); + +let {struct, array} = CreateWasmObjects(); +for (const wasm_obj of [struct, array]) { + + // Test Array. 
+ testThrowsRepeated(() => Array.from(wasm_obj), TypeError); + repeated(() => assertFalse(Array.isArray(wasm_obj))); + repeated(() => assertEquals([wasm_obj], Array.of(wasm_obj))); + testThrowsRepeated(() => [1, 2].at(wasm_obj), TypeError); + repeated(() => assertEquals([1, wasm_obj], [1].concat(wasm_obj))); + testThrowsRepeated(() => [1, 2].copyWithin(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].every(wasm_obj), TypeError); + repeated( + () => assertEquals([1, wasm_obj, 3], [1, 2, 3].fill(wasm_obj, 1, 2))); + testThrowsRepeated(() => [1, 2].filter(wasm_obj), TypeError); + repeated( + () => assertEquals( + [wasm_obj], [undefined, wasm_obj, null].filter(function(v) { + return v == this; + }, wasm_obj))); + testThrowsRepeated(() => [1, 2].find(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].findIndex(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].findLast(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].findLastIndex(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].flat(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].flatMap(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].forEach(wasm_obj), TypeError); + repeated(() => { + let res = []; + [1, 2].forEach(function(x) { + res.push(this); + }, wasm_obj); + assertEquals([wasm_obj, wasm_obj], res); + }); + repeated(() => assertTrue([wasm_obj].includes(wasm_obj))); + repeated(() => assertFalse([1].includes(wasm_obj))); + repeated(() => assertEquals(1, [0, wasm_obj, 2].indexOf(wasm_obj))); + testThrowsRepeated(() => ['a', 'b'].join(wasm_obj), TypeError); + repeated(() => assertEquals(1, [0, wasm_obj, 2].lastIndexOf(wasm_obj))); + testThrowsRepeated(() => [1, 2].map(wasm_obj), TypeError); + repeated(() => assertEquals([wasm_obj, wasm_obj], [1, 2].map(function() { + return this; + }, wasm_obj))); + repeated(() => { + let arr = [1]; + arr.push(wasm_obj, 3); + assertEquals([1, wasm_obj, 3], arr); + }); + testThrowsRepeated(() => [1, 2].reduce(wasm_obj), TypeError); + repeated(() => assertSame(wasm_obj, [].reduce(() => null, wasm_obj))); + testThrowsRepeated(() => [1, 2].reduceRight(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].slice(wasm_obj, 2), TypeError); + testThrowsRepeated(() => [1, 2].some(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].sort(wasm_obj), TypeError); + testThrowsRepeated(() => [1, 2].splice(1, wasm_obj), TypeError); + repeated(() => { + let arr = [1, 2]; + arr.unshift(wasm_obj); + assertEquals([wasm_obj, 1, 2], arr); + }); + testThrowsRepeated(() => Int8Array.from(wasm_obj), TypeError); + testThrowsRepeated(() => Int8Array.of(wasm_obj), TypeError); + for (let ArrayType + of [Int8Array, Int16Array, Int32Array, Uint8Array, Uint16Array, + Uint32Array]) { + let array = ArrayType.of(1, 2, 3); + testThrowsRepeated(() => array.at(wasm_obj), TypeError); + testThrowsRepeated(() => array.copyWithin(wasm_obj), TypeError); + testThrowsRepeated(() => array.fill(wasm_obj, 0, 1), TypeError); + testThrowsRepeated(() => array.filter(wasm_obj), TypeError); + testThrowsRepeated(() => array.find(wasm_obj), TypeError); + testThrowsRepeated(() => array.findIndex(wasm_obj), TypeError); + testThrowsRepeated(() => array.findLast(wasm_obj), TypeError); + testThrowsRepeated(() => array.findLastIndex(wasm_obj), TypeError); + testThrowsRepeated(() => array.forEach(wasm_obj), TypeError); + repeated(() => assertFalse(array.includes(wasm_obj))); + repeated(() => assertEquals(-1, array.indexOf(wasm_obj))); + testThrowsRepeated(() => array.join(wasm_obj), TypeError); + repeated(() 
=> assertEquals(-1, array.lastIndexOf(wasm_obj))); + testThrowsRepeated(() => array.map(wasm_obj), TypeError); + testThrowsRepeated(() => array.map(() => wasm_obj), TypeError); + testThrowsRepeated(() => array.reduce(wasm_obj), TypeError); + testThrowsRepeated(() => array.reduceRight(wasm_obj), TypeError); + testThrowsRepeated(() => array.set(wasm_obj), TypeError); + testThrowsRepeated(() => array.set([wasm_obj]), TypeError); + testThrowsRepeated(() => array.slice(wasm_obj, 1), TypeError); + testThrowsRepeated(() => array.some(wasm_obj), TypeError); + testThrowsRepeated(() => array.sort(wasm_obj), TypeError); + testThrowsRepeated(() => array.subarray(0, wasm_obj), TypeError); + } + + // Test Map. + for (let MapType of [Map, WeakMap]) { + repeated(() => { + let val = new String('a'); + let map = new MapType([[val, wasm_obj], [wasm_obj, val]]); + assertSame(wasm_obj, map.get(val)); + assertEquals(val, map.get(wasm_obj)); + assertTrue(map.has(wasm_obj)); + map.delete(wasm_obj); + assertFalse(map.has(wasm_obj)); + assertThrows(() => map.forEach(wasm_obj), TypeError); + map.set(wasm_obj, wasm_obj); + assertSame(wasm_obj, map.get(wasm_obj)); + }); + } + + // Test Set. + for (let SetType of [Set, WeakSet]) { + repeated(() => { + let set = new SetType([new String('a'), wasm_obj]); + set.add(wasm_obj); + assertTrue(set.has(wasm_obj)); + set.delete(wasm_obj); + assertFalse(set.has(wasm_obj)); + }); + } + + // Test ArrayBuffer. + repeated(() => assertFalse(ArrayBuffer.isView(wasm_obj))); + testThrowsRepeated( + () => (new ArrayBuffer(32)).slice(wasm_obj, wasm_obj), TypeError); + testThrowsRepeated( + () => (new SharedArrayBuffer(32)).slice(wasm_obj, wasm_obj), TypeError); + + // Test DataView. + let arrayBuf = new ArrayBuffer(32); + let dataView = new DataView(arrayBuf); + testThrowsRepeated(() => dataView.getBigInt64(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getBigUint64(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getFloat32(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getFloat64(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getInt8(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getInt16(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getInt32(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getUint8(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getUint16(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.getUint32(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setBigInt64(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setBigUint64(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setFloat32(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setFloat64(0, wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setInt8(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setInt16(0, wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setInt32(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setUint8(0, wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setUint16(wasm_obj), TypeError); + testThrowsRepeated(() => dataView.setUint32(0, wasm_obj), TypeError); + + // Ensure no statement re-assigned wasm_obj by accident.
+ assertTrue(wasm_obj == struct || wasm_obj == array); +} diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-export.mjs b/deps/v8/test/mjsunit/wasm/gc-js-interop-export.mjs new file mode 100644 index 00000000000000..ee5e4a23bc2c96 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-export.mjs @@ -0,0 +1,6 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +d8.file.execute('test/mjsunit/wasm/gc-js-interop-helpers.js'); +export let {struct, array} = CreateWasmObjects(); diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-global-constructors.js b/deps/v8/test/mjsunit/wasm/gc-js-interop-global-constructors.js new file mode 100644 index 00000000000000..d45cd30ec5b556 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-global-constructors.js @@ -0,0 +1,95 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc --wasm-gc-js-interop --allow-natives-syntax + +d8.file.execute('test/mjsunit/wasm/gc-js-interop-helpers.js'); + +let {struct, array} = CreateWasmObjects(); +for (const wasm_obj of [struct, array]) { + + // Test constructors of the global object as functions. + testThrowsRepeated(() => AggregateError(wasm_obj), TypeError); + repeated(() => assertSame(wasm_obj, Array(wasm_obj)[0])); + testThrowsRepeated(() => ArrayBuffer(wasm_obj), TypeError); + testThrowsRepeated(() => BigInt(wasm_obj), TypeError); + testThrowsRepeated(() => BigInt64Array(wasm_obj), TypeError); + testThrowsRepeated(() => BigUint64Array(wasm_obj), TypeError); + repeated(() => assertEquals(true, Boolean(wasm_obj))); + testThrowsRepeated(() => DataView(wasm_obj), TypeError); + repeated(() => { + let date = Date(wasm_obj); + assertEquals('string', typeof date); + }); + testThrowsRepeated(() => Error(wasm_obj), TypeError); + testThrowsRepeated(() => EvalError(wasm_obj), TypeError); + testThrowsRepeated(() => Float64Array(wasm_obj), TypeError); + testThrowsRepeated(() => Function(wasm_obj), TypeError); + testThrowsRepeated(() => Int8Array(wasm_obj), TypeError); + testThrowsRepeated(() => Int16Array(wasm_obj), TypeError); + testThrowsRepeated(() => Int32Array(wasm_obj), TypeError); + testThrowsRepeated(() => Map(wasm_obj), TypeError); + testThrowsRepeated(() => Number(wasm_obj), TypeError); + repeated(() => assertSame(wasm_obj, Object(wasm_obj))); + testThrowsRepeated(() => Promise(wasm_obj), TypeError); + testThrowsRepeated(() => Proxy(wasm_obj), TypeError); + testThrowsRepeated(() => RangeError(wasm_obj), TypeError); + testThrowsRepeated(() => ReferenceError(wasm_obj), TypeError); + testThrowsRepeated(() => RegExp(wasm_obj), TypeError); + testThrowsRepeated(() => Set(wasm_obj), TypeError); + testThrowsRepeated(() => SharedArrayBuffer(wasm_obj), TypeError); + testThrowsRepeated(() => String(wasm_obj), TypeError); + testThrowsRepeated(() => Symbol(wasm_obj), TypeError); + testThrowsRepeated(() => SyntaxError(wasm_obj), TypeError); + testThrowsRepeated(() => TypeError(wasm_obj), TypeError); + testThrowsRepeated(() => Uint8Array(wasm_obj), TypeError); + testThrowsRepeated(() => Uint16Array(wasm_obj), TypeError); + testThrowsRepeated(() => Uint32Array(wasm_obj), TypeError); + testThrowsRepeated(() => URIError(wasm_obj), TypeError); + testThrowsRepeated(() => WeakMap(wasm_obj), TypeError); + testThrowsRepeated(() => WeakRef(wasm_obj),
TypeError); + testThrowsRepeated(() => WeakSet(wasm_obj), TypeError); + + // Test constructors of the global object with new. + testThrowsRepeated(() => new AggregateError(wasm_obj), TypeError); + repeated(() => assertSame(wasm_obj, new Array(wasm_obj)[0])); + testThrowsRepeated(() => new ArrayBuffer(wasm_obj), TypeError); + testThrowsRepeated(() => new BigInt(wasm_obj), TypeError); + testThrowsRepeated(() => new BigInt64Array(wasm_obj), TypeError); + testThrowsRepeated(() => new BigUint64Array(wasm_obj), TypeError); + repeated(() => assertEquals(true, (new Boolean(wasm_obj)).valueOf())); + testThrowsRepeated(() => new DataView(wasm_obj), TypeError); + testThrowsRepeated(() => new Date(wasm_obj), TypeError); + testThrowsRepeated(() => new Error(wasm_obj), TypeError); + testThrowsRepeated(() => new EvalError(wasm_obj), TypeError); + testThrowsRepeated(() => new Float64Array(wasm_obj), TypeError); + testThrowsRepeated(() => new Function(wasm_obj), TypeError); + testThrowsRepeated(() => new Int8Array(wasm_obj), TypeError); + testThrowsRepeated(() => new Int16Array(wasm_obj), TypeError); + testThrowsRepeated(() => new Int32Array(wasm_obj), TypeError); + testThrowsRepeated(() => new Map(wasm_obj), TypeError); + testThrowsRepeated(() => new Number(wasm_obj), TypeError); + repeated(() => assertSame(wasm_obj, new Object(wasm_obj))); + testThrowsRepeated(() => new Promise(wasm_obj), TypeError); + testThrowsRepeated(() => new Proxy(wasm_obj), TypeError); + testThrowsRepeated(() => new RangeError(wasm_obj), TypeError); + testThrowsRepeated(() => new ReferenceError(wasm_obj), TypeError); + testThrowsRepeated(() => new RegExp(wasm_obj), TypeError); + testThrowsRepeated(() => new Set(wasm_obj), TypeError); + testThrowsRepeated(() => new SharedArrayBuffer(wasm_obj), TypeError); + testThrowsRepeated(() => new String(wasm_obj), TypeError); + testThrowsRepeated(() => new Symbol(wasm_obj), TypeError); + testThrowsRepeated(() => new SyntaxError(wasm_obj), TypeError); + testThrowsRepeated(() => new TypeError(wasm_obj), TypeError); + testThrowsRepeated(() => new Uint8Array(wasm_obj), TypeError); + testThrowsRepeated(() => new Uint16Array(wasm_obj), TypeError); + testThrowsRepeated(() => new Uint32Array(wasm_obj), TypeError); + testThrowsRepeated(() => new URIError(wasm_obj), TypeError); + testThrowsRepeated(() => new WeakMap(wasm_obj), TypeError); + repeated(() => assertSame(wasm_obj, new WeakRef(wasm_obj).deref())); + testThrowsRepeated(() => new WeakSet(wasm_obj), TypeError); + + // Ensure no statement re-assigned wasm_obj by accident. + assertTrue(wasm_obj == struct || wasm_obj == array); +} diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-helpers.js b/deps/v8/test/mjsunit/wasm/gc-js-interop-helpers.js new file mode 100644 index 00000000000000..2047862d1682a7 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-helpers.js @@ -0,0 +1,54 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Helpers to test interoperability of Wasm objects in JavaScript. 
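+// The helpers defined below drive each test through both execution tiers: +// `repeated(fn)` runs `fn` several times, forces optimization with +// %OptimizeFunctionOnNextCall, and runs it once more; `testThrowsRepeated` +// does the same while asserting that every invocation throws the expected +// error type.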
+ +d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); + +function CreateWasmObjects() { + let builder = new WasmModuleBuilder(); + builder.setSingletonRecGroups(); + let struct_type = builder.addStruct([makeField(kWasmI32, true)]); + let array_type = builder.addArray(kWasmI32, true); + builder.addFunction('MakeStruct', makeSig([], [kWasmExternRef])) + .exportFunc() + .addBody([ + kExprI32Const, 42, // -- + kGCPrefix, kExprStructNew, struct_type, // -- + kGCPrefix, kExprExternExternalize // -- + ]); + builder.addFunction('MakeArray', makeSig([], [kWasmExternRef])) + .exportFunc() + .addBody([ + kExprI32Const, 2, // length + kGCPrefix, kExprArrayNewDefault, array_type, // -- + kGCPrefix, kExprExternExternalize // -- + ]); + + let instance = builder.instantiate(); + return { + struct: instance.exports.MakeStruct(), + array: instance.exports.MakeArray(), + }; +} + +function testThrowsRepeated(fn, ErrorType) { + %PrepareFunctionForOptimization(fn); + for (let i = 0; i < 5; i++) assertThrows(fn, ErrorType); + %OptimizeFunctionOnNextCall(fn); + assertThrows(fn, ErrorType); + // TODO(7748): This assertion doesn't hold true, as some cases run into + // deopt loops. + // assertTrue(%ActiveTierIsTurbofan(fn)); +} + +function repeated(fn) { + %PrepareFunctionForOptimization(fn); + for (let i = 0; i < 5; i++) fn(); + %OptimizeFunctionOnNextCall(fn); + fn(); + // TODO(7748): This assertion doesn't hold true, as some cases run into + // deopt loops. + // assertTrue(%ActiveTierIsTurbofan(fn)); +} diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-import.mjs b/deps/v8/test/mjsunit/wasm/gc-js-interop-import.mjs new file mode 100644 index 00000000000000..ce4d5f9633eb7b --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-import.mjs @@ -0,0 +1,37 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc --wasm-gc-js-interop --allow-natives-syntax + +import {struct, array} from 'gc-js-interop-export.mjs'; + +// Read struct and array with new wasm module. +let builder = new WasmModuleBuilder(); +builder.setSingletonRecGroups(); +let struct_type = builder.addStruct([makeField(kWasmI32, true)]); +let array_type = builder.addArray(kWasmI32, true); +builder.addFunction('readStruct', makeSig([kWasmExternRef], [kWasmI32])) + .exportFunc() + .addBody([ + kExprLocalGet, 0, // -- + kGCPrefix, kExprExternInternalize, // -- + kGCPrefix, kExprRefAsData, // -- + kGCPrefix, kExprRefCast, struct_type, // -- + kGCPrefix, kExprStructGet, struct_type, 0, // -- + ]); +builder.addFunction('readArrayLength', makeSig([kWasmExternRef], [kWasmI32])) + .exportFunc() + .addBody([ + kExprLocalGet, 0, // -- + kGCPrefix, kExprExternInternalize, // -- + kGCPrefix, kExprRefAsArray, // -- + kGCPrefix, kExprArrayLen, + ]); + +let instance = builder.instantiate(); +let wasm = instance.exports; +assertEquals(42, wasm.readStruct(struct)); +assertEquals(2, wasm.readArrayLength(array)); +assertTraps(kTrapIllegalCast, () => wasm.readStruct(array)); +assertTraps(kTrapIllegalCast, () => wasm.readArrayLength(struct)); diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-numeric.js b/deps/v8/test/mjsunit/wasm/gc-js-interop-numeric.js new file mode 100644 index 00000000000000..72c22da66fa4c0 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-numeric.js @@ -0,0 +1,104 @@ +// Copyright 2022 the V8 project authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc --wasm-gc-js-interop --allow-natives-syntax + +d8.file.execute('test/mjsunit/wasm/gc-js-interop-helpers.js'); + +let {struct, array} = CreateWasmObjects(); +for (const wasm_obj of [struct, array]) { + + // Test numeric operators. + testThrowsRepeated(() => ++wasm_obj, TypeError); + testThrowsRepeated(() => wasm_obj--, TypeError); + testThrowsRepeated(() => +wasm_obj, TypeError); + testThrowsRepeated(() => -wasm_obj, TypeError); + testThrowsRepeated(() => ~wasm_obj, TypeError); + testThrowsRepeated(() => wasm_obj - 2, TypeError); + testThrowsRepeated(() => wasm_obj * 2, TypeError); + testThrowsRepeated(() => wasm_obj / 2, TypeError); + testThrowsRepeated(() => wasm_obj ** 2, TypeError); + testThrowsRepeated(() => wasm_obj << 2, TypeError); + testThrowsRepeated(() => wasm_obj >> 2, TypeError); + testThrowsRepeated(() => 2 >>> wasm_obj, TypeError); + testThrowsRepeated(() => 2 % wasm_obj, TypeError); + testThrowsRepeated(() => wasm_obj | 1, TypeError); + testThrowsRepeated(() => 1 & wasm_obj, TypeError); + testThrowsRepeated(() => wasm_obj ^ wasm_obj, TypeError); + testThrowsRepeated(() => wasm_obj += 1, TypeError); + let tmp = 1; + testThrowsRepeated(() => tmp += wasm_obj, TypeError); + testThrowsRepeated(() => tmp <<= wasm_obj, TypeError); + testThrowsRepeated(() => tmp &= wasm_obj, TypeError); + testThrowsRepeated(() => tmp **= wasm_obj, TypeError); + + // Test numeric functions of the global object. + testThrowsRepeated(() => isFinite(wasm_obj), TypeError); + testThrowsRepeated(() => isNaN(wasm_obj), TypeError); + testThrowsRepeated(() => parseFloat(wasm_obj), TypeError); + testThrowsRepeated(() => parseInt(wasm_obj), TypeError); + + // Test Number. + repeated(() => assertFalse(Number.isFinite(wasm_obj))); + repeated(() => assertFalse(Number.isInteger(wasm_obj))); + repeated(() => assertFalse(Number.isNaN(wasm_obj))); + repeated(() => assertFalse(Number.isSafeInteger(wasm_obj))); + testThrowsRepeated(() => Number.parseFloat(wasm_obj), TypeError); + testThrowsRepeated(() => Number.parseInt(wasm_obj), TypeError); + + // Test BigInt. + testThrowsRepeated(() => BigInt.asIntN(2, wasm_obj), TypeError); + testThrowsRepeated( + () => BigInt.asUintN(wasm_obj, 123n), TypeError); + + // Test Math. 
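+ // Math functions coerce their argument with ToNumber, so every call below + // is expected to throw a TypeError for wasm objects.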
+ testThrowsRepeated(() => Math.abs(wasm_obj), TypeError); + testThrowsRepeated(() => Math.acos(wasm_obj), TypeError); + testThrowsRepeated(() => Math.acosh(wasm_obj), TypeError); + testThrowsRepeated(() => Math.asin(wasm_obj), TypeError); + testThrowsRepeated(() => Math.asinh(wasm_obj), TypeError); + testThrowsRepeated(() => Math.atan(wasm_obj), TypeError); + testThrowsRepeated(() => Math.atanh(wasm_obj), TypeError); + testThrowsRepeated(() => Math.atan2(wasm_obj), TypeError); + testThrowsRepeated(() => Math.cbrt(wasm_obj), TypeError); + testThrowsRepeated(() => Math.ceil(wasm_obj), TypeError); + testThrowsRepeated(() => Math.clz32(wasm_obj), TypeError); + testThrowsRepeated(() => Math.cos(wasm_obj), TypeError); + testThrowsRepeated(() => Math.cosh(wasm_obj), TypeError); + testThrowsRepeated(() => Math.exp(wasm_obj), TypeError); + testThrowsRepeated(() => Math.expm1(wasm_obj), TypeError); + testThrowsRepeated(() => Math.floor(wasm_obj), TypeError); + testThrowsRepeated(() => Math.fround(wasm_obj), TypeError); + testThrowsRepeated(() => Math.hypot(wasm_obj), TypeError); + testThrowsRepeated(() => Math.imul(wasm_obj, wasm_obj), TypeError); + testThrowsRepeated(() => Math.log(wasm_obj), TypeError); + testThrowsRepeated(() => Math.log1p(wasm_obj), TypeError); + testThrowsRepeated(() => Math.log10(wasm_obj), TypeError); + testThrowsRepeated(() => Math.log2(wasm_obj), TypeError); + testThrowsRepeated(() => Math.max(2, wasm_obj), TypeError); + testThrowsRepeated(() => Math.min(2, wasm_obj), TypeError); + testThrowsRepeated(() => Math.pow(2, wasm_obj), TypeError); + testThrowsRepeated(() => Math.pow(wasm_obj, 2), TypeError); + testThrowsRepeated(() => Math.round(wasm_obj), TypeError); + testThrowsRepeated(() => Math.sign(wasm_obj), TypeError); + testThrowsRepeated(() => Math.sin(wasm_obj), TypeError); + testThrowsRepeated(() => Math.sinh(wasm_obj), TypeError); + testThrowsRepeated(() => Math.sqrt(wasm_obj), TypeError); + testThrowsRepeated(() => Math.tan(wasm_obj), TypeError); + testThrowsRepeated(() => Math.tanh(wasm_obj), TypeError); + testThrowsRepeated(() => Math.trunc(wasm_obj), TypeError); + + // Test boolean. + repeated(() => assertFalse(!wasm_obj)); + repeated(() => assertTrue(wasm_obj ? true : false)); + tmp = true; + repeated(() => assertSame(wasm_obj, tmp &&= wasm_obj)); + tmp = 0; + repeated(() => assertSame(wasm_obj, tmp ||= wasm_obj)); + tmp = null; + repeated(() => assertSame(wasm_obj, tmp ??= wasm_obj)); + + // Ensure no statement re-assigned wasm_obj by accident. + assertTrue(wasm_obj == struct || wasm_obj == array); +} diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-objects.js b/deps/v8/test/mjsunit/wasm/gc-js-interop-objects.js new file mode 100644 index 00000000000000..9581248fa7d8d6 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-objects.js @@ -0,0 +1,176 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc --wasm-gc-js-interop --allow-natives-syntax + +d8.file.execute('test/mjsunit/wasm/gc-js-interop-helpers.js'); + +let {struct, array} = CreateWasmObjects(); +for (const wasm_obj of [struct, array]) { + + // Test Object. 
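+ // Operations that would modify the wasm object are expected to throw, + // while pure queries (keys, entries, descriptors) return empty results.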
+ testThrowsRepeated(() => Object.freeze(wasm_obj), TypeError); + testThrowsRepeated(() => Object.seal(wasm_obj), TypeError); + testThrowsRepeated( + () => Object.prototype.__lookupGetter__.call(wasm_obj, 'foo'), TypeError); + testThrowsRepeated( + () => Object.prototype.__lookupSetter__.call(wasm_obj, 'foo'), TypeError); + testThrowsRepeated( + () => Object.prototype.__defineGetter__.call(wasm_obj, 'foo', () => 42), + TypeError); + testThrowsRepeated( + () => Object.prototype.__defineSetter__.call(wasm_obj, 'foo', () => {}), + TypeError); + testThrowsRepeated( + () => Object.defineProperty(wasm_obj, 'foo', {value: 42}), TypeError); + + repeated(() => assertEquals([], Object.getOwnPropertyNames(wasm_obj))); + repeated(() => assertEquals([], Object.getOwnPropertySymbols(wasm_obj))); + repeated(() => assertEquals({}, Object.getOwnPropertyDescriptors(wasm_obj))); + repeated(() => assertEquals([], Object.keys(wasm_obj))); + repeated(() => assertEquals([], Object.entries(wasm_obj))); + repeated( + () => assertEquals( + undefined, Object.getOwnPropertyDescriptor(wasm_obj, 'foo'))); + repeated(() => assertEquals(false, 'foo' in wasm_obj)); + repeated( + () => assertEquals( + false, Object.prototype.hasOwnProperty.call(wasm_obj, 'foo'))); + repeated(() => assertEquals(true, Object.isSealed(wasm_obj))); + repeated(() => assertEquals(true, Object.isFrozen(wasm_obj))); + repeated(() => assertEquals(false, Object.isExtensible(wasm_obj))); + repeated(() => assertEquals('object', typeof wasm_obj)); + repeated( + () => assertEquals( + '[object Object]', Object.prototype.toString.call(wasm_obj))); + + repeated(() => { + let tgt = {}; + Object.assign(tgt, wasm_obj); + assertEquals({}, tgt); + }); + repeated(() => Object.create(wasm_obj)); + repeated(() => ({}).__proto__ = wasm_obj); + testThrowsRepeated( + () => Object.defineProperties(wasm_obj, {prop: {value: 1}}), TypeError); + testThrowsRepeated( + () => Object.defineProperty(wasm_obj, 'prop', {value: 1}), TypeError); + testThrowsRepeated(() => Object.fromEntries(wasm_obj), TypeError); + testThrowsRepeated(() => Object.getPrototypeOf(wasm_obj), TypeError); + repeated(() => assertFalse(Object.hasOwn(wasm_obj, 'test'))); + testThrowsRepeated(() => Object.preventExtensions(wasm_obj), TypeError); + testThrowsRepeated(() => Object.setPrototypeOf(wasm_obj, Object), TypeError); + repeated(() => assertEquals([], Object.values(wasm_obj))); + testThrowsRepeated(() => wasm_obj.toString(), TypeError); + + // Test prototype chain containing a wasm object. + { + let obj = Object.create(wasm_obj); + repeated(() => assertSame(wasm_obj, Object.getPrototypeOf(obj))); + repeated(() => assertSame(wasm_obj, Reflect.getPrototypeOf(obj))); + testThrowsRepeated(() => obj.__proto__, TypeError); + testThrowsRepeated(() => obj.__proto__ = wasm_obj, TypeError); + // Property access fails. + testThrowsRepeated(() => obj[0], TypeError); + testThrowsRepeated(() => obj.prop, TypeError); + testThrowsRepeated(() => obj.toString(), TypeError); + // Most conversions fail as it will use .toString(), .valueOf(), ... + testThrowsRepeated(() => `${obj}`, TypeError); + testThrowsRepeated(() => obj + 1, TypeError); + repeated(() => assertTrue(!!obj)); + } + repeated(() => { + let obj = {}; + Object.setPrototypeOf(obj, wasm_obj); + assertSame(wasm_obj, Object.getPrototypeOf(obj)); + Object.setPrototypeOf(obj, null); + assertSame(null, Object.getPrototypeOf(obj)); + Reflect.setPrototypeOf(obj, wasm_obj); + assertSame(wasm_obj, Reflect.getPrototypeOf(obj)); + }) + + // Test Reflect. 
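+ // Reflect mirrors the Object tests above: mutating operations throw a + // TypeError, while queries such as Reflect.has and Reflect.ownKeys return + // false / empty results.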
+ { + let fct = function(x) { + return [this, x] + }; + repeated( + () => assertEquals([wasm_obj, 1], Reflect.apply(fct, wasm_obj, [1]))); + repeated( + () => assertEquals([{}, wasm_obj], Reflect.apply(fct, {}, [wasm_obj]))); + testThrowsRepeated(() => Reflect.apply(fct, 1, wasm_obj), TypeError); + testThrowsRepeated(() => Reflect.apply(wasm_obj, null, []), TypeError); + } + testThrowsRepeated(() => Reflect.construct(wasm_obj, []), TypeError); + testThrowsRepeated(() => Reflect.construct(Object, wasm_obj), TypeError); + testThrowsRepeated(() => Reflect.construct(Object, [], wasm_obj), TypeError); + testThrowsRepeated( + () => Reflect.defineProperty(wasm_obj, 'prop', {value: 1}), TypeError); + testThrowsRepeated( + () => Reflect.defineProperty({}, wasm_obj, {value: 1}), TypeError); + + // Reflect.defineProperty performs ToPropertyDescriptor on the third + // argument, which checks whether {value} etc. exist before accessing them. + // Therefore it does not throw but adds the property with value undefined. + repeated(() => { + let obj = {}; + assertTrue(Reflect.defineProperty(obj, 'prop', wasm_obj)); + assertTrue(obj.hasOwnProperty('prop')); + assertEquals(undefined, obj.prop); + }); + repeated(() => { + let obj = {}; + assertTrue(Reflect.defineProperty(obj, 'prop2', {value: wasm_obj})); + assertSame(wasm_obj, obj.prop2); + }); + testThrowsRepeated(() => Reflect.deleteProperty(wasm_obj, 'prop'), TypeError); + testThrowsRepeated(() => Reflect.deleteProperty({}, wasm_obj), TypeError); + testThrowsRepeated(() => Reflect.get(wasm_obj, 'prop'), TypeError); + testThrowsRepeated(() => Reflect.getPrototypeOf(wasm_obj), TypeError); + repeated(() => assertFalse(Reflect.has(wasm_obj, 'prop'))); + repeated(() => assertTrue(Reflect.has({wasm_obj}, 'wasm_obj'))); + + repeated(() => assertFalse(Reflect.isExtensible(wasm_obj))); + repeated(() => assertEquals([], Reflect.ownKeys(wasm_obj))); + testThrowsRepeated(() => Reflect.preventExtensions(wasm_obj), TypeError); + testThrowsRepeated(() => Reflect.set(wasm_obj, 'prop', 123), TypeError); + testThrowsRepeated( + () => Reflect.setPrototypeOf(wasm_obj, Object.prototype), TypeError); + repeated(() => Reflect.setPrototypeOf({}, wasm_obj)); + + // Test Proxy. + { + const handler = { + get(target, prop, receiver) { + return 'proxied'; + } + }; + let proxy = new Proxy(wasm_obj, handler); + repeated(() => assertEquals('proxied', proxy.abc)); + testThrowsRepeated(() => proxy.abc = 123, TypeError); + } + { + let proxy = new Proxy({}, wasm_obj); + testThrowsRepeated(() => proxy.abc, TypeError); + } + { + const handler = { + get(target, prop, receiver) { + return 'proxied'; + } + }; + let {proxy, revoke} = Proxy.revocable(wasm_obj, handler); + repeated(() => assertEquals('proxied', proxy.abc)); + testThrowsRepeated(() => proxy.abc = 123, TypeError); + revoke(); + testThrowsRepeated(() => proxy.abc, TypeError); + } + { + let proxy = Proxy.revocable({}, wasm_obj).proxy; + testThrowsRepeated(() => proxy.abc, TypeError); + } + + // Ensure no statement re-assigned wasm_obj by accident. + assertTrue(wasm_obj == struct || wasm_obj == array); +} diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop-wasm.js b/deps/v8/test/mjsunit/wasm/gc-js-interop-wasm.js new file mode 100644 index 00000000000000..d97bff89cdb7a2 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop-wasm.js @@ -0,0 +1,87 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file.
+ +// Flags: --experimental-wasm-gc --wasm-gc-js-interop --wasm-test-streaming +// Flags: --allow-natives-syntax + +d8.file.execute('test/mjsunit/wasm/gc-js-interop-helpers.js'); + +let {struct, array} = CreateWasmObjects(); +for (const wasm_obj of [struct, array]) { + testThrowsRepeated(() => new WebAssembly.Global(wasm_obj), TypeError); + testThrowsRepeated( + () => new WebAssembly.Global({value: wasm_obj}), TypeError); + testThrowsRepeated( + () => new WebAssembly.Global({value: 'i32'}, wasm_obj), TypeError); + repeated( + () => assertSame( + wasm_obj, + (new WebAssembly.Global({value: 'anyref'}, wasm_obj)).value)); + + testThrowsRepeated(() => new WebAssembly.Module(wasm_obj), TypeError); + let module = () => { + let buffer = (new Uint8Array((new WasmModuleBuilder()).toArray())).buffer; + return new WebAssembly.Module(buffer); + }; + testThrowsRepeated( + () => WebAssembly.Module.customSections(wasm_obj), TypeError); + testThrowsRepeated( + () => WebAssembly.Module.customSections(module, wasm_obj), TypeError); + testThrowsRepeated(() => WebAssembly.Module.exports(wasm_obj), TypeError); + testThrowsRepeated(() => WebAssembly.Module.imports(wasm_obj), TypeError); + + testThrowsRepeated(() => new WebAssembly.Instance(wasm_obj), TypeError); + testThrowsRepeated( + () => new WebAssembly.Instance(module, wasm_obj), TypeError); + + repeated(() => assertThrowsAsync(WebAssembly.compile(wasm_obj), TypeError)); + repeated( + () => + assertThrowsAsync(WebAssembly.compileStreaming(wasm_obj), TypeError)); + repeated( + () => assertThrowsAsync(WebAssembly.instantiate(wasm_obj), TypeError)); + repeated( + () => assertThrowsAsync( + WebAssembly.instantiateStreaming(wasm_obj), TypeError)); + testThrowsRepeated(() => WebAssembly.validate(wasm_obj), TypeError); + + testThrowsRepeated(() => new WebAssembly.Memory(wasm_obj), TypeError); + testThrowsRepeated( + () => new WebAssembly.Memory({initial: wasm_obj}), TypeError); + testThrowsRepeated( + () => new WebAssembly.Memory({initial: 1, shared: wasm_obj}), TypeError); + let memory = new WebAssembly.Memory({initial: 1}); + testThrowsRepeated(() => memory.grow(wasm_obj), TypeError); + + testThrowsRepeated(() => new WebAssembly.Table(wasm_obj), TypeError); + testThrowsRepeated( + () => new WebAssembly.Table({element: wasm_obj, initial: wasm_obj}), + TypeError); + let table = new WebAssembly.Table({initial: 1, element: 'externref'}); + testThrowsRepeated(() => table.get(wasm_obj), TypeError); + testThrowsRepeated(() => table.grow(wasm_obj), TypeError); + testThrowsRepeated(() => table.set(wasm_obj, null), TypeError); + repeated(() => table.set(0, wasm_obj)); + + testThrowsRepeated(() => new WebAssembly.Tag(wasm_obj), TypeError); + testThrowsRepeated( + () => new WebAssembly.Tag({parameters: wasm_obj}), TypeError); + testThrowsRepeated( + () => new WebAssembly.Tag({parameters: [wasm_obj]}), TypeError); + + let tag = new WebAssembly.Tag({parameters: ['dataref']}); + testThrowsRepeated(() => new WebAssembly.Exception(wasm_obj), TypeError); + testThrowsRepeated(() => new WebAssembly.Exception(tag, wasm_obj), TypeError); + repeated(() => new WebAssembly.Exception(tag, [wasm_obj])); + let exception = new WebAssembly.Exception(tag, [wasm_obj]); + testThrowsRepeated(() => exception.is(wasm_obj), TypeError); + testThrowsRepeated(() => exception.getArg(wasm_obj), TypeError); + testThrowsRepeated(() => exception.getArg(tag, wasm_obj), TypeError); + testThrowsRepeated(() => new WebAssembly.CompileError(wasm_obj), TypeError); + testThrowsRepeated(() => new 
WebAssembly.LinkError(wasm_obj), TypeError); + testThrowsRepeated(() => new WebAssembly.RuntimeError(wasm_obj), TypeError); + + // Ensure no statement re-assigned wasm_obj by accident. + assertTrue(wasm_obj == struct || wasm_obj == array); +} diff --git a/deps/v8/test/mjsunit/wasm/gc-js-interop.js b/deps/v8/test/mjsunit/wasm/gc-js-interop.js new file mode 100644 index 00000000000000..d05144d13a1de2 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-js-interop.js @@ -0,0 +1,280 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc --wasm-gc-js-interop --allow-natives-syntax + +d8.file.execute('test/mjsunit/wasm/gc-js-interop-helpers.js'); + +let {struct, array} = CreateWasmObjects(); +for (const wasm_obj of [struct, array]) { + testThrowsRepeated(() => wasm_obj.foo, TypeError); + testThrowsRepeated(() => wasm_obj.foo = 42, TypeError); + testThrowsRepeated(() => wasm_obj[0], TypeError); + testThrowsRepeated(() => wasm_obj[0] = undefined, TypeError); + testThrowsRepeated(() => wasm_obj.__proto__, TypeError); + testThrowsRepeated( + () => Object.prototype.__proto__.call(wasm_obj), TypeError); + testThrowsRepeated(() => wasm_obj.__proto__ = null, TypeError); + testThrowsRepeated(() => JSON.stringify(wasm_obj), TypeError); + testThrowsRepeated(() => { + for (let p in wasm_obj) { + } + }, TypeError); + testThrowsRepeated(() => { + for (let p of wasm_obj) { + } + }, TypeError); + testThrowsRepeated(() => wasm_obj.toString(), TypeError); + testThrowsRepeated(() => wasm_obj.valueOf(), TypeError); + testThrowsRepeated(() => '' + wasm_obj, TypeError); + testThrowsRepeated(() => 0 + wasm_obj, TypeError); + testThrowsRepeated(() => { delete wasm_obj.foo; }, TypeError); + + { + let js_obj = {}; + js_obj.foo = wasm_obj; + repeated(() => assertSame(wasm_obj, js_obj.foo)); + js_obj[0] = wasm_obj; + repeated(() => assertSame(wasm_obj, js_obj[0])); + } + + repeated(() => assertEquals(42, wasm_obj ? 
42 : 0)); + testThrowsRepeated(() => wasm_obj(), TypeError); + + testThrowsRepeated(() => [...wasm_obj], TypeError); + repeated(() => assertEquals({}, {...wasm_obj})); + repeated(() => ((...wasm_obj) => {})()); + repeated(() => assertSame(wasm_obj, ({wasm_obj}).wasm_obj)); + testThrowsRepeated(() => ({[wasm_obj]: null}), TypeError); + testThrowsRepeated(() => `${wasm_obj}`, TypeError); + testThrowsRepeated(() => wasm_obj`test`, TypeError); + testThrowsRepeated(() => new wasm_obj, TypeError); + testThrowsRepeated(() => wasm_obj?.property, TypeError); + + repeated(() => assertEquals(undefined, void wasm_obj)); + testThrowsRepeated(() => 2 == wasm_obj, TypeError); + repeated(() => assertFalse(2 === wasm_obj)); + repeated(() => assertFalse({} === wasm_obj)); + repeated(() => assertTrue(wasm_obj == wasm_obj)); + repeated(() => assertTrue(wasm_obj === wasm_obj)); + repeated(() => assertFalse(wasm_obj != wasm_obj)); + repeated(() => assertFalse(wasm_obj !== wasm_obj)); + repeated(() => assertFalse(struct == array)); + repeated(() => assertTrue(struct != array)); + testThrowsRepeated(() => wasm_obj < wasm_obj, TypeError); + testThrowsRepeated(() => wasm_obj <= wasm_obj, TypeError); + testThrowsRepeated(() => wasm_obj >= wasm_obj, TypeError); + + testThrowsRepeated(() => { let [] = wasm_obj; }, TypeError); + testThrowsRepeated(() => { let [a, b] = wasm_obj; }, TypeError); + testThrowsRepeated(() => { let [...all] = wasm_obj; }, TypeError); + testThrowsRepeated(() => { let {a} = wasm_obj; }, TypeError); + repeated(() => { let {} = wasm_obj; }, TypeError); + repeated(() => { + let {...rest} = wasm_obj; + assertTrue(rest instanceof Object); + }); + testThrowsRepeated(() => { + with(wasm_obj) test; + }, ReferenceError); + repeated(() => { + let tmp = 1; + with(wasm_obj) var with_lookup = tmp; + assertEquals(tmp, with_lookup); + }); + repeated(() => { + switch (wasm_obj) { + case 0: + default: + throw 1; + case wasm_obj: + break; + } + }); + repeated(() => { + try { + throw wasm_obj; + } catch (e) { + assertEquals(e, wasm_obj); + } + }); + testThrowsRepeated( + () => {class SubClass extends wasm_obj {}}, TypeError, + 'Class extends value [object Object] is not a constructor or null'); + repeated(() => { + class TestMemberInit { + x = wasm_obj; + }; + assertSame(wasm_obj, new TestMemberInit().x); + }); + repeated(() => assertSame(wasm_obj, eval('wasm_obj'))); + + // Test functions of the global object. 
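+ // These functions coerce their argument with ToString, which throws for + // wasm objects.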
+ testThrowsRepeated(() => decodeURI(wasm_obj), TypeError); + testThrowsRepeated(() => decodeURIComponent(wasm_obj), TypeError); + testThrowsRepeated(() => encodeURI(wasm_obj), TypeError); + testThrowsRepeated(() => encodeURIComponent(wasm_obj), TypeError); + + { + let fct = function(x) { + return [this, x] + }; + repeated(() => assertEquals([wasm_obj, 1], fct.apply(wasm_obj, [1]))); + repeated( + () => + assertEquals([new Number(1), wasm_obj], fct.apply(1, [wasm_obj]))); + testThrowsRepeated(() => fct.apply(1, wasm_obj), TypeError); + repeated(() => assertEquals([wasm_obj, 1], fct.bind(wasm_obj)(1))); + repeated(() => assertEquals([wasm_obj, 1], fct.call(wasm_obj, 1))); + } + + testThrowsRepeated(() => Symbol.for(wasm_obj), TypeError); + testThrowsRepeated(() => Symbol.keyFor(wasm_obj), TypeError); + testThrowsRepeated(() => Date.parse(wasm_obj), TypeError); + testThrowsRepeated(() => Date.UTC(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setDate(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setFullYear(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setHours(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setMilliseconds(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setMinutes(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setMonth(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setSeconds(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setTime(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setUTCDate(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setUTCFullYear(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setUTCHours(wasm_obj), TypeError); + testThrowsRepeated( + () => (new Date()).setUTCMilliseconds(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setUTCMinutes(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setUTCMonth(wasm_obj), TypeError); + testThrowsRepeated(() => (new Date()).setUTCSeconds(wasm_obj), TypeError); + // Date.prototype.toJSON() parameter `key` is ignored. 
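+ // Passing a wasm object for it is therefore harmless and must not throw.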
+ repeated(() => (new Date()).toJSON(wasm_obj)); + testThrowsRepeated(() => String.fromCharCode(wasm_obj), TypeError); + testThrowsRepeated(() => String.fromCodePoint(wasm_obj), TypeError); + testThrowsRepeated(() => String.raw(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.at(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.charAt(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.charCodeAt(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.codePointAt(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.concat(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.endsWith(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.endsWith('t', wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.includes(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.includes('t', wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.indexOf(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.lastIndexOf(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.localeCompare(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.match(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.matchAll(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.normalize(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.padEnd(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.padStart(10, wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.repeat(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.replace(wasm_obj, ''), TypeError); + testThrowsRepeated(() => 'test'.replace('t', wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.replaceAll(wasm_obj, ''), TypeError); + testThrowsRepeated(() => 'test'.search(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.slice(wasm_obj, 2), TypeError); + testThrowsRepeated(() => 'test'.split(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.startsWith(wasm_obj), TypeError); + testThrowsRepeated(() => 'test'.substring(wasm_obj), TypeError); + + let i8Array = new Int8Array(32); + testThrowsRepeated(() => Atomics.add(wasm_obj, 1, 2), TypeError); + testThrowsRepeated(() => Atomics.add(i8Array, wasm_obj, 2), TypeError); + testThrowsRepeated(() => Atomics.add(i8Array, 1, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.and(wasm_obj, 1, 2), TypeError); + testThrowsRepeated(() => Atomics.and(i8Array, wasm_obj, 2), TypeError); + testThrowsRepeated(() => Atomics.and(i8Array, 1, wasm_obj), TypeError); + testThrowsRepeated( + () => Atomics.compareExchange(wasm_obj, 1, 2, 3), TypeError); + testThrowsRepeated( + () => Atomics.compareExchange(i8Array, wasm_obj, 2, 3), TypeError); + testThrowsRepeated( + () => Atomics.compareExchange(i8Array, 1, wasm_obj, 3), TypeError); + testThrowsRepeated( + () => Atomics.compareExchange(i8Array, 1, 2, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.exchange(wasm_obj, 1, 2), TypeError); + testThrowsRepeated(() => Atomics.exchange(i8Array, wasm_obj, 2), TypeError); + testThrowsRepeated(() => Atomics.exchange(i8Array, 1, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.isLockFree(wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.load(wasm_obj, 1), TypeError); + testThrowsRepeated(() => Atomics.load(i8Array, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.or(wasm_obj, 1, 2), TypeError); + testThrowsRepeated(() => Atomics.or(i8Array, wasm_obj, 2), TypeError); + testThrowsRepeated(() => Atomics.or(i8Array, 1, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.store(wasm_obj, 1, 2), TypeError); + 
testThrowsRepeated(() => Atomics.store(i8Array, wasm_obj, 2), TypeError); + testThrowsRepeated(() => Atomics.store(i8Array, 1, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.sub(wasm_obj, 1, 2), TypeError); + testThrowsRepeated(() => Atomics.sub(i8Array, wasm_obj, 2), TypeError); + testThrowsRepeated(() => Atomics.sub(i8Array, 1, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.wait(wasm_obj, 1, 2, 3), TypeError); + testThrowsRepeated(() => Atomics.wait(i8Array, wasm_obj, 2, 3), TypeError); + testThrowsRepeated(() => Atomics.wait(i8Array, 1, wasm_obj, 3), TypeError); + testThrowsRepeated(() => Atomics.wait(i8Array, 1, 2, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.notify(wasm_obj, 1, 2), TypeError); + testThrowsRepeated(() => Atomics.notify(i8Array, wasm_obj, 2), TypeError); + testThrowsRepeated(() => Atomics.notify(i8Array, 1, wasm_obj), TypeError); + testThrowsRepeated(() => Atomics.xor(wasm_obj, 1, 2), TypeError); + testThrowsRepeated(() => Atomics.xor(i8Array, wasm_obj, 2), TypeError); + testThrowsRepeated(() => Atomics.xor(i8Array, 1, wasm_obj), TypeError); + + testThrowsRepeated(() => JSON.parse(wasm_obj), TypeError); + repeated(() => assertEquals({x: 1}, JSON.parse('{"x": 1}', wasm_obj))); + testThrowsRepeated(() => JSON.stringify(wasm_obj), TypeError); + repeated(() => assertEquals('{"x":1}', JSON.stringify({x: 1}, wasm_obj))); + repeated( + () => assertEquals('{"x":1}', JSON.stringify({x: 1}, null, wasm_obj))); + + // Yielding wasm objects from a generator function is valid. + repeated(() => { + let gen = (function*() { + yield wasm_obj; + })(); + assertSame(wasm_obj, gen.next().value); + }); + // Test passing wasm objects via next() back to a generator function. + repeated(() => { + let gen = (function*() { + assertSame(wasm_obj, yield 1); + })(); + assertEquals(1, gen.next().value); + assertTrue(gen.next(wasm_obj).done); // triggers the assertEquals. + }); + // Test passing wasm objects via return() to a generator function. + repeated(() => { + let gen = (function*() { + yield 1; + assertTrue(false); + })(); + assertEquals({value: wasm_obj, done: true}, gen.return(wasm_obj)); + }); + // Test passing wasm objects via throw() to a generator function. + repeated(() => { + let gen = (function*() { + try { + yield 1; + assertTrue(false); // unreached + } catch (e) { + assertSame(wasm_obj, e); + return 2; + } + })(); + assertEquals({value: 1, done: false}, gen.next()); + // wasm_obj is caught inside the generator + assertEquals({value: 2, done: true}, gen.throw(wasm_obj)); + }); + // Treating wasm objects as generators is invalid. + repeated(() => { + let gen = function*() { + yield* wasm_obj; + }; + assertThrows(() => gen().next(), TypeError); + }); + + // Ensure no statement re-assigned wasm_obj by accident. + assertTrue(wasm_obj == struct || wasm_obj == array); +} diff --git a/deps/v8/test/mjsunit/wasm/gc-optimizations.js b/deps/v8/test/mjsunit/wasm/gc-optimizations.js index a05b417bf5d340..c814abdc53fd38 100644 --- a/deps/v8/test/mjsunit/wasm/gc-optimizations.js +++ b/deps/v8/test/mjsunit/wasm/gc-optimizations.js @@ -409,7 +409,7 @@ d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); .addLocals(kWasmI32, 1) .addBody([ kExprLocalGet, 0, - kGCPrefix, kExprRefTest, sub_struct, + kGCPrefix, kExprRefTestDeprecated, sub_struct, // These casts have to be preserved. 
kExprLocalGet, 0, diff --git a/deps/v8/test/mjsunit/wasm/gc-typecheck-reducer.js b/deps/v8/test/mjsunit/wasm/gc-typecheck-reducer.js new file mode 100644 index 00000000000000..af790ab9f4f8d6 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/gc-typecheck-reducer.js @@ -0,0 +1,45 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc --no-liftoff + +d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); + +// Test inlining of a non-trivial type check (i.e. the decoder can't remove it +// directly) that becomes trivial after inlining. +// This covers a bug in the optimizing compiler treating null as a test failure +// for the "ref.test null" instruction. +(function TestRefTestNonTrivialTypeCheckInlinedTrivial() { + var builder = new WasmModuleBuilder(); + let struct = builder.addStruct([makeField(kWasmI32, true)]); + + + let refTestFromAny = builder.addFunction(`refTestFromAny`, + makeSig([kWasmAnyRef], [kWasmI32, kWasmI32])) + .addBody([ + kExprLocalGet, 0, + kGCPrefix, kExprRefTest, struct, + kExprLocalGet, 0, + kGCPrefix, kExprRefTestNull, struct, + ]); + + builder.addFunction(`main`, + makeSig([], [kWasmI32, kWasmI32, kWasmI32, kWasmI32])) + .addBody([ + kExprI32Const, 1, + kGCPrefix, kExprStructNew, struct, + kExprCallFunction, refTestFromAny.index, + kExprRefNull, kNullRefCode, + kExprCallFunction, refTestFromAny.index, + ]).exportFunc(); + + var instance = builder.instantiate(); + let expected = [ + 1, // ref.test <struct> (struct) + 1, // ref.test null <struct> (struct) + 0, // ref.test <struct> (null) + 1 // ref.test null <struct> (null) + ] + assertEquals(expected, instance.exports.main()); +})(); diff --git a/deps/v8/test/mjsunit/wasm/load-immutable.js b/deps/v8/test/mjsunit/wasm/load-immutable.js index 8239d0016f97ea..988f48bac2e6ed 100644 --- a/deps/v8/test/mjsunit/wasm/load-immutable.js +++ b/deps/v8/test/mjsunit/wasm/load-immutable.js @@ -127,7 +127,7 @@ d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); // TF should be able to eliminate the second type check, and return the // constant 1. 
kExprLocalGet, 0, - kGCPrefix, kExprRefTest, sig]) + kGCPrefix, kExprRefTestDeprecated, sig]) .exportFunc(); var instance = builder.instantiate({m : { f: function () {} }}); diff --git a/deps/v8/test/mjsunit/wasm/reference-globals.js b/deps/v8/test/mjsunit/wasm/reference-globals.js index 92b24bc1606dd0..57a37e6552007e 100644 --- a/deps/v8/test/mjsunit/wasm/reference-globals.js +++ b/deps/v8/test/mjsunit/wasm/reference-globals.js @@ -380,3 +380,18 @@ d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); assertThrows(() => builder.instantiate(), WebAssembly.CompileError, /i31.new\[0\] expected type i32, found i64.const of type i64/); })(); + +(function TestConstantExprFuncIndexOutOfBounds() { + print(arguments.callee.name); + let builder = new WasmModuleBuilder(); + let struct_index = builder.addStruct([makeField(kWasmFuncRef, true)]); + let func = builder.addFunction("element", kSig_i_i) + .addBody([kExprLocalGet, 0]) + .exportFunc() + + builder.addGlobal(wasmRefType(struct_index), false, + [kExprRefFunc, func.index + 1, kExprStructNew, struct_index]); + + assertThrows(() => builder.instantiate(), WebAssembly.CompileError, + /function index #1 is out of bounds/); +})(); diff --git a/deps/v8/test/mjsunit/wasm/reference-table-js-interop.js b/deps/v8/test/mjsunit/wasm/reference-table-js-interop.js index 5071ecd0d1ff25..5d91b68940989d 100644 --- a/deps/v8/test/mjsunit/wasm/reference-table-js-interop.js +++ b/deps/v8/test/mjsunit/wasm/reference-table-js-interop.js @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. -// Flags: --experimental-wasm-gc --experimental-wasm-stringref +// Flags: --experimental-wasm-gc --experimental-wasm-stringref --wasm-gc-js-interop d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); @@ -116,18 +116,6 @@ for (let [typeName, type] of Object.entries(tableTypes)) { kExprUnreachable, // conversion failure kExprEnd, ]; - // TODO(7748): Directly compare the externrefs in JS once - // FLAG_wasm_gc_js_interop is supported. - builder.addFunction("eq", - makeSig([kWasmExternRef, kWasmExternRef], [kWasmI32])) - .addBody([ - kExprLocalGet, 0, - ...castExternToEqRef, - kExprLocalGet, 1, - ...castExternToEqRef, - kExprRefEq, - ]) - .exportFunc(); builder.addFunction("createNull", creatorSig) .addBody([kExprRefNull, kNullRefCode]) @@ -173,29 +161,29 @@ for (let [typeName, type] of Object.entries(tableTypes)) { // Set i31. if (typeName != "dataref" && typeName != "arrayref") { table.set(2, wasm.exported(wasm.createI31)); - assertEquals(1, wasm.eq(table.get(2), wasm.tableGet(2))); + assertSame(table.get(2), wasm.tableGet(2)); wasm.tableSet(3, wasm.createI31); - assertEquals(1, wasm.eq(table.get(3), wasm.tableGet(3))); - assertEquals(1, wasm.eq(table.get(2), table.get(3))); // The same smi. + assertSame(table.get(3), wasm.tableGet(3)); + assertSame(table.get(2), table.get(3)); // The same smi. } // Set struct. if (typeName != "arrayref") { table.set(4, wasm.exported(wasm.createStruct)); - assertEquals(1, wasm.eq(table.get(4), wasm.tableGet(4))); + assertSame(table.get(4), wasm.tableGet(4)); assertEquals(12, wasm.tableGetStructVal(4)); wasm.tableSet(5, wasm.createStruct); - assertEquals(1, wasm.eq(table.get(5), wasm.tableGet(5))); + assertSame(table.get(5), wasm.tableGet(5)); assertEquals(12, wasm.tableGetStructVal(5)); - assertEquals(0, wasm.eq(table.get(4), table.get(5))); // Not the same. + assertNotSame(table.get(4), table.get(5)); } // Set array. 
table.set(6, wasm.exported(wasm.createArray)); - assertEquals(1, wasm.eq(table.get(6), wasm.tableGet(6))); + assertSame(table.get(6), wasm.tableGet(6)); assertEquals(12, wasm.tableGetArrayVal(6)); wasm.tableSet(7, wasm.createArray); - assertEquals(1, wasm.eq(table.get(7), wasm.tableGet(7))); + assertSame(table.get(7), wasm.tableGet(7)); assertEquals(12, wasm.tableGetArrayVal(7)); - assertEquals(0, wasm.eq(table.get(6), table.get(7))); // Not the same. + assertNotSame(table.get(6), table.get(7)); // Set stringref. if (typeName == "anyref") { @@ -209,11 +197,6 @@ for (let [typeName, type] of Object.entries(tableTypes)) { assertEquals(largeString, table.get(9)); } - // Ensure all objects are externalized, so they can be handled by JS. - for (let i = 0; i < table.length; ++i) { - JSON.stringify(table.get(i)); - } - if (typeName != "arrayref") { // Grow table with explicit value. table.grow(2, wasm.exported(wasm.createStruct)); diff --git a/deps/v8/test/mjsunit/wasm/runtime-type-canonicalization.js b/deps/v8/test/mjsunit/wasm/runtime-type-canonicalization.js index fbd758ddc81014..1d87812cf47d7f 100644 --- a/deps/v8/test/mjsunit/wasm/runtime-type-canonicalization.js +++ b/deps/v8/test/mjsunit/wasm/runtime-type-canonicalization.js @@ -20,12 +20,12 @@ let struct_init = builder.addFunction("struct_init", let test_pass = builder.addFunction("test_pass", makeSig([kWasmDataRef], [kWasmI32])) .addBody([kExprLocalGet, 0, - kGCPrefix, kExprRefTest, identical_struct_index]) + kGCPrefix, kExprRefTestDeprecated, identical_struct_index]) .exportFunc(); let test_fail = builder.addFunction("test_fail", makeSig([kWasmDataRef], [kWasmI32])) .addBody([kExprLocalGet, 0, - kGCPrefix, kExprRefTest, distinct_struct_index]) + kGCPrefix, kExprRefTestDeprecated, distinct_struct_index]) .exportFunc(); (function TestCanonicalizationSameInstance() { diff --git a/deps/v8/test/mjsunit/wasm/stack-switching-export.js b/deps/v8/test/mjsunit/wasm/stack-switching-export.js index 3fbe979ab6f729..7b8c91e09efd0a 100644 --- a/deps/v8/test/mjsunit/wasm/stack-switching-export.js +++ b/deps/v8/test/mjsunit/wasm/stack-switching-export.js @@ -4,7 +4,6 @@ // Flags: --wasm-generic-wrapper --expose-gc --allow-natives-syntax // Flags: --experimental-wasm-stack-switching -// Flags: --experimental-wasm-type-reflection // This is a port of the generic-wrapper.js tests for the JS Promise Integration // variant of the wrapper. We don't suspend the stacks in this test, we only diff --git a/deps/v8/test/mjsunit/wasm/stack-switching.js b/deps/v8/test/mjsunit/wasm/stack-switching.js index 65d2704efa7446..667b2e01b0ae8b 100644 --- a/deps/v8/test/mjsunit/wasm/stack-switching.js +++ b/deps/v8/test/mjsunit/wasm/stack-switching.js @@ -3,8 +3,8 @@ // found in the LICENSE file. // Flags: --allow-natives-syntax --experimental-wasm-stack-switching -// Flags: --experimental-wasm-type-reflection --expose-gc -// Flags: --wasm-stack-switching-stack-size=100 +// Flags: --expose-gc --wasm-stack-switching-stack-size=100 +// Flags: --experimental-wasm-typed-funcref // We pick a small stack size to run the stack overflow test quickly, but big // enough to run all the tests. 
@@ -521,3 +521,58 @@ function TestNestedSuspenders(suspend) { /invalid suspender object for suspend/); } })(); + +(function SuspendCallRef() { + print(arguments.callee.name); + let builder = new WasmModuleBuilder(); + let funcref_type = builder.addType(kSig_i_r); + let table = builder.addTable(wasmRefNullType(funcref_type), 1) + .exportAs('table'); + builder.addFunction("test", kSig_i_r) + .addBody([ + kExprLocalGet, 0, + kExprI32Const, 0, kExprTableGet, table.index, + kExprCallRef, funcref_type, + ]).exportFunc(); + let instance = builder.instantiate(); + + let funcref = new WebAssembly.Function( + {parameters: ['externref'], results: ['i32']}, + () => Promise.resolve(42), + {suspending: 'first'}); + instance.exports.table.set(0, funcref); + + let exp = new WebAssembly.Function( + {parameters: [], results: ['externref']}, + instance.exports.test, + {promising: 'first'}); + assertPromiseResult(exp(), v => assertEquals(42, v)); +})(); + +(function SuspendCallIndirect() { + print(arguments.callee.name); + let builder = new WasmModuleBuilder(); + let functype = builder.addType(kSig_i_r); + let table = builder.addTable(kWasmFuncRef, 10, 10); + let callee = builder.addImport('m', 'f', kSig_i_r); + builder.addActiveElementSegment(table, wasmI32Const(0), [callee]); + builder.addFunction("test", kSig_i_r) + .addBody([ + kExprLocalGet, 0, + kExprI32Const, 0, + kExprCallIndirect, functype, table.index, + ]).exportFunc(); + + let create_promise = new WebAssembly.Function( + {parameters: ['externref'], results: ['i32']}, + () => Promise.resolve(42), + {suspending: 'first'}); + + let instance = builder.instantiate({m: {f: create_promise}}); + + let exp = new WebAssembly.Function( + {parameters: [], results: ['externref']}, + instance.exports.test, + {promising: 'first'}); + assertPromiseResult(exp(), v => assertEquals(42, v)); +})(); diff --git a/deps/v8/test/mjsunit/wasm/stringrefs-regressions.js b/deps/v8/test/mjsunit/wasm/stringrefs-regressions.js new file mode 100644 index 00000000000000..c12111f29d80d7 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/stringrefs-regressions.js @@ -0,0 +1,98 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-stringref --allow-natives-syntax +// We just want speculative inlining, but the "stress" variant doesn't like +// that flag for some reason, so use the GC flag which implies it. 
+// Flags: --experimental-wasm-gc + +d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); + +let kSig_w_v = makeSig([], [kWasmStringRef]); +let kSig_w_i = makeSig([kWasmI32], [kWasmStringRef]); +let kSig_v_w = makeSig([kWasmStringRef], []); + +(function () { + let huge_builder = new WasmModuleBuilder(); + huge_builder.addMemory(65001, undefined, false, false); + + huge_builder.addFunction("huge", kSig_w_v).exportFunc().addBody([ + kExprI32Const, 0, // address + ...wasmI32Const(65000 * 65536), // bytes + ...GCInstr(kExprStringNewUtf8), 0 // memory index + ]); + + let callee = huge_builder.addFunction("callee", kSig_w_i).addBody([ + kExprI32Const, 0, // address + kExprLocalGet, 0, // bytes + ...GCInstr(kExprStringNewUtf8), 0 // memory index + ]); + + let caller = huge_builder.addFunction("caller", kSig_i_i).exportFunc() + .addBody([ + kExprTry, kWasmI32, + kExprLocalGet, 0, + kExprCallFunction, callee.index, + kExprDrop, + kExprI32Const, 1, + kExprCatchAll, + kExprI32Const, 0, + kExprEnd + ]); + + let instance; + try { + instance = huge_builder.instantiate(); + // On 64-bit platforms, expect to see this message. + console.log("Instantiation successful, proceeding."); + } catch (e) { + // 32-bit builds don't have enough virtual memory, that's OK. + assertInstanceof(e, RangeError); + assertMatches(/Cannot allocate Wasm memory for new instance/, e.message, + 'Error message'); + return; + } + + // Bug 1: The Utf8Decoder can't handle more than kMaxInt bytes as input. + assertThrows(() => instance.exports.huge(), RangeError); + + // Bug 2: Exceptions created by the JS-focused strings infrastructure must + // be marked as uncatchable by Wasm. + let f1 = instance.exports.caller; + assertThrows(() => f1(2147483647), RangeError); + + // Bug 3: Builtin calls that have neither a kNoThrow annotation nor exception- + // handling support make the Wasm inliner sad. + for (let i = 0; i < 20; i++) f1(10); + %WasmTierUpFunction(instance, caller.index); + f1(10); +})(); + +let builder = new WasmModuleBuilder(); + +let concat_body = []; +// This doubles the string 26 times, i.e. multiplies its length with a factor +// of ~65 million. +for (let i = 0; i < 26; i++) { + concat_body.push(...[ + kExprLocalGet, 0, kExprLocalGet, 0, + ...GCInstr(kExprStringConcat), + kExprLocalSet, 0 + ]); +} + +let concat = + builder.addFunction('concat', kSig_v_w).exportFunc().addBody(concat_body); + +let instance = builder.instantiate(); + +// Bug 4: Throwing in StringAdd must clear the "thread in wasm" bit. +let f2 = instance.exports.concat; +assertThrows(() => f2("1234567890")); // 650M characters is too much. + +// Bug 5: Operations that can trap must not be marked as kEliminatable, +// otherwise the trap may be eliminated. +for (let i = 0; i < 3; i++) f2("a"); // 65M characters is okay. +%WasmTierUpFunction(instance, concat.index); +assertThrows(() => f2("1234567890")); // Optimized code still traps. 
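+ +// Note the pattern shared by the regression tests above: warm up on a benign +// input, force tier-up, then re-check the failing input, e.g. +//   for (let i = 0; i < 3; i++) f2('a');         // collect feedback +//   %WasmTierUpFunction(instance, concat.index); // compile with Turbofan +//   assertThrows(() => f2('1234567890'));        // the trap must survive +// Running the same assertion before and after tier-up ensures the optimizer +// cannot silently eliminate the trapping operation.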
diff --git a/deps/v8/test/mjsunit/wasm/type-based-optimizations.js b/deps/v8/test/mjsunit/wasm/type-based-optimizations.js index 40eb18b1046fed..5ac4debefb1180 100644 --- a/deps/v8/test/mjsunit/wasm/type-based-optimizations.js +++ b/deps/v8/test/mjsunit/wasm/type-based-optimizations.js @@ -33,11 +33,11 @@ d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); // while (true) { kExprLoop, kWasmVoid, // if (ref.test temp bottom1) { - kExprLocalGet, 2, kGCPrefix, kExprRefTest, bottom1, + kExprLocalGet, 2, kGCPrefix, kExprRefTestDeprecated, bottom1, kExprIf, kWasmVoid, // counter += ((bottom1) temp).field_2; - // TODO(manoskouk): Implement path-based type tracking so we can - // eliminate this check. + // Note: This cast should get optimized away with path-based type + // tracking. kExprLocalGet, 2, kGCPrefix, kExprRefCast, bottom1, kGCPrefix, kExprStructGet, bottom1, 2, kExprLocalGet, 3, kExprI32Add, kExprLocalSet, 3, diff --git a/deps/v8/test/mjsunit/wasm/type-reflection-with-externref.js b/deps/v8/test/mjsunit/wasm/type-reflection-with-externref.js index 40da63a57d2390..c324472e1c9cb2 100644 --- a/deps/v8/test/mjsunit/wasm/type-reflection-with-externref.js +++ b/deps/v8/test/mjsunit/wasm/type-reflection-with-externref.js @@ -7,6 +7,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); (function TestTableType() { + print(arguments.callee.name); let table = new WebAssembly.Table({initial: 1, element: "externref"}); let type = table.type(); assertEquals(1, type.minimum); @@ -22,6 +23,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestGlobalType() { + print(arguments.callee.name); let global = new WebAssembly.Global({value: "externref", mutable: true}); let type = global.type(); assertEquals("externref", type.value); @@ -48,6 +50,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionGlobalGetAndSet() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); let fun1 = new WebAssembly.Function({parameters:[], results:["i32"]}, _ => 7); let fun2 = new WebAssembly.Function({parameters:[], results:["i32"]}, _ => 9); @@ -80,6 +83,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionMultiTableSetAndCall() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); let v1 = 7; let v2 = 9; let v3 = 0.0; let f1 = new WebAssembly.Function({parameters:[], results:["i32"]}, _ => v1); diff --git a/deps/v8/test/mjsunit/wasm/type-reflection.js b/deps/v8/test/mjsunit/wasm/type-reflection.js index 1c88ed62b2e2df..f81980b2510218 100644 --- a/deps/v8/test/mjsunit/wasm/type-reflection.js +++ b/deps/v8/test/mjsunit/wasm/type-reflection.js @@ -7,6 +7,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); (function TestMemoryType() { + print(arguments.callee.name); let mem = new WebAssembly.Memory({initial: 1}); let type = mem.type(); assertEquals(1, type.minimum); @@ -22,6 +23,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestMemoryExports() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); builder.addMemory(1).exportMemoryAs("a") let module = new WebAssembly.Module(builder.toBuffer()); @@ -44,6 +46,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestMemoryImports() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); builder.addImportedMemory("m", "a", 1); let module = new WebAssembly.Module(builder.toBuffer()); @@ -68,6 +71,7 @@ 
d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestTableType() { + print(arguments.callee.name); let table = new WebAssembly.Table({initial: 1, element: "funcref"}); let type = table.type(); assertEquals(1, type.minimum); @@ -98,6 +102,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestTableExports() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); builder.addTable(kWasmAnyFunc, 20).exportAs("a"); let module = new WebAssembly.Module(builder.toBuffer()); @@ -122,6 +127,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestTableImports() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); builder.addImportedTable("m", "a", 20, undefined, kWasmAnyFunc); let module = new WebAssembly.Module(builder.toBuffer()); @@ -148,6 +154,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestGlobalType() { + print(arguments.callee.name); let global = new WebAssembly.Global({value: "i32", mutable: true}); let type = global.type(); assertEquals("i32", type.value); @@ -180,6 +187,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestGlobalExports() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); builder.addGlobal(kWasmI32).exportAs("a"); builder.addGlobal(kWasmF64, true).exportAs("b"); @@ -198,6 +206,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestGlobalImports() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); builder.addImportedGlobal("m", "a", kWasmI32); builder.addImportedGlobal("m", "b", kWasmF64, true); @@ -218,6 +227,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestMemoryConstructorWithMinimum() { + print(arguments.callee.name); let mem = new WebAssembly.Memory({minimum: 1}); assertTrue(mem instanceof WebAssembly.Memory); let type = mem.type(); @@ -252,6 +262,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestTableConstructorWithMinimum() { + print(arguments.callee.name); let table = new WebAssembly.Table({minimum: 1, element: 'funcref'}); assertTrue(table instanceof WebAssembly.Table); let type = table.type(); @@ -280,6 +291,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionConstructor() { + print(arguments.callee.name); let toolong = new Array(1000 + 1); let desc = Object.getOwnPropertyDescriptor(WebAssembly, 'Function'); assertEquals(typeof desc.value, 'function'); @@ -323,6 +335,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionConstructorWithWasmExportedFunction() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); builder.addFunction('func1', kSig_v_i).addBody([]).exportFunc(); @@ -343,6 +356,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionConstructorWithWasmJSFunction() { + print(arguments.callee.name); const func = new WebAssembly.Function({parameters: [], results: []}, _ => 0); assertDoesNotThrow( @@ -356,6 +370,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionConstructorNonArray1() { + print(arguments.callee.name); let log = []; // Populated with a log of accesses. let two = { toString: () => "2" }; // Just a fancy "2". 
let logger = new Proxy({ length: two, "0": "i32", "1": "f32"}, { @@ -368,6 +383,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionConstructorNonArray2() { + print(arguments.callee.name); let throw1 = { get length() { throw new Error("cannot see length"); }}; let throw2 = { length: { toString: _ => { throw new Error("no length") } } }; let throw3 = { length: "not a length value, this also throws" }; @@ -392,6 +408,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionConstructedFunction() { + print(arguments.callee.name); let fun = new WebAssembly.Function({parameters:[], results:[]}, _ => 0); assertTrue(fun instanceof WebAssembly.Function); assertTrue(fun instanceof Function); @@ -405,6 +422,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionExportedFunction() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); builder.addFunction("fun", kSig_v_v).addBody([]).exportFunc(); let instance = builder.instantiate(); @@ -421,6 +439,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionTypeOfConstructedFunction() { + print(arguments.callee.name); let testcases = [ {parameters:[], results:[]}, {parameters:["i32"], results:[]}, @@ -436,6 +455,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionTypeOfExportedFunction() { + print(arguments.callee.name); let testcases = [ [kSig_v_v, {parameters:[], results:[]}], [kSig_v_i, {parameters:["i32"], results:[]}], @@ -453,6 +473,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionExports() { + print(arguments.callee.name); let testcases = [ [kSig_v_v, {parameters:[], results:[]}], [kSig_v_i, {parameters:["i32"], results:[]}], @@ -472,6 +493,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionImports() { + print(arguments.callee.name); let testcases = [ [kSig_v_v, {parameters:[], results:[]}], [kSig_v_i, {parameters:["i32"], results:[]}], @@ -492,6 +514,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionConstructedCoercions() { + print(arguments.callee.name); let obj1 = { valueOf: _ => 123.45 }; let obj2 = { toString: _ => "456" }; let gcer = { valueOf: _ => gc() }; @@ -529,6 +552,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionTableSetI64() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); let fun = new WebAssembly.Function({parameters:[], results:["i64"]}, _ => 0n); let table = new WebAssembly.Table({element: "anyfunc", initial: 2}); @@ -550,6 +574,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionModuleImportMatchingSig() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); let fun = new WebAssembly.Function({parameters:[], results:["i32"]}, _ => 7); let fun_index = builder.addImport("m", "fun", kSig_i_v) @@ -563,6 +588,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function TestFunctionModuleImportMismatchingSig() { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); let fun1 = new WebAssembly.Function({parameters:[], results:[]}, _ => 7); let fun2 = new WebAssembly.Function({parameters:["i32"], results:[]}, _ => 8); @@ -585,6 +611,7 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); })(); (function 
TestFunctionModuleImportReExport () { + print(arguments.callee.name); let builder = new WasmModuleBuilder(); let fun = new WebAssembly.Function({parameters:[], results:["i32"]}, _ => 7); let fun_index = builder.addImport("m", "fun", kSig_i_v) @@ -594,3 +621,28 @@ d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); assertSame(instance.exports.fun1, instance.exports.fun2); assertSame(fun, instance.exports.fun1); })(); + +(function TestCallIndirectJSFunction() { + print(arguments.callee.name); + let imp = new WebAssembly.Function( + {parameters:["i32", "i32", "i32"], results:["i32"]}, + function(a, b, c) { if (c) return a; return b; }); + + let builder = new WasmModuleBuilder(); + let sig_index = builder.addType(kSig_i_iii); + let fun_index = builder.addImport("m", "imp", kSig_i_iii) + builder.addTable(kWasmFuncRef, 1, 1); + let table_index = 0; + let segment = builder.addActiveElementSegment( + table_index, wasmI32Const(0), [[kExprRefFunc, 0]], kWasmFuncRef); + + let main = builder.addFunction("rc", kSig_i_i) + .addBody([...wasmI32Const(-2), kExprI32Const, 3, kExprLocalGet, 0, + kExprI32Const, 0, kExprCallIndirect, sig_index, table_index]) + .exportFunc(); + + let instance = builder.instantiate({ m: { imp: imp }}); + + assertEquals(instance.exports.rc(1), -2); + assertEquals(instance.exports.rc(0), 3); +})(); diff --git a/deps/v8/test/mjsunit/wasm/wasm-array-js-interop.js b/deps/v8/test/mjsunit/wasm/wasm-array-js-interop.js deleted file mode 100644 index d1b663e425f203..00000000000000 --- a/deps/v8/test/mjsunit/wasm/wasm-array-js-interop.js +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2021 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// Flags: --allow-natives-syntax --experimental-wasm-gc --wasm-gc-js-interop -// Flags: --expose-gc - -d8.file.execute("test/mjsunit/wasm/wasm-module-builder.js"); - -const kIterationsCountForICProgression = 20; - -function createArray_i() { - let builder = new WasmModuleBuilder(); - - const type_index = builder.addArray(kWasmI32, true); - - let sig_a_i = makeSig_r_x(kWasmDataRef, kWasmI32); - let sig_i_ai = makeSig([kWasmDataRef, kWasmI32], [kWasmI32]); - let sig_v_aii = makeSig([kWasmDataRef, kWasmI32, kWasmI32], []); - - builder.addFunction("new_array", sig_a_i) - .addBody([ - kExprLocalGet, 0, // -- - kExprI32Const, 10, // -- - kGCPrefix, kExprArrayNew, type_index]) // -- - .exportAs("new_array"); - - builder.addFunction("array_get", sig_i_ai) - .addBody([ - kExprLocalGet, 0, // -- - kGCPrefix, kExprRefCast, type_index, // -- - kExprLocalGet, 1, // -- - kGCPrefix, kExprArrayGet, type_index]) // -- - .exportAs("array_get"); - - builder.addFunction("array_set", sig_v_aii) - .addBody([ - kExprLocalGet, 0, // -- - kGCPrefix, kExprRefCast, type_index, // -- - kExprLocalGet, 1, // -- - kExprLocalGet, 2, // -- - kGCPrefix, kExprArraySet, type_index]) // -- - .exportAs("array_set"); - - let instance = builder.instantiate(); - let new_array = instance.exports.new_array; - let array_get = instance.exports.array_get; - let array_set = instance.exports.array_set; - - let value = 42; - let o = new_array(value); - %DebugPrint(o); - assertEquals(10, o.length); - for (let i = 0; i < o.length; i++) { - let res; - res = array_get(o, i); - assertEquals(value, res); - - array_set(o, i, i); - res = array_get(o, i); - assertEquals(i, res); - } - return o; -} - -(function TestSimpleArrayInterop() { - function f(o) { - assertEquals(10, o.length); - for (let i = 0; i 
< o.length; i++) { - let len = o.length; - assertEquals(10, len); - let v = o[i]; - assertEquals(i, v); - } - } - - let o = createArray_i(); - %DebugPrint(o); - - f(o); - gc(); -})(); diff --git a/deps/v8/test/mjsunit/wasm/wasm-gc-js-ref.js b/deps/v8/test/mjsunit/wasm/wasm-gc-js-ref.js index 7b5cb91e605ba9..f59d10b236126b 100644 --- a/deps/v8/test/mjsunit/wasm/wasm-gc-js-ref.js +++ b/deps/v8/test/mjsunit/wasm/wasm-gc-js-ref.js @@ -22,18 +22,15 @@ let instance = (() => { })(); let obj = instance.exports.createStruct(123); -// The struct is wrapped in the special wrapper. -// It doesn't have any observable properties. -assertTrue(obj instanceof Object); +// The struct is opaque and doesn't have any observable properties. +assertFalse(obj instanceof Object); assertEquals([], Object.getOwnPropertyNames(obj)); -assertEquals("{}", JSON.stringify(obj)); // It can be passed as externref without any observable change. let passObj = instance.exports.passObj; -obj = passObj(obj); -assertTrue(obj instanceof Object); -assertEquals([], Object.getOwnPropertyNames(obj)); -assertEquals("{}", JSON.stringify(obj)); +let obj2 = passObj(obj); +assertFalse(obj2 instanceof Object); +assertEquals([], Object.getOwnPropertyNames(obj2)); +assertSame(obj, obj2); // A JavaScript object can be passed as externref. -// It will not be wrapped. -obj = passObj({"hello": "world"}); -assertEquals({"hello": "world"}, obj); +let jsObject = {"hello": "world"}; +assertSame(jsObject, passObj(jsObject)); diff --git a/deps/v8/test/mjsunit/wasm/wasm-invalid-local.js b/deps/v8/test/mjsunit/wasm/wasm-invalid-local.js new file mode 100644 index 00000000000000..1bf6c1e9cd2218 --- /dev/null +++ b/deps/v8/test/mjsunit/wasm/wasm-invalid-local.js @@ -0,0 +1,19 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Flags: --experimental-wasm-gc + +d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js'); + +(function TestLocalInvalidHeapType() { + let builder = new WasmModuleBuilder(); + builder.addFunction('testEqLocal', + makeSig([], [kWasmAnyRef])) + .addLocals(wasmRefNullType(123), 1) // 123 is not a valid type index + .addBody([ + kExprRefNull, kNullRefCode, + kExprLocalSet, 0, + ]).exportFunc(); + assertThrows(() => builder.instantiate(), WebAssembly.CompileError); +})(); diff --git a/deps/v8/test/mjsunit/wasm/wasm-module-builder.js b/deps/v8/test/mjsunit/wasm/wasm-module-builder.js index 390337def08690..18928fe4be3b1b 100644 --- a/deps/v8/test/mjsunit/wasm/wasm-module-builder.js +++ b/deps/v8/test/mjsunit/wasm/wasm-module-builder.js @@ -507,11 +507,13 @@ let kExprArrayNewElem = 0x1f; let kExprI31New = 0x20; let kExprI31GetS = 0x21; let kExprI31GetU = 0x22; -let kExprRefTest = 0x44; +let kExprRefTest = 0x40; +let kExprRefTestNull = 0x48; +let kExprRefTestDeprecated = 0x44; let kExprRefCast = 0x45; let kExprBrOnCast = 0x46; let kExprBrOnCastFail = 0x47; -let kExprRefCastNop = 0x48; +let kExprRefCastNop = 0x4c; let kExprRefIsData = 0x51; let kExprRefIsI31 = 0x52; let kExprRefIsArray = 0x53; @@ -1053,9 +1055,6 @@ class Binary { } emit_init_expr(expr) { - // TODO(manoskouk): This is redundant, remove it once we are confident we - // check everything. 
- checkExpr(expr); this.emit_bytes(expr); this.emit_u8(kExprEnd); } diff --git a/deps/v8/test/mjsunit/web-snapshot/web-snapshot-array.js b/deps/v8/test/mjsunit/web-snapshot/web-snapshot-array.js index 04802579e6ece0..d1d021ab8a5e69 100644 --- a/deps/v8/test/mjsunit/web-snapshot/web-snapshot-array.js +++ b/deps/v8/test/mjsunit/web-snapshot/web-snapshot-array.js @@ -107,3 +107,27 @@ d8.file.execute('test/mjsunit/web-snapshot/web-snapshot-helpers.js'); // cctests. assertEquals('foobarfoo', foo.array.join('')); })(); + +(function TestArrayWithSlackElements() { + function createObjects() { + globalThis.foo = { + array: [], + doubleArray: [], + objectArray: [] + }; + for (let i = 0; i < 100; ++i) { + globalThis.foo.array.push(i); + globalThis.foo.doubleArray.push(i + 0.1); + globalThis.foo.objectArray.push({}); + } + } + const { foo } = takeAndUseWebSnapshot(createObjects, ['foo']); + assertEquals(100, foo.array.length); + assertEquals(100, foo.doubleArray.length); + assertEquals(100, foo.objectArray.length); + for (let i = 0; i < 100; ++i){ + assertEquals(i, foo.array[i]); + assertEquals(i + 0.1, foo.doubleArray[i]); + assertEquals({}, foo.objectArray[i]); + } +})(); diff --git a/deps/v8/test/mjsunit/web-snapshot/web-snapshot-object.js b/deps/v8/test/mjsunit/web-snapshot/web-snapshot-object.js index b81122a96aa60d..394bb3de55e678 100644 --- a/deps/v8/test/mjsunit/web-snapshot/web-snapshot-object.js +++ b/deps/v8/test/mjsunit/web-snapshot/web-snapshot-object.js @@ -186,3 +186,19 @@ d8.file.execute('test/mjsunit/web-snapshot/web-snapshot-helpers.js'); assertEquals(['4394967296'], Object.getOwnPropertyNames(obj)); assertEquals['lol', obj[4394967296]]; })(); + +(function TestObjectWithSlackElements() { + function createObjects() { + globalThis.foo = {}; + globalThis.bar = {}; + for (let i = 0; i < 100; ++i) { + globalThis.foo[i] = i; + globalThis.bar[i] = {}; + } + } + const { foo, bar } = takeAndUseWebSnapshot(createObjects, ['foo', 'bar']); + for (let i = 0; i < 100; ++i) { + assertEquals(i, foo[i]); + assertEquals({}, bar[i]); + } +})(); diff --git a/deps/v8/test/mkgrokdump/mkgrokdump.cc b/deps/v8/test/mkgrokdump/mkgrokdump.cc index 800964e26e174c..4e532a5ab2294c 100644 --- a/deps/v8/test/mkgrokdump/mkgrokdump.cc +++ b/deps/v8/test/mkgrokdump/mkgrokdump.cc @@ -31,7 +31,7 @@ static const char* kHeader = "# yapf: disable\n\n"; // Debug builds emit debug code, affecting code object sizes. 
-#ifndef DEBUG +#if !defined(DEBUG) && defined(V8_ENABLE_SANDBOX) static const char* kBuild = "shipping"; #else static const char* kBuild = "non-shipping"; @@ -102,7 +102,8 @@ static void DumpKnownObject(FILE* out, i::Heap* heap, const char* space_name, static void DumpSpaceFirstPageAddress(FILE* out, i::BaseSpace* space, i::Address first_page) { const char* name = space->name(); - i::Tagged_t compressed = i::CompressTagged(first_page); + i::Tagged_t compressed = + i::V8HeapCompressionScheme::CompressTagged(first_page); uintptr_t unsigned_compressed = static_cast<uint32_t>(compressed); i::PrintF(out, " 0x%08" V8PRIxPTR ": \"%s\",\n", unsigned_compressed, name); } diff --git a/deps/v8/test/test262/test262.status b/deps/v8/test/test262/test262.status index d7b120af66841c..529aab96afd76d 100644 --- a/deps/v8/test/test262/test262.status +++ b/deps/v8/test/test262/test262.status @@ -225,10 +225,6 @@ 'language/expressions/async-generator/generator-created-after-decl-inst': [FAIL], 'language/statements/async-generator/generator-created-after-decl-inst': [FAIL], - # https://bugs.chromium.org/p/v8/issues/detail?id=13275 - 'language/statements/async-generator/yield-star-promise-not-unwrapped': [FAIL], - 'language/statements/async-generator/yield-star-return-then-getter-ticks': [FAIL], - # https://bugs.chromium.org/p/v8/issues/detail?id=9875 'language/expressions/coalesce/tco-pos-undefined': [FAIL], 'language/expressions/coalesce/tco-pos-null': [FAIL], @@ -321,76 +317,28 @@ 'built-ins/RegExp/property-escapes/generated/strings/RGI_Emoji_ZWJ_Sequence-negative-CharacterClass': [SKIP], 'built-ins/RegExp/property-escapes/generated/strings/RGI_Emoji_ZWJ_Sequence-negative-P': [SKIP], 'built-ins/RegExp/property-escapes/generated/strings/RGI_Emoji_ZWJ_Sequence-negative-u': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-difference-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-difference-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-difference-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-difference-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-difference-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-difference-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-difference-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-difference-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-difference-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-difference-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-escape-difference-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-escape-difference-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-intersection-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-intersection-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-intersection-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-intersection-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-escape-intersection-property-of-strings-escape': [SKIP], 
'built-ins/RegExp/unicodeSets/generated/character-class-escape-intersection-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-union-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-union-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-union-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-escape-union-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-escape-union-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-escape-union-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-intersection-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-intersection-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-intersection-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-intersection-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-intersection-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-intersection-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-union-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-union-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-union-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-class-union-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-union-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-class-union-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-difference-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-difference-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-difference-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-difference-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-difference-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-difference-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-intersection-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-intersection-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-intersection-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-intersection-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-intersection-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-intersection-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-property-of-strings-escape': [SKIP], 
'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-string-literal': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-union-character-class-escape': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-union-character-class': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-union-character': [SKIP], - 'built-ins/RegExp/unicodeSets/generated/character-union-character-property-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-union-property-of-strings-escape': [SKIP], 'built-ins/RegExp/unicodeSets/generated/character-union-string-literal': [SKIP], 'built-ins/RegExp/unicodeSets/generated/property-of-strings-escape-difference-character-class-escape': [SKIP], @@ -449,116 +397,24 @@ 'intl402/Temporal/Calendar/prototype/dateFromFields/infinity-throws-rangeerror': [FAIL], 'intl402/Temporal/Calendar/prototype/monthDayFromFields/infinity-throws-rangeerror': [FAIL], 'intl402/Temporal/Calendar/prototype/yearMonthFromFields/infinity-throws-rangeerror': [FAIL], - 'intl402/Temporal/Duration/prototype/round/relativeto-string-datetime': [FAIL], 'intl402/Temporal/Duration/prototype/total/relativeto-string-datetime': [FAIL], 'intl402/Temporal/PlainYearMonth/from/argument-object': [FAIL], - 'built-ins/Temporal/Instant/prototype/round/rounding-direction': [FAIL], - 'built-ins/Temporal/Instant/prototype/toString/rounding-direction': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/since/roundingmode-ceil-basic': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/since/roundingmode-floor-basic': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/until/roundingmode-ceil-basic': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/until/roundingmode-floor-basic': [FAIL], - 'built-ins/Temporal/ZonedDateTime/prototype/toString/rounding-direction': [FAIL], 'intl402/Temporal/PlainDateTime/prototype/withPlainDate/argument-string-calendar': [FAIL], - # https://github.com/tc39/test262/issues/3553 - 'built-ins/Temporal/Calendar/from/calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/dateAdd/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/dateUntil/argument-propertybag-calendar-number': [FAIL], -
'built-ins/Temporal/Calendar/prototype/day/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/dayOfWeek/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/dayOfYear/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/daysInMonth/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/daysInWeek/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/daysInYear/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/inLeapYear/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/month/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/monthCode/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/monthsInYear/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/weekOfYear/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Calendar/prototype/year/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDate/compare/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDate/from/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDate/prototype/equals/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDate/prototype/since/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDate/prototype/until/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDateTime/compare/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDateTime/from/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/equals/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/since/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/until/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/withPlainDate/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainMonthDay/from/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainMonthDay/prototype/equals/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainTime/prototype/toPlainDateTime/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainTime/prototype/toZonedDateTime/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainYearMonth/compare/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainYearMonth/from/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainYearMonth/prototype/equals/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainYearMonth/prototype/since/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/PlainYearMonth/prototype/until/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/TimeZone/prototype/getInstantFor/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/TimeZone/prototype/getPossibleInstantsFor/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/ZonedDateTime/compare/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/ZonedDateTime/from/argument-propertybag-calendar-number': [FAIL], - 
'built-ins/Temporal/ZonedDateTime/prototype/equals/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/ZonedDateTime/prototype/since/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/ZonedDateTime/prototype/until/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/ZonedDateTime/prototype/withPlainDate/argument-propertybag-calendar-number': [FAIL], - 'intl402/Temporal/Calendar/prototype/era/argument-propertybag-calendar-number': [FAIL], - 'intl402/Temporal/Calendar/prototype/eraYear/argument-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Instant/prototype/toZonedDateTime/calendar-number': [FAIL], - 'built-ins/Temporal/Now/plainDate/calendar-number': [FAIL], - 'built-ins/Temporal/Now/plainDateTime/calendar-number': [FAIL], - 'built-ins/Temporal/Now/zonedDateTime/calendar-number': [FAIL], - 'built-ins/Temporal/PlainDate/calendar-number': [FAIL], - 'built-ins/Temporal/PlainDate/prototype/withCalendar/calendar-number': [FAIL], - 'built-ins/Temporal/PlainDateTime/calendar-number': [FAIL], - 'built-ins/Temporal/PlainDateTime/prototype/withCalendar/calendar-number': [FAIL], - 'built-ins/Temporal/PlainMonthDay/calendar-number': [FAIL], - 'built-ins/Temporal/PlainYearMonth/calendar-number': [FAIL], - 'built-ins/Temporal/TimeZone/prototype/getPlainDateTimeFor/calendar-number': [FAIL], - 'built-ins/Temporal/ZonedDateTime/calendar-number': [FAIL], - 'built-ins/Temporal/ZonedDateTime/prototype/withCalendar/calendar-number': [FAIL], - 'built-ins/Temporal/Duration/prototype/add/relativeto-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Duration/prototype/round/relativeto-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Duration/prototype/subtract/relativeto-propertybag-calendar-number': [FAIL], - 'built-ins/Temporal/Duration/prototype/total/relativeto-propertybag-calendar-number': [FAIL], - 'intl402/Temporal/Calendar/prototype/dateFromFields/order-of-operations': [FAIL], 'intl402/Temporal/Calendar/prototype/monthDayFromFields/order-of-operations': [FAIL], 'intl402/Temporal/Calendar/prototype/yearMonthFromFields/order-of-operations': [FAIL], - 'intl402/Temporal/Duration/compare/relativeto-hour': [FAIL], 'built-ins/Temporal/Duration/prototype/total/relativeto-zoneddatetime-with-fractional-days-different-sign': [FAIL], 'built-ins/Temporal/Duration/prototype/total/relativeto-zoneddatetime-with-fractional-days': [FAIL], - 'intl402/Temporal/TimeZone/prototype/getNextTransition/subtract-second-and-nanosecond-from-last-transition': [FAIL], - 'intl402/Temporal/TimeZone/prototype/getPreviousTransition/nanoseconds-subtracted-or-added-at-dst-transition': [FAIL], - 'intl402/Temporal/TimeZone/prototype/getOffsetNanosecondsFor/nanoseconds-subtracted-or-added-at-dst-transition': [FAIL], - 'intl402/Temporal/TimeZone/prototype/getPlainDateTimeFor/dst': [FAIL], - 'staging/Temporal/Duration/old/add': [FAIL], - 'staging/Temporal/Duration/old/limits': [FAIL], 'staging/Temporal/Duration/old/round': [FAIL], - 'staging/Temporal/Duration/old/subtract': [FAIL], - 'staging/Temporal/Duration/old/toString': [FAIL], 'staging/Temporal/Duration/old/total': [FAIL], - 'staging/Temporal/Regex/old/plaintime': [FAIL], - 'staging/Temporal/Regex/old/timezone': [FAIL], - 'staging/Temporal/TimeZone/old/subminute-offset': [FAIL], 'staging/Temporal/ZonedDateTime/old/construction-and-properties': [FAIL], 'staging/Temporal/ZonedDateTime/old/dst-math': [FAIL], - 'staging/Temporal/ZonedDateTime/old/dst-properties': [FAIL], 
'staging/Temporal/ZonedDateTime/old/equals': [FAIL], - 'staging/Temporal/ZonedDateTime/old/property-bags': [FAIL], - 'staging/Temporal/ZonedDateTime/old/round': [FAIL], - 'staging/Temporal/ZonedDateTime/old/since': [FAIL], - 'staging/Temporal/ZonedDateTime/old/string-parsing': [FAIL], 'staging/Temporal/ZonedDateTime/old/toPlainMonthDay': [FAIL], 'staging/Temporal/ZonedDateTime/old/toPlainYearMonth': [FAIL], - 'staging/Temporal/ZonedDateTime/old/toString': [FAIL], - 'staging/Temporal/ZonedDateTime/old/until': [FAIL], - 'staging/Temporal/ZonedDateTime/old/with': [FAIL], # UBSan complain about static_cast<int32_t> from double in AddISODate() 'built-ins/Temporal/Calendar/prototype/dateAdd/argument-duration-years-and-months-number-max-value': [SKIP], @@ -578,64 +434,175 @@ 'staging/Intl402/Temporal/old/yearmonth-toLocaleString': [FAIL], 'staging/Intl402/Temporal/old/zoneddatetime-toLocaleString': [FAIL], - # https://github.com/tc39/proposal-intl-numberformat-v3/pull/107 - 'intl402/NumberFormat/test-option-useGrouping': [FAIL], + 'built-ins/Temporal/Calendar/from/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/dateAdd/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/day/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/dayOfWeek/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/dayOfYear/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/daysInMonth/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/daysInWeek/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/daysInYear/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/inLeapYear/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/month/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/monthCode/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/monthsInYear/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/weekOfYear/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Calendar/prototype/year/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Instant/prototype/toZonedDateTime/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Now/plainDate/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Now/plainDateTime/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/Now/zonedDateTime/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDate/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDate/from/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDate/prototype/equals/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDate/prototype/since/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDate/prototype/until/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDate/prototype/withCalendar/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDateTime/calendar-case-insensitive': [FAIL], + 
'built-ins/Temporal/PlainDateTime/from/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/equals/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/since/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/until/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/withCalendar/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/withPlainDate/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainMonthDay/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainMonthDay/from/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainMonthDay/prototype/equals/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainTime/prototype/toPlainDateTime/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainTime/prototype/toZonedDateTime/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainYearMonth/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainYearMonth/from/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainYearMonth/prototype/equals/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainYearMonth/prototype/since/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/PlainYearMonth/prototype/until/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/TimeZone/prototype/getInstantFor/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/TimeZone/prototype/getPlainDateTimeFor/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/TimeZone/prototype/getPossibleInstantsFor/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/ZonedDateTime/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/ZonedDateTime/from/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/equals/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/since/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/until/argument-propertybag-calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/withCalendar/calendar-case-insensitive': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/withPlainDate/argument-propertybag-calendar-case-insensitive': [FAIL], + # intl402 + 'intl402/Temporal/Calendar/calendar-case-insensitive': [FAIL], + 'intl402/Temporal/Calendar/from/calendar-case-insensitive': [FAIL], + 'intl402/Temporal/Calendar/prototype/era/argument-propertybag-calendar-case-insensitive': [FAIL], + 'intl402/Temporal/Calendar/prototype/eraYear/argument-propertybag-calendar-case-insensitive': [FAIL], + 'intl402/Temporal/TimeZone/from/timezone-case-insensitive': [FAIL], + + 'built-ins/Temporal/Duration/compare/precision-exact-mathematical-values-1': [FAIL], + 'built-ins/Temporal/Duration/compare/precision-exact-mathematical-values-2': [FAIL], + 'built-ins/Temporal/Duration/prototype/round/precision-exact-in-balance-duration-relative-months': [FAIL], + 'built-ins/Temporal/Duration/prototype/round/precision-exact-in-balance-duration-relative-weeks': [FAIL], + 
'built-ins/Temporal/Duration/prototype/round/precision-exact-in-balance-duration-relative-years-days': [FAIL], + 'built-ins/Temporal/Duration/prototype/round/precision-exact-in-balance-duration-relative-years-months': [FAIL], + 'built-ins/Temporal/Duration/prototype/round/precision-exact-in-balance-duration-relative-years-with-calendar': [FAIL], + 'built-ins/Temporal/Duration/prototype/round/precision-exact-in-round-duration': [FAIL], + 'built-ins/Temporal/Duration/prototype/round/roundingmode-ceil': [FAIL], + 'built-ins/Temporal/Duration/prototype/round/roundingmode-floor': [FAIL], + 'built-ins/Temporal/Duration/prototype/round/roundingmode-trunc': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/since/roundingmode-ceil': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/since/roundingmode-expand': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/since/roundingmode-floor': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/until/roundingmode-ceil': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/until/roundingmode-expand': [FAIL], + 'built-ins/Temporal/PlainDateTime/prototype/until/roundingmode-floor': [FAIL], + 'built-ins/Temporal/PlainTime/prototype/add/precision-exact-mathematical-values-1': [FAIL], + 'built-ins/Temporal/PlainTime/prototype/add/precision-exact-mathematical-values-2': [FAIL], + 'built-ins/Temporal/PlainTime/prototype/add/precision-exact-mathematical-values-3': [FAIL], + 'built-ins/Temporal/PlainTime/prototype/subtract/precision-exact-mathematical-values-1': [FAIL], + 'built-ins/Temporal/PlainTime/prototype/subtract/precision-exact-mathematical-values-2': [FAIL], + 'built-ins/Temporal/PlainTime/prototype/subtract/precision-exact-mathematical-values-3': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/since/roundingmode-ceil': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/since/roundingmode-expand': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/since/roundingmode-floor': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/until/roundingmode-ceil': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/until/roundingmode-expand': [FAIL], + 'built-ins/Temporal/ZonedDateTime/prototype/until/roundingmode-floor': [FAIL], + 'intl402/Temporal/ZonedDateTime/prototype/withCalendar/calendar-case-insensitive': [FAIL], + + # https://bugs.chromium.org/p/v8/issues/detail?id=13342 + 'built-ins/RegExp/property-escapes/generated/Alphabetic': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Assigned': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Cased': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Case_Ignorable': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Changes_When_NFKC_Casefolded': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Diacritic': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Emoji': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Emoji_Modifier_Base': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Emoji_Presentation': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Cased_Letter': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Decimal_Number': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Format': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Letter': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Lowercase_Letter': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Mark': [FAIL], + 
'built-ins/RegExp/property-escapes/generated/General_Category_-_Modifier_Letter': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Nonspacing_Mark': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Number': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Other': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Other_Letter': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Other_Number': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Other_Punctuation': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Other_Symbol': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Punctuation': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Spacing_Mark': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Symbol': [FAIL], + 'built-ins/RegExp/property-escapes/generated/General_Category_-_Unassigned': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Grapheme_Base': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Grapheme_Extend': [FAIL], + 'built-ins/RegExp/property-escapes/generated/ID_Continue': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Ideographic': [FAIL], + 'built-ins/RegExp/property-escapes/generated/ID_Start': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Lowercase': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Arabic': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Common': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Cyrillic': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Devanagari': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Egyptian_Hieroglyphs': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Arabic': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Common': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Cyrillic': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Devanagari': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Egyptian_Hieroglyphs': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Han': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Hiragana': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Kannada': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Katakana': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Kawi': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Khojki': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Lao': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Latin': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_Extensions_-_Nag_Mundari': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Han': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Hiragana': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Kannada': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Katakana': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Kawi': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Khojki': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Lao': 
[FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Latin': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Script_-_Nag_Mundari': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Sentence_Terminal': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Soft_Dotted': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Terminal_Punctuation': [FAIL], + 'built-ins/RegExp/property-escapes/generated/Unified_Ideograph': [FAIL], + 'built-ins/RegExp/property-escapes/generated/XID_Continue': [FAIL], + 'built-ins/RegExp/property-escapes/generated/XID_Start': [FAIL], + 'language/identifiers/part-unicode-15.0.0-class-escaped': [FAIL], + 'language/identifiers/part-unicode-15.0.0-class': [FAIL], + 'language/identifiers/part-unicode-15.0.0-escaped': [FAIL], + 'language/identifiers/part-unicode-15.0.0': [FAIL], + 'language/identifiers/start-unicode-15.0.0-class-escaped': [FAIL], + 'language/identifiers/start-unicode-15.0.0-class': [FAIL], + 'language/identifiers/start-unicode-15.0.0-escaped': [FAIL], + 'language/identifiers/start-unicode-15.0.0': [FAIL], # https://bugs.chromium.org/p/v8/issues/detail?id=11660 - 'intl402/DurationFormat/prototype/prototype_attributes': [FAIL], - 'intl402/DurationFormat/prototype/constructor/prop-desc': [FAIL], - 'intl402/DurationFormat/prototype/constructor/value': [FAIL], - 'intl402/DurationFormat/prototype/format/length': [FAIL], - 'intl402/DurationFormat/prototype/format/name': [FAIL], - 'intl402/DurationFormat/prototype/format/prop-desc': [FAIL], - 'intl402/DurationFormat/prototype/format/throw-invoked-as-func': [FAIL], - 'intl402/DurationFormat/prototype/formatToParts/length': [FAIL], - 'intl402/DurationFormat/prototype/formatToParts/name': [FAIL], - 'intl402/DurationFormat/prototype/formatToParts/prop-desc': [FAIL], - 'intl402/DurationFormat/prototype/formatToParts/throw-invoked-as-func': [FAIL], - 'intl402/DurationFormat/prototype/resolvedOptions/length': [FAIL], - 'intl402/DurationFormat/prototype/resolvedOptions/name': [FAIL], - 'intl402/DurationFormat/prototype/resolvedOptions/prop-desc': [FAIL], - 'intl402/DurationFormat/prototype/resolvedOptions/throw-invoked-as-func': [FAIL], - 'intl402/DurationFormat/prototype/toStringTag/toString': [FAIL], - 'intl402/DurationFormat/prototype/toStringTag/toStringTag': [FAIL], - 'intl402/DurationFormat/constructor-locales-invalid': [FAIL], - 'intl402/DurationFormat/constructor-locales-valid': [FAIL], - 'intl402/DurationFormat/constructor-options-defaults': [FAIL], - 'intl402/DurationFormat/constructor-options-fractionalDigits-invalid': [FAIL], - 'intl402/DurationFormat/constructor-options-fractionalDigits-valid': [FAIL], - 'intl402/DurationFormat/constructor-options-invalid': [FAIL], - 'intl402/DurationFormat/constructor-options-localeMatcher-invalid': [FAIL], - 'intl402/DurationFormat/constructor-options-localeMatcher-valid': [FAIL], - 'intl402/DurationFormat/constructor-options-numberingSystem-invalid': [FAIL], - 'intl402/DurationFormat/constructor-options-numberingSystem-valid': [FAIL], - 'intl402/DurationFormat/constructor-options-order': [FAIL], - 'intl402/DurationFormat/constructor-options-style-invalid': [FAIL], - 'intl402/DurationFormat/constructor-options-style-valid': [FAIL], - 'intl402/DurationFormat/extensibility': [FAIL], - 'intl402/DurationFormat/length': [FAIL], - 'intl402/DurationFormat/name': [FAIL], - 'intl402/DurationFormat/newtarget-undefined': [FAIL], - 'intl402/DurationFormat/prop-desc': [FAIL], - 'intl402/DurationFormat/prototype': [FAIL], - 
'intl402/DurationFormat/supportedLocalesOf/basic': [FAIL], - 'intl402/DurationFormat/supportedLocalesOf/branding': [FAIL], - 'intl402/DurationFormat/supportedLocalesOf/length': [FAIL], - 'intl402/DurationFormat/supportedLocalesOf/locales-empty': [FAIL], - 'intl402/DurationFormat/supportedLocalesOf/locales-invalid': [FAIL], - 'intl402/DurationFormat/supportedLocalesOf/locales-specific': [FAIL], - 'intl402/DurationFormat/supportedLocalesOf/name': [FAIL], - 'intl402/DurationFormat/supportedLocalesOf/prop-desc': [FAIL], - 'intl402/DurationFormat/prototype/format/invalid-negative-duration-throws': [FAIL], - 'intl402/DurationFormat/prototype/formatToParts/invalid-negative-duration-throws': [FAIL], - 'intl402/DurationFormat/prototype/format/basic-format-en': [FAIL], - 'intl402/DurationFormat/prototype/format/branding': [FAIL], - 'intl402/DurationFormat/prototype/format/invalid-arguments-throws': [FAIL], - 'intl402/DurationFormat/prototype/format/not-a-constructor': [FAIL], + # https://github.com/tc39/proposal-intl-duration-format/issues/114 'intl402/DurationFormat/prototype/format/style-options-en': [FAIL], - 'intl402/DurationFormat/prototype/formatToParts/branding': [FAIL], - 'intl402/DurationFormat/prototype/formatToParts/invalid-arguments-throws': [FAIL], - 'intl402/DurationFormat/prototype/formatToParts/not-a-constructor': [FAIL], # https://bugs.chromium.org/p/v8/issues/detail?id=12763 'language/expressions/class/decorator/syntax/class-valid/decorator-member-expr-private-identifier': [FAIL], @@ -679,9 +646,6 @@ 'built-ins/Date/prototype/setUTCMonth/arg-coercion-order': [FAIL], 'built-ins/Date/prototype/setUTCSeconds/arg-coercion-order': [FAIL], - # https://bugs.chromium.org/p/v8/issues/detail?id=12044 - 'built-ins/Array/prototype/Symbol.unscopables/array-grouping': [FAIL], - # https://bugs.chromium.org/p/v8/issues/detail?id=12681 'built-ins/Array/prototype/push/set-length-zero-array-length-is-non-writable': [FAIL], @@ -793,6 +757,27 @@ 'built-ins/RegExp/named-groups/unicode-property-names-valid': [SKIP], 'built-ins/RegExp/named-groups/non-unicode-property-names-valid': [FAIL], 'built-ins/RegExp/match-indices/indices-array-unicode-property-names': [SKIP], + 'built-ins/RegExp/unicodeSets/generated/character-class-difference-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-class-escape-difference-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-class-escape-union-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-class-escape-intersection-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-class-intersection-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-class-union-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-difference-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-intersection-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-character-class-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-character': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-character-class-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-character-class': [PASS,FAIL], +
'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-character': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-difference-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-character': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-character-class': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-character-class-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-union-character-property-escape': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-union-character-class': [PASS,FAIL], + 'built-ins/RegExp/unicodeSets/generated/character-property-escape-intersection-character-property-escape': [PASS,FAIL], # Unicode in identifiers. 'language/identifiers/part-unicode-*': [FAIL], @@ -805,8 +790,11 @@ # Temporal staging test which use timeZone other than "UTC" or # calendar other than "iso8601" which are not supported in no i18n mode. + 'staging/Temporal/Duration/old/add': [FAIL], + 'staging/Temporal/Duration/old/subtract': [FAIL], 'staging/Temporal/Instant/old/toZonedDateTime': [FAIL], 'staging/Temporal/Instant/old/toZonedDateTimeISO': [FAIL], + 'staging/Temporal/Regex/old/timezone': [FAIL], 'staging/Temporal/TimeZone/old/dst-change': [FAIL], 'staging/Temporal/TimeZone/old/getInstantFor': [FAIL], 'staging/Temporal/TimeZone/old/getInstantFor-disambiguation': [FAIL], @@ -820,13 +808,20 @@ 'staging/Temporal/ZonedDateTime/old/add': [FAIL], 'staging/Temporal/ZonedDateTime/old/compare': [FAIL], 'staging/Temporal/ZonedDateTime/old/date-time-hours-overflow': [FAIL], + 'staging/Temporal/ZonedDateTime/old/dst-properties': [FAIL], 'staging/Temporal/ZonedDateTime/old/order-of-operations': [FAIL], + 'staging/Temporal/ZonedDateTime/old/property-bags': [FAIL], 'staging/Temporal/ZonedDateTime/old/reversibility-of-differences': [FAIL], + 'staging/Temporal/ZonedDateTime/old/round': [FAIL], 'staging/Temporal/ZonedDateTime/old/since': [FAIL], + 'staging/Temporal/ZonedDateTime/old/string-parsing': [FAIL], 'staging/Temporal/ZonedDateTime/old/subtract': [FAIL], 'staging/Temporal/ZonedDateTime/old/toInstant': [FAIL], 'staging/Temporal/ZonedDateTime/old/toPlainDate': [FAIL], 'staging/Temporal/ZonedDateTime/old/toPlainTime': [FAIL], + 'staging/Temporal/ZonedDateTime/old/toString': [FAIL], + 'staging/Temporal/ZonedDateTime/old/until': [FAIL], + 'staging/Temporal/ZonedDateTime/old/with': [FAIL], 'staging/Temporal/ZonedDateTime/old/withCalendar': [FAIL], 'staging/Temporal/ZonedDateTime/old/withPlainDate': [FAIL], 'staging/Temporal/ZonedDateTime/old/withPlainTime': [FAIL], diff --git a/deps/v8/test/test262/testcfg.py b/deps/v8/test/test262/testcfg.py index 6d72acbe4e5c0b..f0dab8ae29eb1d 100644 --- a/deps/v8/test/test262/testcfg.py +++ b/deps/v8/test/test262/testcfg.py @@ -41,7 +41,8 @@ # TODO(littledan): move the flag mapping into the status file FEATURE_FLAGS = { - 'Intl.NumberFormat-v3': '--harmony_intl_number_format_v3', + 'Intl.NumberFormat-v3': '--harmony-intl-number-format-v3', + 'Intl.DurationFormat': '--harmony-intl-duration-format', 'Symbol.prototype.description': '--harmony-symbol-description', 'FinalizationRegistry': '--harmony-weak-refs-with-cleanup-some', 'WeakRef': '--harmony-weak-refs-with-cleanup-some', @@ -50,9 +51,10 @@ 
'import-assertions': '--harmony-import-assertions', 'resizable-arraybuffer': '--harmony-rab-gsab', 'Temporal': '--harmony-temporal', - 'array-find-from-last': '--harmony_array_find_last', + 'array-find-from-last': '--harmony-array-find-last', 'ShadowRealm': '--harmony-shadow-realm', 'regexp-v-flag': '--harmony-regexp-unicode-sets', + 'array-grouping': '--harmony-array-grouping', } SKIPPED_FEATURES = set([]) diff --git a/deps/v8/test/unittests/BUILD.gn b/deps/v8/test/unittests/BUILD.gn index f5fa21f23ab478..3ed05d219da6b3 100644 --- a/deps/v8/test/unittests/BUILD.gn +++ b/deps/v8/test/unittests/BUILD.gn @@ -59,6 +59,7 @@ v8_source_set("v8_heap_base_unittests_sources") { sources = [ "heap/base/active-system-pages-unittest.cc", + "heap/base/basic-slot-set-unittest.cc", "heap/base/worklist-unittest.cc", ] @@ -238,6 +239,7 @@ v8_source_set("unittests_sources") { "api/access-check-unittest.cc", "api/accessor-unittest.cc", "api/api-icu-unittest.cc", + "api/context-unittest.cc", "api/deserialize-unittest.cc", "api/exception-unittest.cc", "api/gc-callbacks-unittest.cc", @@ -348,6 +350,7 @@ v8_source_set("unittests_sources") { "compiler/regalloc/mid-tier-register-allocator-unittest.cc", "compiler/regalloc/move-optimizer-unittest.cc", "compiler/regalloc/register-allocator-unittest.cc", + "compiler/run-bytecode-graph-builder-unittest.cc", "compiler/run-deopt-unittest.cc", "compiler/run-jsbranches-unittest.cc", "compiler/run-jscalls-unittest.cc", @@ -409,7 +412,6 @@ v8_source_set("unittests_sources") { "heap/local-factory-unittest.cc", "heap/local-handles-unittest.cc", "heap/local-heap-unittest.cc", - "heap/marking-inner-pointer-resolution-unittest.cc", "heap/marking-unittest.cc", "heap/marking-worklist-unittest.cc", "heap/memory-reducer-unittest.cc", @@ -582,6 +584,14 @@ v8_source_set("unittests_sources") { sources += [ "heap/object-start-bitmap-unittest.cc" ] } + if (v8_enable_inner_pointer_resolution_mb) { + sources += [ "heap/marking-inner-pointer-resolution-unittest.cc" ] + } + + if (v8_enable_conservative_stack_scanning) { + sources += [ "heap/conservative-stack-visitor-unittest.cc" ] + } + if (v8_enable_i18n_support) { defines = [ "V8_INTL_SUPPORT" ] public_deps = [ "//third_party/icu" ] diff --git a/deps/v8/test/unittests/api/accessor-unittest.cc b/deps/v8/test/unittests/api/accessor-unittest.cc index ffddb2da1acd3c..8cd8a6fccffe50 100644 --- a/deps/v8/test/unittests/api/accessor-unittest.cc +++ b/deps/v8/test/unittests/api/accessor-unittest.cc @@ -33,7 +33,7 @@ static void UnreachableCallback( TEST_F(AccessorTest, CachedAccessor) { // TurboFan support for fast accessors is not implemented; turbofanned // code uses the slow accessor which breaks this test's expectations. 
- v8::internal::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; v8::Isolate* isolate = context()->GetIsolate(); v8::HandleScope scope(isolate); @@ -78,8 +78,8 @@ TEST_F(AccessorTest, CachedAccessor) { } TEST_F(AccessorTest, CachedAccessorTurboFan) { - i::FLAG_allow_natives_syntax = true; - // v8::internal::FLAG_always_turbofan = false; + i::v8_flags.allow_natives_syntax = true; + // i::v8_flags.always_turbofan = false; v8::Isolate* isolate = context()->GetIsolate(); v8::HandleScope scope(isolate); @@ -162,7 +162,7 @@ TEST_F(AccessorTest, CachedAccessorTurboFan) { } TEST_F(AccessorTest, CachedAccessorOnGlobalObject) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::HandleScope scope(isolate()); v8::Local<v8::FunctionTemplate> templ = v8::FunctionTemplate::New(isolate()); @@ -845,7 +845,7 @@ v8::MaybeLocal<v8::Context> TestHostCreateShadowRealmContextCallback( } // namespace TEST_F(AccessorTest, WrapFunctionTemplateSetNativeDataProperty) { - i::FLAG_harmony_shadow_realm = true; + i::v8_flags.harmony_shadow_realm = true; isolate()->SetHostCreateShadowRealmContextCallback( TestHostCreateShadowRealmContextCallback); diff --git a/deps/v8/test/unittests/api/api-wasm-unittest.cc b/deps/v8/test/unittests/api/api-wasm-unittest.cc index 6a3d363b62f780..04f52549486079 100644 --- a/deps/v8/test/unittests/api/api-wasm-unittest.cc +++ b/deps/v8/test/unittests/api/api-wasm-unittest.cc @@ -104,6 +104,13 @@ void WasmStreamingCallbackTestOnBytesReceived( streaming->OnBytesReceived(bytes, arraysize(bytes)); } +void WasmStreamingMoreFunctionsCanBeSerializedCallback( + const FunctionCallbackInfo<Value>& args) { + std::shared_ptr<WasmStreaming> streaming = + WasmStreaming::Unpack(args.GetIsolate(), args.Data()); + streaming->SetMoreFunctionsCanBeSerializedCallback([](CompiledWasmModule) {}); +} + TEST_F(ApiWasmTest, WasmStreamingCallback) { TestWasmStreaming(WasmStreamingCallbackTestCallbackIsCalled, Promise::kPending); @@ -144,6 +151,11 @@ TEST_F(ApiWasmTest, WasmCompileToWasmModuleObject) { CHECK(!maybe_module.IsEmpty()); } +TEST_F(ApiWasmTest, WasmStreamingSetCallback) { + TestWasmStreaming(WasmStreamingMoreFunctionsCanBeSerializedCallback, + Promise::kPending); +} + namespace { bool wasm_simd_enabled_value = false; diff --git a/deps/v8/test/unittests/api/context-unittest.cc b/deps/v8/test/unittests/api/context-unittest.cc new file mode 100644 index 00000000000000..e144a60ca42ba1 --- /dev/null +++ b/deps/v8/test/unittests/api/context-unittest.cc @@ -0,0 +1,96 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+
+#include "include/libplatform/libplatform.h"
+#include "include/v8-context.h"
+#include "include/v8-data.h"
+#include "include/v8-isolate.h"
+#include "include/v8-local-handle.h"
+#include "include/v8-value.h"
+#include "test/unittests/test-utils.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ContextTest = v8::TestWithIsolate;
+
+TEST_F(ContextTest, HasTemplateLiteralObjectBasic) {
+  v8::Local<v8::Context> context = v8::Context::New(isolate());
+  v8::Context::Scope scope(context);
+  ASSERT_FALSE(
+      context->HasTemplateLiteralObject(v8::Number::New(isolate(), 1)));
+  ASSERT_FALSE(context->HasTemplateLiteralObject(v8::String::Empty(isolate())));
+  ASSERT_FALSE(
+      context->HasTemplateLiteralObject(v8::Array::New(isolate(), 10)));
+}
+
+TEST_F(ContextTest, HasTemplateLiteralObject) {
+  const char* source = R"(
+    function ret(literal) {
+      return literal;
+    };
+    ret`one_${'two'}_three`;
+  )";
+  const char* otherObject1Source = R"(
+    Object.freeze(
+      Object.defineProperty(['one_', '_three'], 'raw', {
+        value: ['asdf'],
+        writable: false,
+        enumerable: false,
+        configurable: false,
+      })
+    );
+  )";
+  const char* otherObject2Source = R"(
+    Object.freeze(
+      Object.defineProperty(['one_', '_three'], 'raw', {
+        get() { return ['asdf']; },
+        enumerable: false,
+        configurable: false,
+      })
+    );
+  )";
+
+  v8::Local<v8::Context> context1 = v8::Context::New(isolate());
+  v8::Local<v8::Value> templateLiteral1;
+  v8::Local<v8::Value> templateLiteral1_2;
+  v8::Local<v8::Value> otherObject1_ctx1;
+  v8::Local<v8::Value> otherObject2_ctx1;
+  {
+    v8::Context::Scope scope(context1);
+    auto script =
+        v8::Script::Compile(context1, NewString(source)).ToLocalChecked();
+    templateLiteral1 = script->Run(context1).ToLocalChecked();
+    templateLiteral1_2 = script->Run(context1).ToLocalChecked();
+    otherObject1_ctx1 = RunJS(context1, otherObject1Source);
+    otherObject2_ctx1 = RunJS(context1, otherObject2Source);
+  }
+
+  v8::Local<v8::Value> templateLiteral2;
+  v8::Local<v8::Context> context2 = v8::Context::New(isolate());
+  v8::Local<v8::Value> otherObject1_ctx2;
+  v8::Local<v8::Value> otherObject2_ctx2;
+  {
+    v8::Context::Scope scope(context2);
+    templateLiteral2 = RunJS(context2, source);
+    otherObject1_ctx2 = RunJS(context2, otherObject1Source);
+    otherObject2_ctx2 = RunJS(context2, otherObject2Source);
+  }
+
+  ASSERT_TRUE(context1->HasTemplateLiteralObject(templateLiteral1));
+  ASSERT_TRUE(context1->HasTemplateLiteralObject(templateLiteral1_2));
+  ASSERT_FALSE(context1->HasTemplateLiteralObject(templateLiteral2));
+
+  ASSERT_FALSE(context2->HasTemplateLiteralObject(templateLiteral1));
+  ASSERT_FALSE(context2->HasTemplateLiteralObject(templateLiteral1_2));
+  ASSERT_TRUE(context2->HasTemplateLiteralObject(templateLiteral2));
+
+  // Neither otherObject is a template object, in either context.
+  ASSERT_FALSE(context1->HasTemplateLiteralObject(otherObject1_ctx1));
+  ASSERT_FALSE(context1->HasTemplateLiteralObject(otherObject2_ctx1));
+  ASSERT_FALSE(context1->HasTemplateLiteralObject(otherObject1_ctx2));
+  ASSERT_FALSE(context1->HasTemplateLiteralObject(otherObject2_ctx2));
+  ASSERT_FALSE(context2->HasTemplateLiteralObject(otherObject1_ctx1));
+  ASSERT_FALSE(context2->HasTemplateLiteralObject(otherObject2_ctx1));
+  ASSERT_FALSE(context2->HasTemplateLiteralObject(otherObject1_ctx2));
+  ASSERT_FALSE(context2->HasTemplateLiteralObject(otherObject2_ctx2));
+}
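For orientation, a hypothetical embedder-side sketch of the v8::Context::HasTemplateLiteralObject() API that the new test above exercises. `RunScriptReturningValue` is an assumed helper invented for this sketch, not a V8 API; only HasTemplateLiteralObject() itself comes from the file above:

    // A minimal sketch, assuming an entered `context` and a helper that
    // compiles a script, runs it, and returns its completion value.
    v8::Local<v8::Value> value =
        RunScriptReturningValue(context, "((x) => x)`a${1}b`");
    if (context->HasTemplateLiteralObject(value)) {
      // `value` is the template object cached for this tagged template in
      // `context`. The same literal evaluated in another context yields a
      // different object, which is what the test above checks.
    }

diff --git a/deps/v8/test/unittests/api/deserialize-unittest.cc b/deps/v8/test/unittests/api/deserialize-unittest.cc
index a1237b6bc32318..f649beb26fca43 100644
---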
a/deps/v8/test/unittests/api/deserialize-unittest.cc +++ b/deps/v8/test/unittests/api/deserialize-unittest.cc @@ -402,7 +402,8 @@ class MergeDeserializedCodeTest : public DeserializeTest { ScriptObjectFlag aged_after_background_merge, bool lazy_should_be_compiled = false, bool eager_should_be_compiled = true) { - i::FLAG_merge_background_deserialized_script_with_compilation_cache = true; + i::v8_flags.merge_background_deserialized_script_with_compilation_cache = + true; std::unique_ptr<v8::ScriptCompiler::CachedData> cached_data; IsolateAndContextScope scope(this); i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate()); @@ -632,4 +633,86 @@ TEST_F(MergeDeserializedCodeTest, Regress1360024) { true); // lazy_should_be_compiled } +TEST_F(MergeDeserializedCodeTest, MergeWithNoFollowUpWork) { + i::v8_flags.merge_background_deserialized_script_with_compilation_cache = + true; + std::unique_ptr<v8::ScriptCompiler::CachedData> cached_data; + IsolateAndContextScope scope(this); + i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate()); + ScriptOrigin default_origin(isolate(), NewString("")); + + constexpr char kSourceCode[] = "function f() {}"; + Local<Script> original_script; + + // Compile the script for the first time, to both populate the Isolate + // compilation cache and produce code cache data. + { + v8::EscapableHandleScope handle_scope(isolate()); + Local<Script> script = + Script::Compile(context(), NewString(kSourceCode), &default_origin) + .ToLocalChecked(); + + cached_data.reset( + ScriptCompiler::CreateCodeCache(script->GetUnboundScript())); + + // Retain the v8::Script (a JSFunction) so we can run it later. + original_script = handle_scope.Escape(script); + } + + // Age the top-level bytecode so that the Isolate compilation cache will + // contain only the Script. + i::BytecodeArray bytecode = + GetSharedFunctionInfo(original_script).GetBytecodeArray(i_isolate); + const int kAgingThreshold = 6; + for (int j = 0; j < kAgingThreshold; ++j) { + bytecode.MakeOlder(); + } + i_isolate->heap()->CollectAllGarbage(i::Heap::kNoGCFlags, + i::GarbageCollectionReason::kTesting); + + // A second round of GC is necessary in case incremental marking had already + // started before the bytecode was aged. + i_isolate->heap()->CollectAllGarbage(i::Heap::kNoGCFlags, + i::GarbageCollectionReason::kTesting); + + DeserializeThread deserialize_thread(ScriptCompiler::StartConsumingCodeCache( + isolate(), std::make_unique<ScriptCompiler::CachedData>( + cached_data->data, cached_data->length, + ScriptCompiler::CachedData::BufferNotOwned))); + CHECK(deserialize_thread.Start()); + deserialize_thread.Join(); + + std::unique_ptr<ScriptCompiler::ConsumeCodeCacheTask> task = + deserialize_thread.TakeTask(); + + // At this point, the cached script's top-level SFI is not compiled, so a + // background merge is recommended. + task->SourceTextAvailable(isolate(), NewString(kSourceCode), default_origin); + + CHECK(task->ShouldMergeWithExistingScript()); + + // Run the original script, which will cause its top-level SFI to become + // compiled again, and make the SFI for the nested function exist. + CHECK(!original_script->Run(context()).IsEmpty()); + + // The background merge does nothing and requests no follow-up work on the + // main thread because the original script has the same SFIs at the same level + // of compiledness. + MergeThread merge_thread(task.get()); + CHECK(merge_thread.Start()); + merge_thread.Join(); + + // Complete compilation on the main thread. 
Even though no follow-up work is + // required, this step should reuse the original script. + ScriptCompiler::Source source(NewString(kSourceCode), default_origin, + cached_data.release(), task.release()); + Local<Script> script = + ScriptCompiler::Compile(context(), &source, + ScriptCompiler::kConsumeCodeCache) + .ToLocalChecked(); + + CHECK_EQ(GetSharedFunctionInfo(script), + GetSharedFunctionInfo(original_script)); +} + } // namespace v8 diff --git a/deps/v8/test/unittests/api/exception-unittest.cc b/deps/v8/test/unittests/api/exception-unittest.cc index cda1d9defe1c42..2455e4c78f94b1 100644 --- a/deps/v8/test/unittests/api/exception-unittest.cc +++ b/deps/v8/test/unittests/api/exception-unittest.cc @@ -44,10 +44,10 @@ class APIExceptionTest : public TestWithIsolate { class V8_NODISCARD ScopedExposeGc { public: - ScopedExposeGc() : was_exposed_(i::FLAG_expose_gc) { - i::FLAG_expose_gc = true; + ScopedExposeGc() : was_exposed_(i::v8_flags.expose_gc) { + i::v8_flags.expose_gc = true; } - ~ScopedExposeGc() { i::FLAG_expose_gc = was_exposed_; } + ~ScopedExposeGc() { i::v8_flags.expose_gc = was_exposed_; } private: const bool was_exposed_; diff --git a/deps/v8/test/unittests/api/gc-callbacks-unittest.cc b/deps/v8/test/unittests/api/gc-callbacks-unittest.cc index edbc9d1af70fa2..1603d7709627d9 100644 --- a/deps/v8/test/unittests/api/gc-callbacks-unittest.cc +++ b/deps/v8/test/unittests/api/gc-callbacks-unittest.cc @@ -20,7 +20,7 @@ class GCCallbacksTest : public internal::TestWithHeapInternalsAndContext { CHECK_EQ(current_test_->gc_callbacks_isolate_, isolate); ++current_test_->prologue_call_count_alloc_; - if (!v8::internal::FLAG_single_generation) { + if (!i::v8_flags.single_generation) { // Simulate full heap to see if we will reenter this callback current_test_->SimulateFullSpace(current_test_->heap()->new_space()); } @@ -39,7 +39,7 @@ class GCCallbacksTest : public internal::TestWithHeapInternalsAndContext { CHECK_EQ(current_test_->gc_callbacks_isolate_, isolate); ++current_test_->epilogue_call_count_alloc_; - if (!v8::internal::FLAG_single_generation) { + if (!i::v8_flags.single_generation) { // Simulate full heap to see if we will reenter this callback current_test_->SimulateFullSpace(current_test_->heap()->new_space()); } @@ -120,7 +120,7 @@ GCCallbacksTest* GCCallbacksTest::current_test_ = nullptr; TEST_F(GCCallbacksTest, GCCallbacks) { // For SimulateFullSpace in PrologueCallbackAlloc and EpilogueCallbackAlloc. 
- i::FLAG_stress_concurrent_allocation = false; + i::v8_flags.stress_concurrent_allocation = false; v8::Isolate* isolate = context()->GetIsolate(); gc_callbacks_isolate_ = isolate; isolate->AddGCPrologueCallback(PrologueCallback); diff --git a/deps/v8/test/unittests/api/v8-object-unittest.cc b/deps/v8/test/unittests/api/v8-object-unittest.cc index a4b5dc76e5ad53..9fcb9a402088da 100644 --- a/deps/v8/test/unittests/api/v8-object-unittest.cc +++ b/deps/v8/test/unittests/api/v8-object-unittest.cc @@ -101,7 +101,7 @@ TEST_F(LapContextTest, CurrentContextInLazyAccessorOnPrototype) { "%OptimizeFunctionOnNextCall(f); " "f();"; Context::Scope scope(caller_context); - internal::FLAG_allow_natives_syntax = true; + internal::v8_flags.allow_natives_syntax = true; Script::Compile(caller_context, String::NewFromUtf8Literal(isolate(), script)) .ToLocalChecked() ->Run(caller_context) @@ -153,7 +153,7 @@ TEST_F(LapContextTest, CurrentContextInLazyAccessorOnPlatformObject) { "%OptimizeFunctionOnNextCall(f); " "f();"; Context::Scope scope(caller_context); - internal::FLAG_allow_natives_syntax = true; + internal::v8_flags.allow_natives_syntax = true; Script::Compile(caller_context, String::NewFromUtf8Literal(isolate(), script)) .ToLocalChecked() ->Run(caller_context) @@ -204,7 +204,7 @@ TEST_F(LapContextTest, CurrentContextInLazyAccessorOnInterface) { "%OptimizeFunctionOnNextCall(f); " "f();"; Context::Scope scope(caller_context); - internal::FLAG_allow_natives_syntax = true; + internal::v8_flags.allow_natives_syntax = true; Script::Compile(caller_context, String::NewFromUtf8Literal(isolate(), script)) .ToLocalChecked() ->Run(caller_context) diff --git a/deps/v8/test/unittests/assembler/disasm-arm-unittest.cc b/deps/v8/test/unittests/assembler/disasm-arm-unittest.cc index 52e2ad53e55d66..e0f9c5f90b96e9 100644 --- a/deps/v8/test/unittests/assembler/disasm-arm-unittest.cc +++ b/deps/v8/test/unittests/assembler/disasm-arm-unittest.cc @@ -1641,7 +1641,7 @@ static void TestLoadLiteral(byte* buffer, Assembler* assm, bool* failure, } -TEST_F(DisasmArmTest,LoadLiteral) { +TEST_F(DisasmArmTest, LoadLiteral) { SET_UP(); TestLoadLiteral(buffer, &assm, &failure, 0); diff --git a/deps/v8/test/unittests/assembler/disasm-ia32-unittest.cc b/deps/v8/test/unittests/assembler/disasm-ia32-unittest.cc index 86759cf7748e7f..d35a7a23dff50b 100644 --- a/deps/v8/test/unittests/assembler/disasm-ia32-unittest.cc +++ b/deps/v8/test/unittests/assembler/disasm-ia32-unittest.cc @@ -45,8 +45,6 @@ using DisasmIa320Test = TestWithIsolate; #define __ assm. 
-static void DummyStaticFunction(Object result) {} - TEST_F(DisasmIa320Test, DisasmIa320) { HandleScope scope(isolate()); v8::internal::byte buffer[8192]; @@ -293,8 +291,6 @@ TEST_F(DisasmIa320Test, DisasmIa320) { Handle<Code> ic = BUILTIN_CODE(isolate(), ArrayFrom); __ call(ic, RelocInfo::CODE_TARGET); __ nop(); - __ call(FUNCTION_ADDR(DummyStaticFunction), RelocInfo::RUNTIME_ENTRY); - __ nop(); __ jmp(&L1); __ jmp(Operand(ebx, ecx, times_4, 10000)); diff --git a/deps/v8/test/unittests/assembler/disasm-riscv-unittest.cc b/deps/v8/test/unittests/assembler/disasm-riscv-unittest.cc index f2b83adb852ae2..7bc9ca42cb3fe1 100644 --- a/deps/v8/test/unittests/assembler/disasm-riscv-unittest.cc +++ b/deps/v8/test/unittests/assembler/disasm-riscv-unittest.cc @@ -469,7 +469,7 @@ TEST_F(DisasmRiscv64Test, PSEUDO) { } #ifdef V8_TARGET_ARCH_RISCV64 TEST_F(DisasmRiscv64Test, RV64C) { - i::FLAG_riscv_c_extension = true; + i::v8_flags.riscv_c_extension = true; SET_UP(); COMPARE(c_nop(), "00000001 nop"); diff --git a/deps/v8/test/unittests/assembler/macro-assembler-x64-unittest.cc b/deps/v8/test/unittests/assembler/macro-assembler-x64-unittest.cc index 5cc8f9856b7743..b7e5b0ffbe7f6f 100644 --- a/deps/v8/test/unittests/assembler/macro-assembler-x64-unittest.cc +++ b/deps/v8/test/unittests/assembler/macro-assembler-x64-unittest.cc @@ -440,7 +440,7 @@ void TestSmiIndex(MacroAssembler* masm, Label* exit, int id, int x) { TEST_F(MacroAssemblerX64Test, EmbeddedObj) { #ifdef V8_COMPRESS_POINTERS - FLAG_compact_on_every_full_gc = true; + v8_flags.compact_on_every_full_gc = true; Isolate* isolate = i_isolate(); HandleScope handles(isolate); diff --git a/deps/v8/test/unittests/assembler/turbo-assembler-arm-unittest.cc b/deps/v8/test/unittests/assembler/turbo-assembler-arm-unittest.cc index b04f7806aa57a7..6fa1bd5927341e 100644 --- a/deps/v8/test/unittests/assembler/turbo-assembler-arm-unittest.cc +++ b/deps/v8/test/unittests/assembler/turbo-assembler-arm-unittest.cc @@ -153,7 +153,7 @@ TEST_P(TurboAssemblerTestMoveObjectAndSlot, MoveObjectAndSlot) { CodeDesc desc; tasm.GetCode(nullptr, &desc); - if (FLAG_print_code) { + if (v8_flags.print_code) { Handle<Code> code = Factory::CodeBuilder(isolate(), desc, CodeKind::FOR_TESTING).Build(); StdoutStream os; diff --git a/deps/v8/test/unittests/assembler/turbo-assembler-arm64-unittest.cc b/deps/v8/test/unittests/assembler/turbo-assembler-arm64-unittest.cc index 9ac58a827626ed..77123ef56513d2 100644 --- a/deps/v8/test/unittests/assembler/turbo-assembler-arm64-unittest.cc +++ b/deps/v8/test/unittests/assembler/turbo-assembler-arm64-unittest.cc @@ -216,7 +216,7 @@ TEST_P(TurboAssemblerTestMoveObjectAndSlot, MoveObjectAndSlot) { CodeDesc desc; tasm.GetCode(nullptr, &desc); - if (FLAG_print_code) { + if (v8_flags.print_code) { Handle<Code> code = Factory::CodeBuilder(isolate(), desc, CodeKind::FOR_TESTING) .Build(); diff --git a/deps/v8/test/unittests/base/functional-unittest.cc b/deps/v8/test/unittests/base/functional-unittest.cc index efcabb75a25e3b..95e79b5176be3f 100644 --- a/deps/v8/test/unittests/base/functional-unittest.cc +++ b/deps/v8/test/unittests/base/functional-unittest.cc @@ -43,7 +43,7 @@ template <typename T> class FunctionalTest : public ::testing::Test { public: FunctionalTest() - : rng_(GetRandomSeedFromFlag(::v8::internal::FLAG_random_seed)) {} + : rng_(GetRandomSeedFromFlag(::v8::internal::v8_flags.random_seed)) {} ~FunctionalTest() override = default; FunctionalTest(const FunctionalTest&) = delete; FunctionalTest& operator=(const FunctionalTest&) = delete; 
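The unittest hunks above and below all apply one mechanical change: legacy per-flag globals (FLAG_foo) become members of a single flags struct (v8_flags.foo). A minimal sketch of the pattern, using a reduced, illustrative flag set; the real FlagValues struct is generated from V8's flag-definition machinery, not written out like this:

    // Sketch only; the flag names and layout below are illustrative.
    namespace v8::internal {
    struct FlagValues {
      bool allow_natives_syntax = false;
      bool always_turbofan = false;
      int random_seed = 0;
    };
    inline FlagValues v8_flags;  // one struct instance replaces many globals

    void ConfigureForTest() {
      // Before this patch: FLAG_allow_natives_syntax = true;
      // After:
      v8_flags.allow_natives_syntax = true;
      v8_flags.always_turbofan = false;
    }
    }  // namespace v8::internal

Grouping every flag into one struct lets the engine treat the whole flag set as a single block of memory, for example to write-protect it after initialization; the FLAG_* to v8_flags.* rename throughout these tests tracks that upstream refactoring.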
diff --git a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc index 3f50f5a091d69b..2e777256ae2577 100644 --- a/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc +++ b/deps/v8/test/unittests/compiler-dispatcher/compiler-dispatcher-unittest.cc @@ -43,7 +43,7 @@ class LazyCompileDispatcherTestFlags { static void SetFlagsForTest() { CHECK_NULL(save_flags_); save_flags_ = new SaveFlags(); - FLAG_lazy_compile_dispatcher = true; + v8_flags.lazy_compile_dispatcher = true; FlagList::EnforceFlagImplications(); } @@ -334,13 +334,13 @@ class MockPlatform : public v8::Platform { TEST_F(LazyCompileDispatcherTest, Construct) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); dispatcher.AbortAll(); } TEST_F(LazyCompileDispatcherTest, IsEnqueued) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -360,7 +360,7 @@ TEST_F(LazyCompileDispatcherTest, IsEnqueued) { TEST_F(LazyCompileDispatcherTest, FinishNow) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -379,7 +379,7 @@ TEST_F(LazyCompileDispatcherTest, FinishNow) { TEST_F(LazyCompileDispatcherTest, CompileAndFinalize) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -407,7 +407,7 @@ TEST_F(LazyCompileDispatcherTest, CompileAndFinalize) { TEST_F(LazyCompileDispatcherTest, IdleTaskNoIdleTime) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -458,7 +458,7 @@ TEST_F(LazyCompileDispatcherTest, IdleTaskNoIdleTime) { TEST_F(LazyCompileDispatcherTest, IdleTaskSmallIdleTime) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared_1 = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -556,7 +556,7 @@ TEST_F(LazyCompileDispatcherTest, IdleTaskException) { TEST_F(LazyCompileDispatcherTest, FinishNowWithWorkerTask) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -589,7 +589,7 @@ TEST_F(LazyCompileDispatcherTest, FinishNowWithWorkerTask) { TEST_F(LazyCompileDispatcherTest, IdleTaskMultipleJobs) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher 
dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared_1 = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -648,7 +648,7 @@ TEST_F(LazyCompileDispatcherTest, FinishNowException) { TEST_F(LazyCompileDispatcherTest, AbortJobNotStarted) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -675,7 +675,7 @@ TEST_F(LazyCompileDispatcherTest, AbortJobNotStarted) { TEST_F(LazyCompileDispatcherTest, AbortJobAlreadyStarted) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared = test::CreateSharedFunctionInfo(i_isolate(), nullptr); @@ -789,7 +789,7 @@ TEST_F(LazyCompileDispatcherTest, CompileLazy2FinishesDispatcherJob) { TEST_F(LazyCompileDispatcherTest, CompileMultipleOnBackgroundThread) { MockPlatform platform; - LazyCompileDispatcher dispatcher(i_isolate(), &platform, FLAG_stack_size); + LazyCompileDispatcher dispatcher(i_isolate(), &platform, v8_flags.stack_size); Handle<SharedFunctionInfo> shared_1 = test::CreateSharedFunctionInfo(i_isolate(), nullptr); diff --git a/deps/v8/test/unittests/compiler/backend/instruction-selector-unittest.cc b/deps/v8/test/unittests/compiler/backend/instruction-selector-unittest.cc index e52661fae260ef..30928fe94a8b82 100644 --- a/deps/v8/test/unittests/compiler/backend/instruction-selector-unittest.cc +++ b/deps/v8/test/unittests/compiler/backend/instruction-selector-unittest.cc @@ -19,7 +19,7 @@ namespace compiler { InstructionSelectorTest::InstructionSelectorTest() : TestWithNativeContextAndZone(kCompressGraphZone), - rng_(FLAG_random_seed) {} + rng_(v8_flags.random_seed) {} InstructionSelectorTest::~InstructionSelectorTest() = default; @@ -28,7 +28,7 @@ InstructionSelectorTest::Stream InstructionSelectorTest::StreamBuilder::Build( InstructionSelectorTest::StreamBuilderMode mode, InstructionSelector::SourcePositionMode source_position_mode) { Schedule* schedule = ExportForTest(); - if (FLAG_trace_turbo) { + if (v8_flags.trace_turbo) { StdoutStream{} << "=== Schedule before instruction selection ===" << std::endl << *schedule; @@ -52,7 +52,7 @@ InstructionSelectorTest::Stream InstructionSelectorTest::StreamBuilder::Build( source_position_mode, features, InstructionSelector::kDisableScheduling, InstructionSelector::kEnableRootsRelativeAddressing); selector.SelectInstructions(); - if (FLAG_trace_turbo) { + if (v8_flags.trace_turbo) { StdoutStream{} << "=== Code sequence after instruction selection ===" << std::endl << sequence; diff --git a/deps/v8/test/unittests/compiler/bytecode-analysis-unittest.cc b/deps/v8/test/unittests/compiler/bytecode-analysis-unittest.cc index 90a80b968eb0aa..1eb43200419d66 100644 --- a/deps/v8/test/unittests/compiler/bytecode-analysis-unittest.cc +++ b/deps/v8/test/unittests/compiler/bytecode-analysis-unittest.cc @@ -29,8 +29,8 @@ class BytecodeAnalysisTest : public TestWithIsolateAndZone { static void SetUpTestSuite() { CHECK_NULL(save_flags_); save_flags_ = new SaveFlags(); - i::FLAG_ignition_elide_noneffectful_bytecodes = false; - i::FLAG_ignition_reo = false; + i::v8_flags.ignition_elide_noneffectful_bytecodes = false; + i::v8_flags.ignition_reo = false; TestWithIsolateAndZone::SetUpTestSuite(); } diff --git 
a/deps/v8/test/unittests/compiler/compiler-unittest.cc b/deps/v8/test/unittests/compiler/compiler-unittest.cc index b5419dba417d51..abef44976cfe3b 100644 --- a/deps/v8/test/unittests/compiler/compiler-unittest.cc +++ b/deps/v8/test/unittests/compiler/compiler-unittest.cc @@ -206,7 +206,7 @@ using CompilerC2JSFramesTest = WithPrintExtensionMixin<v8::TestWithIsolate>; // | JS | // | C-to-JS | TEST_F(CompilerC2JSFramesTest, C2JSFrames) { - FLAG_expose_gc = true; + v8_flags.expose_gc = true; v8::HandleScope scope(isolate()); const char* extension_names[2] = { "v8/gc", WithPrintExtensionMixin::kPrintExtensionName}; @@ -276,8 +276,8 @@ TEST_F(CompilerTest, GetScriptLineNumber) { } TEST_F(CompilerTest, FeedbackVectorPreservedAcrossRecompiles) { - if (i::FLAG_always_turbofan || !i::FLAG_turbofan) return; - i::FLAG_allow_natives_syntax = true; + if (i::v8_flags.always_turbofan || !i::v8_flags.turbofan) return; + i::v8_flags.allow_natives_syntax = true; if (!i_isolate()->use_optimizer()) return; v8::HandleScope scope(isolate()); @@ -319,7 +319,8 @@ TEST_F(CompilerTest, FeedbackVectorPreservedAcrossRecompiles) { } TEST_F(CompilerTest, FeedbackVectorUnaffectedByScopeChanges) { - if (i::FLAG_always_turbofan || !i::FLAG_lazy || i::FLAG_lite_mode) { + if (i::v8_flags.always_turbofan || !i::v8_flags.lazy || + i::v8_flags.lite_mode) { return; } v8::HandleScope scope(isolate()); @@ -357,8 +358,8 @@ TEST_F(CompilerTest, FeedbackVectorUnaffectedByScopeChanges) { // Test that optimized code for different closures is actually shared. TEST_F(CompilerTest, OptimizedCodeSharing1) { - FLAG_stress_compaction = false; - FLAG_allow_natives_syntax = true; + v8_flags.stress_compaction = false; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(isolate()); for (int i = 0; i < 3; i++) { context() @@ -399,7 +400,7 @@ TEST_F(CompilerTest, OptimizedCodeSharing1) { } TEST_F(CompilerTest, CompileFunction) { - if (i::FLAG_always_turbofan) return; + if (i::v8_flags.always_turbofan) return; v8::HandleScope scope(isolate()); RunJS("var r = 10;"); v8::Local<v8::Object> math = v8::Local<v8::Object>::Cast( @@ -724,9 +725,9 @@ TEST_F(CompilerTest, CompileFunctionFunctionToString) { } TEST_F(CompilerTest, InvocationCount) { - if (FLAG_lite_mode) return; - FLAG_allow_natives_syntax = true; - FLAG_always_turbofan = false; + if (v8_flags.lite_mode) return; + v8_flags.allow_natives_syntax = true; + v8_flags.always_turbofan = false; v8::HandleScope scope(isolate()); RunJS( @@ -746,7 +747,7 @@ TEST_F(CompilerTest, InvocationCount) { } TEST_F(CompilerTest, ShallowEagerCompilation) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; v8::HandleScope scope(isolate()); v8::Local<v8::String> source = NewString( "function f(x) {" @@ -766,7 +767,7 @@ TEST_F(CompilerTest, ShallowEagerCompilation) { } TEST_F(CompilerTest, DeepEagerCompilation) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; v8::HandleScope scope(isolate()); v8::Local<v8::String> source = NewString( "function f(x) {" @@ -792,7 +793,7 @@ TEST_F(CompilerTest, DeepEagerCompilation) { } TEST_F(CompilerTest, DeepEagerCompilationPeakMemory) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; v8::HandleScope scope(isolate()); v8::Local<v8::String> source = NewString( "function f() {" diff --git a/deps/v8/test/unittests/compiler/control-equivalence-unittest.cc b/deps/v8/test/unittests/compiler/control-equivalence-unittest.cc index e56d18a8cbc27a..1463f4ac8e8553 100644 --- 
a/deps/v8/test/unittests/compiler/control-equivalence-unittest.cc +++ b/deps/v8/test/unittests/compiler/control-equivalence-unittest.cc @@ -30,7 +30,7 @@ class ControlEquivalenceTest : public GraphTest { protected: void ComputeEquivalence(Node* end_node) { graph()->SetEnd(graph()->NewNode(common()->End(1), end_node)); - if (FLAG_trace_turbo) { + if (v8_flags.trace_turbo) { SourcePositionTable table(graph()); NodeOriginTable table2(graph()); StdoutStream{} << AsJSON(*graph(), &table, &table2); diff --git a/deps/v8/test/unittests/compiler/function-tester.cc b/deps/v8/test/unittests/compiler/function-tester.cc index 1d8895d3d0c22a..d6951da6f70338 100644 --- a/deps/v8/test/unittests/compiler/function-tester.cc +++ b/deps/v8/test/unittests/compiler/function-tester.cc @@ -37,7 +37,7 @@ FunctionTester::FunctionTester(Isolate* isolate, const char* source, uint32_t flags) : isolate(isolate), canonical(isolate), - function((FLAG_allow_natives_syntax = true, NewFunction(source))), + function((v8_flags.allow_natives_syntax = true, NewFunction(source))), flags_(flags) { Compile(function); const uint32_t supported_flags = OptimizedCompilationInfo::kInlining; @@ -56,7 +56,7 @@ FunctionTester::FunctionTester(Isolate* isolate, Handle<Code> code, int param_count) : isolate(isolate), canonical(isolate), - function((FLAG_allow_natives_syntax = true, + function((v8_flags.allow_natives_syntax = true, NewFunction(BuildFunction(param_count).c_str()))), flags_(0) { CHECK(!code.is_null()); diff --git a/deps/v8/test/unittests/compiler/loop-peeling-unittest.cc b/deps/v8/test/unittests/compiler/loop-peeling-unittest.cc index 4e3d238bcdd52d..160064439c778f 100644 --- a/deps/v8/test/unittests/compiler/loop-peeling-unittest.cc +++ b/deps/v8/test/unittests/compiler/loop-peeling-unittest.cc @@ -59,7 +59,7 @@ class LoopPeelingTest : public GraphTest { MachineOperatorBuilder* machine() { return &machine_; } LoopTree* GetLoopTree() { - if (FLAG_trace_turbo_graph) { + if (v8_flags.trace_turbo_graph) { StdoutStream{} << AsRPO(*graph()); } Zone zone(isolate()->allocator(), ZONE_NAME); @@ -79,7 +79,7 @@ class LoopPeelingTest : public GraphTest { PeeledIteration* Peel(LoopPeeler peeler, LoopTree::Loop* loop) { EXPECT_TRUE(peeler.CanPeel(loop)); PeeledIteration* peeled = peeler.Peel(loop); - if (FLAG_trace_turbo_graph) { + if (v8_flags.trace_turbo_graph) { StdoutStream{} << AsRPO(*graph()); } return peeled; diff --git a/deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc b/deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc index c24ad5b48e03aa..bb8698c91baadd 100644 --- a/deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc +++ b/deps/v8/test/unittests/compiler/mips64/instruction-selector-mips64-unittest.cc @@ -334,7 +334,7 @@ TEST_P(InstructionSelectorCmpTest, Parameter) { m.Return((m.*cmp.mi.constructor)(m.Parameter(0), m.Parameter(1))); Stream s = m.Build(); - if (FLAG_debug_code && + if (v8_flags.debug_code && type.representation() == MachineRepresentation::kWord32) { ASSERT_EQ(6U, s.size()); @@ -1164,7 +1164,7 @@ TEST_P(InstructionSelectorElidedChangeUint32ToUint64Test, Parameter) { (m.*binop.constructor)(m.Parameter(0), m.Parameter(1)))); Stream s = m.Build(); // Make sure the `ChangeUint32ToUint64` node turned into a no-op. 
- if (FLAG_debug_code && binop.arch_opcode == kMips64Cmp) { + if (v8_flags.debug_code && binop.arch_opcode == kMips64Cmp) { ASSERT_EQ(6U, s.size()); EXPECT_EQ(kMips64Cmp, s[0]->arch_opcode()); EXPECT_EQ(kMips64Dshl, s[1]->arch_opcode()); diff --git a/deps/v8/test/unittests/compiler/regalloc/live-range-unittest.cc b/deps/v8/test/unittests/compiler/regalloc/live-range-unittest.cc index b8fe329f697b19..d2785465caeb4c 100644 --- a/deps/v8/test/unittests/compiler/regalloc/live-range-unittest.cc +++ b/deps/v8/test/unittests/compiler/regalloc/live-range-unittest.cc @@ -56,13 +56,13 @@ class TestRangeBuilder { LifetimePosition start = LifetimePosition::FromInt(pair.first); LifetimePosition end = LifetimePosition::FromInt(pair.second); CHECK(start < end); - range->AddUseInterval(start, end, zone_, FLAG_trace_turbo_alloc); + range->AddUseInterval(start, end, zone_, v8_flags.trace_turbo_alloc); } for (int pos : uses_) { UsePosition* use_position = zone_->New<UsePosition>(LifetimePosition::FromInt(pos), nullptr, nullptr, UsePositionHintType::kNone); - range->AddUsePosition(use_position, FLAG_trace_turbo_alloc); + range->AddUsePosition(use_position, v8_flags.trace_turbo_alloc); } pairs_.clear(); @@ -117,10 +117,11 @@ TEST_F(LiveRangeUnitTest, InvalidConstruction) { // Build a range manually, because the builder guards against empty cases. TopLevelLiveRange* range = zone()->New<TopLevelLiveRange>(1, MachineRepresentation::kTagged); - V8_ASSERT_DEBUG_DEATH(range->AddUseInterval(LifetimePosition::FromInt(0), - LifetimePosition::FromInt(0), - zone(), FLAG_trace_turbo_alloc), - ".*"); + V8_ASSERT_DEBUG_DEATH( + range->AddUseInterval(LifetimePosition::FromInt(0), + LifetimePosition::FromInt(0), zone(), + v8_flags.trace_turbo_alloc), + ".*"); } TEST_F(LiveRangeUnitTest, SplitInvalidStart) { diff --git a/deps/v8/test/unittests/compiler/regalloc/mid-tier-register-allocator-unittest.cc b/deps/v8/test/unittests/compiler/regalloc/mid-tier-register-allocator-unittest.cc index 4218f66180fc07..d879636e5208f9 100644 --- a/deps/v8/test/unittests/compiler/regalloc/mid-tier-register-allocator-unittest.cc +++ b/deps/v8/test/unittests/compiler/regalloc/mid-tier-register-allocator-unittest.cc @@ -400,7 +400,7 @@ TEST_F(MidTierRegisterAllocatorTest, SpillPhiDueToRegisterPressure) { } TEST_F(MidTierRegisterAllocatorTest, MoveLotsOfConstants) { - FLAG_trace_turbo = true; + v8_flags.trace_turbo = true; StartBlock(); VReg constants[Register::kNumRegisters]; for (size_t i = 0; i < arraysize(constants); ++i) { @@ -816,7 +816,7 @@ class MidTierRegAllocSlotConstraintTest } // namespace TEST_P(MidTierRegAllocSlotConstraintTest, SlotConstraint) { - FLAG_trace_turbo = true; + v8_flags.trace_turbo = true; StartBlock(); VReg p_0; switch (parameter_type()) { diff --git a/deps/v8/test/unittests/compiler/regalloc/move-optimizer-unittest.cc b/deps/v8/test/unittests/compiler/regalloc/move-optimizer-unittest.cc index 4a26bbc715d429..709ec6601b9aea 100644 --- a/deps/v8/test/unittests/compiler/regalloc/move-optimizer-unittest.cc +++ b/deps/v8/test/unittests/compiler/regalloc/move-optimizer-unittest.cc @@ -52,14 +52,14 @@ class MoveOptimizerTest : public InstructionSequenceTest { // TODO(dcarney): add a verifier. 
void Optimize() { WireBlocks(); - if (FLAG_trace_turbo) { + if (v8_flags.trace_turbo) { StdoutStream{} << "----- Instruction sequence before move optimization -----\n" << *sequence(); } MoveOptimizer move_optimizer(zone(), sequence()); move_optimizer.Run(); - if (FLAG_trace_turbo) { + if (v8_flags.trace_turbo) { StdoutStream{} << "----- Instruction sequence after move optimization -----\n" << *sequence(); diff --git a/deps/v8/test/unittests/compiler/riscv32/instruction-selector-riscv32-unittest.cc b/deps/v8/test/unittests/compiler/riscv32/instruction-selector-riscv32-unittest.cc index 0c7b6478fd9cd9..f9380ce8cc64e2 100644 --- a/deps/v8/test/unittests/compiler/riscv32/instruction-selector-riscv32-unittest.cc +++ b/deps/v8/test/unittests/compiler/riscv32/instruction-selector-riscv32-unittest.cc @@ -264,7 +264,7 @@ TEST_P(InstructionSelectorCmpTest, Parameter) { m.Return((m.*cmp.mi.constructor)(m.Parameter(0), m.Parameter(1))); Stream s = m.Build(); - if (FLAG_debug_code && + if (v8_flags.debug_code && type.representation() == MachineRepresentation::kWord32) { ASSERT_EQ(1U, s.size()); diff --git a/deps/v8/test/unittests/compiler/riscv64/instruction-selector-riscv64-unittest.cc b/deps/v8/test/unittests/compiler/riscv64/instruction-selector-riscv64-unittest.cc index b56149b604fcc4..8458e4e7d560b8 100644 --- a/deps/v8/test/unittests/compiler/riscv64/instruction-selector-riscv64-unittest.cc +++ b/deps/v8/test/unittests/compiler/riscv64/instruction-selector-riscv64-unittest.cc @@ -313,7 +313,7 @@ TEST_P(InstructionSelectorCmpTest, Parameter) { m.Return((m.*cmp.mi.constructor)(m.Parameter(0), m.Parameter(1))); Stream s = m.Build(); - if (FLAG_debug_code && + if (v8_flags.debug_code && type.representation() == MachineRepresentation::kWord32) { #ifndef V8_COMPRESS_POINTERS ASSERT_EQ(6U, s.size()); diff --git a/deps/v8/test/cctest/compiler/test-run-bytecode-graph-builder.cc b/deps/v8/test/unittests/compiler/run-bytecode-graph-builder-unittest.cc similarity index 76% rename from deps/v8/test/cctest/compiler/test-run-bytecode-graph-builder.cc rename to deps/v8/test/unittests/compiler/run-bytecode-graph-builder-unittest.cc index eb970693b98d7d..8b3befc48c85b9 100644 --- a/deps/v8/test/cctest/compiler/test-run-bytecode-graph-builder.cc +++ b/deps/v8/test/unittests/compiler/run-bytecode-graph-builder-unittest.cc @@ -1,4 +1,4 @@ -// Copyright 2015 the V8 project authors. All rights reserved. +// Copyright 2022 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
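Before the macro hunk that follows, a quick sketch of what the converted sharding macros expand to: each SHARD_TEST_BY_N(x) now emits N gtest fixture tests that call the shared Test##x helper with a shard index, and the helpers skip snippets whose index does not match the shard ((i % 2) != shard in the two-way case):

    // Expansion sketch for SHARD_TEST_BY_2(BytecodeGraphBuilderGlobals)
    // after the cctest-to-gtest rename in this file:
    TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderGlobals_0) {
      TestBytecodeGraphBuilderGlobals(0);  // runs even-indexed snippets
    }
    TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderGlobals_1) {
      TestBytecodeGraphBuilderGlobals(1);  // runs odd-indexed snippets
    }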
@@ -15,20 +15,20 @@ #include "src/interpreter/interpreter.h" #include "src/objects/objects-inl.h" #include "src/parsing/parse-info.h" -#include "test/cctest/cctest.h" +#include "test/unittests/test-utils.h" namespace v8 { namespace internal { namespace compiler { -#define SHARD_TEST_BY_2(x) \ - TEST(x##_0) { Test##x(0); } \ - TEST(x##_1) { Test##x(1); } -#define SHARD_TEST_BY_4(x) \ - TEST(x##_0) { Test##x(0); } \ - TEST(x##_1) { Test##x(1); } \ - TEST(x##_2) { Test##x(2); } \ - TEST(x##_3) { Test##x(3); } +#define SHARD_TEST_BY_2(x) \ + TEST_F(RunBytecodeGraphBuilderTest, x##_0) { Test##x(0); } \ + TEST_F(RunBytecodeGraphBuilderTest, x##_1) { Test##x(1); } +#define SHARD_TEST_BY_4(x) \ + TEST_F(RunBytecodeGraphBuilderTest, x##_0) { Test##x(0); } \ + TEST_F(RunBytecodeGraphBuilderTest, x##_1) { Test##x(1); } \ + TEST_F(RunBytecodeGraphBuilderTest, x##_2) { Test##x(2); } \ + TEST_F(RunBytecodeGraphBuilderTest, x##_3) { Test##x(3); } static const char kFunctionName[] = "f"; @@ -56,6 +56,22 @@ static MaybeHandle<Object> CallFunction(Isolate* isolate, argv); } +static v8::Local<v8::Value> CompileRun(v8::Isolate* isolate, + const char* source) { + v8::Local<v8::Context> context = isolate->GetCurrentContext(); + v8::Local<v8::Script> script = + v8::Script::Compile( + context, v8::String::NewFromUtf8(isolate, source).ToLocalChecked()) + .ToLocalChecked(); + + v8::Local<v8::Value> result; + if (script->Run(context).ToLocal(&result)) { + return result; + } else { + return v8::Local<v8::Value>(); + } +} + template <class... A> class BytecodeGraphCallable { public: @@ -77,8 +93,8 @@ class BytecodeGraphTester { BytecodeGraphTester(Isolate* isolate, const char* script, const char* filter = kFunctionName) : isolate_(isolate), script_(script) { - i::FLAG_always_turbofan = false; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.always_turbofan = false; + i::v8_flags.allow_natives_syntax = true; } virtual ~BytecodeGraphTester() = default; BytecodeGraphTester(const BytecodeGraphTester&) = delete; @@ -102,19 +118,20 @@ class BytecodeGraphTester { return try_catch.Message(); } - static Handle<Object> NewObject(const char* script) { - return v8::Utils::OpenHandle(*CompileRun(script)); - } - private: Isolate* isolate_; const char* script_; Handle<JSFunction> GetFunction(const char* functionName) { - CompileRun(script_); + v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate_); + v8::Local<v8::Context> context = v8_isolate->GetCurrentContext(); + + CompileRun(v8_isolate, script_); + Local<Function> api_function = Local<Function>::Cast( - CcTest::global() - ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(functionName)) + context->Global() + ->Get(context, v8::String::NewFromUtf8(v8_isolate, functionName) + .ToLocalChecked()) .ToLocalChecked()); Handle<JSFunction> function = Handle<JSFunction>::cast(v8::Utils::OpenHandle(*api_function)); @@ -185,9 +202,183 @@ struct ExpectedSnippet { } }; -TEST(BytecodeGraphBuilderReturnStatements) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +class RunBytecodeGraphBuilderTest : public TestWithNativeContext { + public: + void TestBytecodeGraphBuilderNamedStore(size_t shard) { + Factory* factory = i_isolate()->factory(); + + ExpectedSnippet<1> snippets[] = { + {"return p1.val = 20;", + {factory->NewNumberFromInt(20), RunJS("({val : 10})")}}, + {"p1.type = 'int'; return p1.type;", + {MakeString("int"), RunJS("({val : 10})")}}, + {"p1.name = 'def'; return p1[\"name\"];", + {MakeString("def"), RunJS("({name : 'abc'})")}}, + {"'use 
strict'; p1.val = 20; return p1.val;", + {factory->NewNumberFromInt(20), RunJS("({val : 10 })")}}, + {"'use strict'; return p1.type = 'int';", + {MakeString("int"), RunJS("({val : 10})")}}, + {"'use strict'; p1.val = 20; return p1[\"val\"];", + {factory->NewNumberFromInt(20), RunJS("({val : 10, name : 'abc'})")}}, + {"var b = 'abc';\n" REPEAT_127( + SPACE, " p1.name = b; ") " p1.name = 'def'; return p1.name;\n", + {MakeString("def"), RunJS("({name : 'abc'})")}}, + {"'use strict'; var b = 'def';\n" REPEAT_127( + SPACE, " p1.name = 'abc'; ") "p1.name = b; return p1.name;\n", + {MakeString("def"), RunJS("({ name : 'abc'})")}}, + }; + + for (size_t i = 0; i < arraysize(snippets); i++) { + if ((i % 2) != shard) continue; + base::ScopedVector<char> script(3072); + SNPrintF(script, "function %s(p1) { %s };\n%s({});", kFunctionName, + snippets[i].code_snippet, kFunctionName); + + BytecodeGraphTester tester(i_isolate(), script.begin()); + auto callable = tester.GetCallable<Handle<Object>>(); + Handle<Object> return_value = + callable(snippets[i].parameter(0)).ToHandleChecked(); + CHECK(return_value->SameValue(*snippets[i].return_value())); + } + } + + void TestBytecodeGraphBuilderKeyedStore(size_t shard) { + Isolate* isolate = i_isolate(); + Factory* factory = isolate->factory(); + + ExpectedSnippet<2> snippets[] = { + {"p1[p2] = 20; return p1[p2];", + {factory->NewNumberFromInt(20), RunJS("({val : 10})"), + MakeString("val")}}, + {"return p1[100] = 'def';", + {MakeString("def"), RunJS("({100 : 'abc'})"), + factory->NewNumberFromInt(0)}}, + {"var b = 100; p1[b] = 'def'; return p1[b];", + {MakeString("def"), RunJS("({100 : 'abc'})"), + factory->NewNumberFromInt(0)}}, + {"'use strict'; p1[p2] = 20; return p1[p2];", + {factory->NewNumberFromInt(20), RunJS("({val : 10 })"), + MakeString("val")}}, + {"'use strict'; return p1[100] = 20;", + {factory->NewNumberFromInt(20), RunJS("({100 : 10})"), + factory->NewNumberFromInt(0)}}, + {"'use strict'; var b = p2; p1[b] = 'def'; return p1[b];", + {MakeString("def"), RunJS("({100 : 'abc'})"), + factory->NewNumberFromInt(100)}}, + {"var b;\n" REPEAT_127( + SPACE, " b = p1[p2]; ") " p1[p2] = 'def'; return p1[p2];\n", + {MakeString("def"), RunJS("({100 : 'abc'})"), + factory->NewNumberFromInt(100)}}, + {"'use strict'; var b;\n" REPEAT_127( + SPACE, " b = p1[p2]; ") " p1[p2] = 'def'; return p1[p2];\n", + {MakeString("def"), RunJS("({ 100 : 'abc'})"), + factory->NewNumberFromInt(100)}}, + }; + + for (size_t i = 0; i < arraysize(snippets); i++) { + if ((i % 2) != shard) continue; + base::ScopedVector<char> script(2048); + SNPrintF(script, "function %s(p1, p2) { %s };\n%s({});", kFunctionName, + snippets[i].code_snippet, kFunctionName); + + BytecodeGraphTester tester(isolate, script.begin()); + auto callable = tester.GetCallable<Handle<Object>>(); + Handle<Object> return_value = + callable(snippets[i].parameter(0)).ToHandleChecked(); + CHECK(return_value->SameValue(*snippets[i].return_value())); + } + } + + void TestBytecodeGraphBuilderGlobals(size_t shard) { + Isolate* isolate = i_isolate(); + Factory* factory = isolate->factory(); + + ExpectedSnippet<0> snippets[] = { + {"var global = 321;\n function f() { return global; };\n f();", + {factory->NewNumberFromInt(321)}}, + {"var global = 321;\n" + "function f() { global = 123; return global };\n f();", + {factory->NewNumberFromInt(123)}}, + {"var global = function() { return 'abc'};\n" + "function f() { return global(); };\n f();", + {MakeString("abc")}}, + {"var global = 456;\n" + "function f() { 'use strict'; return 
global; };\n f();", + {factory->NewNumberFromInt(456)}}, + {"var global = 987;\n" + "function f() { 'use strict'; global = 789; return global };\n f();", + {factory->NewNumberFromInt(789)}}, + {"var global = function() { return 'xyz'};\n" + "function f() { 'use strict'; return global(); };\n f();", + {MakeString("xyz")}}, + {"var global = 'abc'; var global_obj = {val:123};\n" + "function f() {\n" REPEAT_127( + SPACE, " var b = global_obj.name;\n") "return global; };\n f();\n", + {MakeString("abc")}}, + {"var global = 'abc'; var global_obj = {val:123};\n" + "function f() { 'use strict';\n" REPEAT_127( + SPACE, " var b = global_obj.name;\n") "global = 'xyz'; return " + "global };\n f();\n", + {MakeString("xyz")}}, + {"function f() { return typeof(undeclared_var); }\n; f();\n", + {MakeString("undefined")}}, + {"var defined_var = 10; function f() { return typeof(defined_var); " + "}\n; " + "f();\n", + {MakeString("number")}}, + }; + + for (size_t i = 0; i < arraysize(snippets); i++) { + if ((i % 2) != shard) continue; + BytecodeGraphTester tester(isolate, snippets[i].code_snippet); + auto callable = tester.GetCallable<>(); + Handle<Object> return_value = callable().ToHandleChecked(); + CHECK(return_value->SameValue(*snippets[i].return_value())); + } + } + + void TestJumpWithConstantsAndWideConstants(size_t shard) { + const int kStep = 46; + int start = static_cast<int>(7 + 17 * shard); + for (int constants = start; constants < 300; constants += kStep) { + std::stringstream filler_os; + // Generate a string that consumes constant pool entries and + // spread out branch distances in script below. + for (int i = 0; i < constants; i++) { + filler_os << "var x_ = 'x_" << i << "';\n"; + } + std::string filler(filler_os.str()); + + std::stringstream script_os; + script_os << "function " << kFunctionName << "(a) {\n"; + script_os << " " << filler; + script_os << " for (var i = a; i < 2; i++) {\n"; + script_os << " " << filler; + script_os << " if (i == 0) { " << filler << "i = 10; continue; }\n"; + script_os << " else if (i == a) { " << filler << "i = 12; break; }\n"; + script_os << " else { " << filler << " }\n"; + script_os << " }\n"; + script_os << " return i;\n"; + script_os << "}\n"; + script_os << kFunctionName << "(0);\n"; + std::string script(script_os.str()); + + Isolate* isolate = i_isolate(); + Factory* factory = isolate->factory(); + BytecodeGraphTester tester(isolate, script.c_str()); + auto callable = tester.GetCallable<Handle<Object>>(); + for (int a = 0; a < 3; a++) { + Handle<Object> return_val = + callable(factory->NewNumberFromInt(a)).ToHandleChecked(); + static const int results[] = {11, 12, 2}; + CHECK_EQ(Handle<Smi>::cast(return_val)->value(), results[a]); + } + } + } +}; + +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderReturnStatements) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -203,8 +394,8 @@ TEST(BytecodeGraphBuilderReturnStatements) { {"return 0.001;", {factory->NewNumber(0.001)}}, {"return 3.7e-60;", {factory->NewNumber(3.7e-60)}}, {"return -3.7e60;", {factory->NewNumber(-3.7e60)}}, - {"return '';", {factory->NewStringFromStaticChars("")}}, - {"return 'catfood';", {factory->NewStringFromStaticChars("catfood")}}, + {"return '';", {MakeString("")}}, + {"return 'catfood';", {MakeString("catfood")}}, {"return NaN;", {factory->nan_value()}}}; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -219,9 +410,8 @@ TEST(BytecodeGraphBuilderReturnStatements) { } } 
-TEST(BytecodeGraphBuilderPrimitiveExpressions) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderPrimitiveExpressions) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -244,9 +434,8 @@ TEST(BytecodeGraphBuilderPrimitiveExpressions) { } } -TEST(BytecodeGraphBuilderTwoParameterTests) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderTwoParameterTests) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<2> snippets[] = { @@ -284,9 +473,7 @@ TEST(BytecodeGraphBuilderTwoParameterTests) { factory->NewHeapNumber(4)}}, // Strings {"return p1 + p2;", - {factory->NewStringFromStaticChars("abcdef"), - factory->NewStringFromStaticChars("abc"), - factory->NewStringFromStaticChars("def")}}}; + {MakeString("abcdef"), MakeString("abc"), MakeString("def")}}}; for (size_t i = 0; i < arraysize(snippets); i++) { base::ScopedVector<char> script(1024); @@ -302,33 +489,23 @@ TEST(BytecodeGraphBuilderTwoParameterTests) { } } - -TEST(BytecodeGraphBuilderNamedLoad) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderNamedLoad) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { {"return p1.val;", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({val : 10})")}}, - {"return p1[\"name\"];", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({name : 'abc'})")}}, + {factory->NewNumberFromInt(10), RunJS("({val : 10})")}}, + {"return p1[\"name\"];", {MakeString("abc"), RunJS("({name : 'abc'})")}}, {"'use strict'; return p1.val;", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({val : 10 })")}}, + {factory->NewNumberFromInt(10), RunJS("({val : 10 })")}}, {"'use strict'; return p1[\"val\"];", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({val : 10, name : 'abc'})")}}, + {factory->NewNumberFromInt(10), RunJS("({val : 10, name : 'abc'})")}}, {"var b;\n" REPEAT_127(SPACE, " b = p1.name; ") " return p1.name;\n", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({name : 'abc'})")}}, - {"'use strict'; var b;\n" - REPEAT_127(SPACE, " b = p1.name; ") - "return p1.name;\n", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({ name : 'abc'})")}}, + {MakeString("abc"), RunJS("({name : 'abc'})")}}, + {"'use strict'; var b;\n" REPEAT_127( + SPACE, " b = p1.name; ") "return p1.name;\n", + {MakeString("abc"), RunJS("({ name : 'abc'})")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -344,44 +521,35 @@ TEST(BytecodeGraphBuilderNamedLoad) { } } -TEST(BytecodeGraphBuilderKeyedLoad) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderKeyedLoad) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<2> snippets[] = { {"return p1[p2];", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({val : 10})"), - factory->NewStringFromStaticChars("val")}}, + {factory->NewNumberFromInt(10), RunJS("({val : 10})"), + MakeString("val")}}, {"return p1[100];", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({100 : 'abc'})"), + {MakeString("abc"), 
RunJS("({100 : 'abc'})"), factory->NewNumberFromInt(0)}}, {"var b = 100; return p1[b];", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({100 : 'abc'})"), + {MakeString("abc"), RunJS("({100 : 'abc'})"), factory->NewNumberFromInt(0)}}, {"'use strict'; return p1[p2];", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({val : 10 })"), - factory->NewStringFromStaticChars("val")}}, + {factory->NewNumberFromInt(10), RunJS("({val : 10 })"), + MakeString("val")}}, {"'use strict'; return p1[100];", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({100 : 10})"), + {factory->NewNumberFromInt(10), RunJS("({100 : 10})"), factory->NewNumberFromInt(0)}}, {"'use strict'; var b = p2; return p1[b];", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({100 : 'abc'})"), + {MakeString("abc"), RunJS("({100 : 'abc'})"), factory->NewNumberFromInt(100)}}, {"var b;\n" REPEAT_127(SPACE, " b = p1[p2]; ") " return p1[p2];\n", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({100 : 'abc'})"), + {MakeString("abc"), RunJS("({100 : 'abc'})"), factory->NewNumberFromInt(100)}}, {"'use strict'; var b;\n" REPEAT_127(SPACE, " b = p1[p2]; ") "return p1[p2];\n", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({ 100 : 'abc'})"), + {MakeString("abc"), RunJS("({ 100 : 'abc'})"), factory->NewNumberFromInt(100)}}, }; @@ -399,131 +567,23 @@ TEST(BytecodeGraphBuilderKeyedLoad) { } } -void TestBytecodeGraphBuilderNamedStore(size_t shard) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); - Factory* factory = isolate->factory(); - - ExpectedSnippet<1> snippets[] = { - {"return p1.val = 20;", - {factory->NewNumberFromInt(20), - BytecodeGraphTester::NewObject("({val : 10})")}}, - {"p1.type = 'int'; return p1.type;", - {factory->NewStringFromStaticChars("int"), - BytecodeGraphTester::NewObject("({val : 10})")}}, - {"p1.name = 'def'; return p1[\"name\"];", - {factory->NewStringFromStaticChars("def"), - BytecodeGraphTester::NewObject("({name : 'abc'})")}}, - {"'use strict'; p1.val = 20; return p1.val;", - {factory->NewNumberFromInt(20), - BytecodeGraphTester::NewObject("({val : 10 })")}}, - {"'use strict'; return p1.type = 'int';", - {factory->NewStringFromStaticChars("int"), - BytecodeGraphTester::NewObject("({val : 10})")}}, - {"'use strict'; p1.val = 20; return p1[\"val\"];", - {factory->NewNumberFromInt(20), - BytecodeGraphTester::NewObject("({val : 10, name : 'abc'})")}}, - {"var b = 'abc';\n" REPEAT_127( - SPACE, " p1.name = b; ") " p1.name = 'def'; return p1.name;\n", - {factory->NewStringFromStaticChars("def"), - BytecodeGraphTester::NewObject("({name : 'abc'})")}}, - {"'use strict'; var b = 'def';\n" REPEAT_127( - SPACE, " p1.name = 'abc'; ") "p1.name = b; return p1.name;\n", - {factory->NewStringFromStaticChars("def"), - BytecodeGraphTester::NewObject("({ name : 'abc'})")}}, - }; - - for (size_t i = 0; i < arraysize(snippets); i++) { - if ((i % 2) != shard) continue; - base::ScopedVector<char> script(3072); - SNPrintF(script, "function %s(p1) { %s };\n%s({});", kFunctionName, - snippets[i].code_snippet, kFunctionName); - - BytecodeGraphTester tester(isolate, script.begin()); - auto callable = tester.GetCallable<Handle<Object>>(); - Handle<Object> return_value = - callable(snippets[i].parameter(0)).ToHandleChecked(); - CHECK(return_value->SameValue(*snippets[i].return_value())); - } -} - SHARD_TEST_BY_2(BytecodeGraphBuilderNamedStore) -void 
TestBytecodeGraphBuilderKeyedStore(size_t shard) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); - Factory* factory = isolate->factory(); - - ExpectedSnippet<2> snippets[] = { - {"p1[p2] = 20; return p1[p2];", - {factory->NewNumberFromInt(20), - BytecodeGraphTester::NewObject("({val : 10})"), - factory->NewStringFromStaticChars("val")}}, - {"return p1[100] = 'def';", - {factory->NewStringFromStaticChars("def"), - BytecodeGraphTester::NewObject("({100 : 'abc'})"), - factory->NewNumberFromInt(0)}}, - {"var b = 100; p1[b] = 'def'; return p1[b];", - {factory->NewStringFromStaticChars("def"), - BytecodeGraphTester::NewObject("({100 : 'abc'})"), - factory->NewNumberFromInt(0)}}, - {"'use strict'; p1[p2] = 20; return p1[p2];", - {factory->NewNumberFromInt(20), - BytecodeGraphTester::NewObject("({val : 10 })"), - factory->NewStringFromStaticChars("val")}}, - {"'use strict'; return p1[100] = 20;", - {factory->NewNumberFromInt(20), - BytecodeGraphTester::NewObject("({100 : 10})"), - factory->NewNumberFromInt(0)}}, - {"'use strict'; var b = p2; p1[b] = 'def'; return p1[b];", - {factory->NewStringFromStaticChars("def"), - BytecodeGraphTester::NewObject("({100 : 'abc'})"), - factory->NewNumberFromInt(100)}}, - {"var b;\n" REPEAT_127( - SPACE, " b = p1[p2]; ") " p1[p2] = 'def'; return p1[p2];\n", - {factory->NewStringFromStaticChars("def"), - BytecodeGraphTester::NewObject("({100 : 'abc'})"), - factory->NewNumberFromInt(100)}}, - {"'use strict'; var b;\n" REPEAT_127( - SPACE, " b = p1[p2]; ") " p1[p2] = 'def'; return p1[p2];\n", - {factory->NewStringFromStaticChars("def"), - BytecodeGraphTester::NewObject("({ 100 : 'abc'})"), - factory->NewNumberFromInt(100)}}, - }; - - for (size_t i = 0; i < arraysize(snippets); i++) { - if ((i % 2) != shard) continue; - base::ScopedVector<char> script(2048); - SNPrintF(script, "function %s(p1, p2) { %s };\n%s({});", kFunctionName, - snippets[i].code_snippet, kFunctionName); - - BytecodeGraphTester tester(isolate, script.begin()); - auto callable = tester.GetCallable<Handle<Object>>(); - Handle<Object> return_value = - callable(snippets[i].parameter(0)).ToHandleChecked(); - CHECK(return_value->SameValue(*snippets[i].return_value())); - } -} - SHARD_TEST_BY_2(BytecodeGraphBuilderKeyedStore) -TEST(BytecodeGraphBuilderPropertyCall) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderPropertyCall) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { {"return p1.func();", - {factory->NewNumberFromInt(25), - BytecodeGraphTester::NewObject("({func() { return 25; }})")}}, + {factory->NewNumberFromInt(25), RunJS("({func() { return 25; }})")}}, {"return p1.func('abc');", - {factory->NewStringFromStaticChars("abc"), - BytecodeGraphTester::NewObject("({func(a) { return a; }})")}}, + {MakeString("abc"), RunJS("({func(a) { return a; }})")}}, {"return p1.func(1, 2, 3, 4, 5, 6, 7, 8);", {factory->NewNumberFromInt(36), - BytecodeGraphTester::NewObject( - "({func(a, b, c, d, e, f, g, h) {\n" - " return a + b + c + d + e + f + g + h;}})")}}, + RunJS("({func(a, b, c, d, e, f, g, h) {\n" + " return a + b + c + d + e + f + g + h;}})")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -539,9 +599,8 @@ TEST(BytecodeGraphBuilderPropertyCall) { } } -TEST(BytecodeGraphBuilderCallNew) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCallNew) { + 
Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -575,9 +634,8 @@ TEST(BytecodeGraphBuilderCallNew) { } } -TEST(BytecodeGraphBuilderCreateClosure) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCreateClosure) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -611,16 +669,15 @@ TEST(BytecodeGraphBuilderCreateClosure) { } } -TEST(BytecodeGraphBuilderCallRuntime) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCallRuntime) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { {"function f(arg0) { return %MaxSmi(); }\nf()", {factory->NewNumberFromInt(Smi::kMaxValue), factory->undefined_value()}}, {"function f(arg0) { return %IsArray(arg0) }\nf(undefined)", - {factory->true_value(), BytecodeGraphTester::NewObject("[1, 2, 3]")}}, + {factory->true_value(), RunJS("[1, 2, 3]")}}, {"function f(arg0) { return %Add(arg0, 2) }\nf(1)", {factory->NewNumberFromInt(5), factory->NewNumberFromInt(3)}}, }; @@ -634,63 +691,14 @@ TEST(BytecodeGraphBuilderCallRuntime) { } } -void TestBytecodeGraphBuilderGlobals(size_t shard) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); - Factory* factory = isolate->factory(); - - ExpectedSnippet<0> snippets[] = { - {"var global = 321;\n function f() { return global; };\n f();", - {factory->NewNumberFromInt(321)}}, - {"var global = 321;\n" - "function f() { global = 123; return global };\n f();", - {factory->NewNumberFromInt(123)}}, - {"var global = function() { return 'abc'};\n" - "function f() { return global(); };\n f();", - {factory->NewStringFromStaticChars("abc")}}, - {"var global = 456;\n" - "function f() { 'use strict'; return global; };\n f();", - {factory->NewNumberFromInt(456)}}, - {"var global = 987;\n" - "function f() { 'use strict'; global = 789; return global };\n f();", - {factory->NewNumberFromInt(789)}}, - {"var global = function() { return 'xyz'};\n" - "function f() { 'use strict'; return global(); };\n f();", - {factory->NewStringFromStaticChars("xyz")}}, - {"var global = 'abc'; var global_obj = {val:123};\n" - "function f() {\n" REPEAT_127( - SPACE, " var b = global_obj.name;\n") "return global; };\n f();\n", - {factory->NewStringFromStaticChars("abc")}}, - {"var global = 'abc'; var global_obj = {val:123};\n" - "function f() { 'use strict';\n" REPEAT_127( - SPACE, " var b = global_obj.name;\n") "global = 'xyz'; return " - "global };\n f();\n", - {factory->NewStringFromStaticChars("xyz")}}, - {"function f() { return typeof(undeclared_var); }\n; f();\n", - {factory->NewStringFromStaticChars("undefined")}}, - {"var defined_var = 10; function f() { return typeof(defined_var); }\n; " - "f();\n", - {factory->NewStringFromStaticChars("number")}}, - }; - - for (size_t i = 0; i < arraysize(snippets); i++) { - if ((i % 2) != shard) continue; - BytecodeGraphTester tester(isolate, snippets[i].code_snippet); - auto callable = tester.GetCallable<>(); - Handle<Object> return_value = callable().ToHandleChecked(); - CHECK(return_value->SameValue(*snippets[i].return_value())); - } -} - SHARD_TEST_BY_2(BytecodeGraphBuilderGlobals) -TEST(BytecodeGraphBuilderToObject) { +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderToObject) { // TODO(mythria): tests for ToObject. Needs ForIn. 
} -TEST(BytecodeGraphBuilderToName) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderToName) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -730,21 +738,17 @@ TEST(BytecodeGraphBuilderToName) { } } -TEST(BytecodeGraphBuilderLogicalNot) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderLogicalNot) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { - {"return !p1;", - {factory->false_value(), - BytecodeGraphTester::NewObject("({val : 10})")}}, + {"return !p1;", {factory->false_value(), RunJS("({val : 10})")}}, {"return !p1;", {factory->true_value(), factory->NewNumberFromInt(0)}}, {"return !p1;", {factory->true_value(), factory->undefined_value()}}, {"return !p1;", {factory->false_value(), factory->NewNumberFromInt(10)}}, {"return !p1;", {factory->false_value(), factory->true_value()}}, - {"return !p1;", - {factory->false_value(), factory->NewStringFromStaticChars("abc")}}, + {"return !p1;", {factory->false_value(), MakeString("abc")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -760,26 +764,18 @@ TEST(BytecodeGraphBuilderLogicalNot) { } } -TEST(BytecodeGraphBuilderTypeOf) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderTypeOf) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { + {"return typeof p1;", {MakeString("object"), RunJS("({val : 10})")}}, {"return typeof p1;", - {factory->NewStringFromStaticChars("object"), - BytecodeGraphTester::NewObject("({val : 10})")}}, - {"return typeof p1;", - {factory->NewStringFromStaticChars("undefined"), - factory->undefined_value()}}, - {"return typeof p1;", - {factory->NewStringFromStaticChars("number"), - factory->NewNumberFromInt(10)}}, - {"return typeof p1;", - {factory->NewStringFromStaticChars("boolean"), factory->true_value()}}, + {MakeString("undefined"), factory->undefined_value()}}, {"return typeof p1;", - {factory->NewStringFromStaticChars("string"), - factory->NewStringFromStaticChars("abc")}}, + {MakeString("number"), factory->NewNumberFromInt(10)}}, + {"return typeof p1;", {MakeString("boolean"), factory->true_value()}}, + {"return typeof p1;", {MakeString("string"), MakeString("abc")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -795,9 +791,8 @@ TEST(BytecodeGraphBuilderTypeOf) { } } -TEST(BytecodeGraphBuilderCompareTypeOf) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCompareTypeOf) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { @@ -806,7 +801,7 @@ TEST(BytecodeGraphBuilderCompareTypeOf) { {"return typeof p1 === 'string';", {factory->false_value(), factory->NewNumber(1.1)}}, {"return typeof p1 === 'string';", - {factory->true_value(), factory->NewStringFromStaticChars("string")}}, + {factory->true_value(), MakeString("string")}}, {"return typeof p1 === 'string';", {factory->false_value(), factory->undefined_value()}}, {"return typeof p1 === 'undefined';", @@ -814,14 +809,13 @@ TEST(BytecodeGraphBuilderCompareTypeOf) { {"return typeof p1 === 'object';", {factory->true_value(), factory->null_value()}}, {"return typeof p1 === 'object';", - 
{factory->true_value(), BytecodeGraphTester::NewObject("({val : 10})")}}, + {factory->true_value(), RunJS("({val : 10})")}}, {"return typeof p1 === 'function';", - {factory->false_value(), - BytecodeGraphTester::NewObject("({val : 10})")}}, + {factory->false_value(), RunJS("({val : 10})")}}, {"return typeof p1 === 'symbol';", {factory->true_value(), factory->NewSymbol()}}, {"return typeof p1 === 'symbol';", - {factory->false_value(), factory->NewStringFromStaticChars("string")}}, + {factory->false_value(), MakeString("string")}}, {"return typeof p1 === 'other';", {factory->false_value(), factory->NewNumber(1.1)}}, }; @@ -839,9 +833,8 @@ TEST(BytecodeGraphBuilderCompareTypeOf) { } } -TEST(BytecodeGraphBuilderCountOperation) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCountOperation) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { @@ -862,20 +855,15 @@ TEST(BytecodeGraphBuilderCountOperation) { {"return 20 + --p1;", {factory->NewNumberFromInt(29), factory->NewNumberFromInt(10)}}, {"return p1.val--;", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({val : 10})")}}, + {factory->NewNumberFromInt(10), RunJS("({val : 10})")}}, {"return ++p1['val'];", - {factory->NewNumberFromInt(11), - BytecodeGraphTester::NewObject("({val : 10})")}}, - {"return ++p1[1];", - {factory->NewNumberFromInt(11), - BytecodeGraphTester::NewObject("({1 : 10})")}}, + {factory->NewNumberFromInt(11), RunJS("({val : 10})")}}, + {"return ++p1[1];", {factory->NewNumberFromInt(11), RunJS("({1 : 10})")}}, {" function inner() { return p1 } return --p1;", {factory->NewNumberFromInt(9), factory->NewNumberFromInt(10)}}, {" function inner() { return p1 } return p1--;", {factory->NewNumberFromInt(10), factory->NewNumberFromInt(10)}}, - {"return ++p1;", - {factory->nan_value(), factory->NewStringFromStaticChars("String")}}, + {"return ++p1;", {factory->nan_value(), MakeString("String")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -891,28 +879,22 @@ TEST(BytecodeGraphBuilderCountOperation) { } } -TEST(BytecodeGraphBuilderDelete) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderDelete) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { - {"return delete p1.val;", - {factory->true_value(), BytecodeGraphTester::NewObject("({val : 10})")}}, + {"return delete p1.val;", {factory->true_value(), RunJS("({val : 10})")}}, {"delete p1.val; return p1.val;", - {factory->undefined_value(), - BytecodeGraphTester::NewObject("({val : 10})")}}, + {factory->undefined_value(), RunJS("({val : 10})")}}, {"delete p1.name; return p1.val;", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({val : 10, name:'abc'})")}}, + {factory->NewNumberFromInt(10), RunJS("({val : 10, name:'abc'})")}}, {"'use strict'; return delete p1.val;", - {factory->true_value(), BytecodeGraphTester::NewObject("({val : 10})")}}, + {factory->true_value(), RunJS("({val : 10})")}}, {"'use strict'; delete p1.val; return p1.val;", - {factory->undefined_value(), - BytecodeGraphTester::NewObject("({val : 10})")}}, + {factory->undefined_value(), RunJS("({val : 10})")}}, {"'use strict'; delete p1.name; return p1.val;", - {factory->NewNumberFromInt(10), - BytecodeGraphTester::NewObject("({val : 10, name:'abc'})")}}, + {factory->NewNumberFromInt(10), RunJS("({val : 
10, name:'abc'})")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -928,9 +910,8 @@ TEST(BytecodeGraphBuilderDelete) { } } -TEST(BytecodeGraphBuilderDeleteGlobal) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderDeleteGlobal) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -978,23 +959,24 @@ TEST(BytecodeGraphBuilderDeleteGlobal) { } } -TEST(BytecodeGraphBuilderDeleteLookupSlot) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderDeleteLookupSlot) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); // TODO(mythria): Add more tests when we have support for LdaLookupSlot. - const char* function_prologue = "var f;" - "var x = 1;" - "y = 10;" - "var obj = {val:10};" - "var z = 30;" - "function f1() {" - " var z = 20;" - " eval(\"function t() {"; - const char* function_epilogue = " }; f = t; t();\");" - "}" - "f1();"; + const char* function_prologue = + "var f;" + "var x = 1;" + "y = 10;" + "var obj = {val:10};" + "var z = 30;" + "function f1() {" + " var z = 20;" + " eval(\"function t() {"; + const char* function_epilogue = + " }; f = t; t();\");" + "}" + "f1();"; ExpectedSnippet<0> snippets[] = { {"return delete y;", {factory->true_value()}}, @@ -1013,29 +995,29 @@ TEST(BytecodeGraphBuilderDeleteLookupSlot) { } } -TEST(BytecodeGraphBuilderLookupSlot) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderLookupSlot) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); - const char* function_prologue = "var f;" - "var x = 12;" - "y = 10;" - "var obj = {val:3.1414};" - "var z = 30;" - "function f1() {" - " var z = 20;" - " eval(\"function t() {"; - const char* function_epilogue = " }; f = t; t();\");" - "}" - "f1();"; + const char* function_prologue = + "var f;" + "var x = 12;" + "y = 10;" + "var obj = {val:3.1414};" + "var z = 30;" + "function f1() {" + " var z = 20;" + " eval(\"function t() {"; + const char* function_epilogue = + " }; f = t; t();\");" + "}" + "f1();"; ExpectedSnippet<0> snippets[] = { {"return x;", {factory->NewNumber(12)}}, {"return obj.val;", {factory->NewNumber(3.1414)}}, - {"return typeof x;", {factory->NewStringFromStaticChars("number")}}, - {"return typeof dummy;", - {factory->NewStringFromStaticChars("undefined")}}, + {"return typeof x;", {MakeString("number")}}, + {"return typeof dummy;", {MakeString("undefined")}}, {"x = 23; return x;", {factory->NewNumber(23)}}, {"'use strict'; obj.val = 23.456; return obj.val;", {factory->NewNumber(23.456)}}}; @@ -1052,9 +1034,8 @@ TEST(BytecodeGraphBuilderLookupSlot) { } } -TEST(BytecodeGraphBuilderLookupContextSlot) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderLookupContextSlot) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); // Testing with eval called in the current context. @@ -1101,9 +1082,8 @@ TEST(BytecodeGraphBuilderLookupContextSlot) { } } -TEST(BytecodeGraphBuilderLookupGlobalSlot) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderLookupGlobalSlot) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); // Testing with eval called in the current context. 
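Note on the pattern running through these hunks: every test in this file gets the same mechanical migration from cctest to gtest. A TEST() body that built its own HandleAndZoneScope becomes a TEST_F() member of the RunBytecodeGraphBuilderTest fixture, and the static helpers are replaced by fixture methods. A minimal before/after sketch, assuming the fixture exposes the usual unittest helpers (i_isolate(), RunJS(), MakeString()); the fixture definition itself sits earlier in the file, outside this excerpt, and the test name here is hypothetical:

    // Before: cctest style; each test owns its scope and uses static helpers.
    TEST(BytecodeGraphBuilderExample) {  // hypothetical test name
      HandleAndZoneScope scope;
      Isolate* isolate = scope.main_isolate();
      Handle<Object> obj = BytecodeGraphTester::NewObject("({val : 10})");
      Handle<String> str = isolate->factory()->NewStringFromStaticChars("abc");
      USE(obj);
      USE(str);
    }

    // After: gtest style; the fixture owns the isolate and the JS helpers.
    TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderExample) {
      Isolate* isolate = i_isolate();
      Handle<Object> obj = RunJS("({val : 10})");  // evaluate JS, take a handle
      Handle<String> str = MakeString("abc");      // replaces NewStringFromStaticChars
      USE(obj);
      USE(str);
    }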
@@ -1150,9 +1130,8 @@ TEST(BytecodeGraphBuilderLookupGlobalSlot) { } } -TEST(BytecodeGraphBuilderLookupSlotWide) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderLookupSlotWide) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); const char* function_prologue = @@ -1173,7 +1152,7 @@ TEST(BytecodeGraphBuilderLookupSlotWide) { {"var y = 2.3;" REPEAT_256(SPACE, "y = 2.3;") "return x;", {factory->NewNumber(12)}}, {"var y = 2.3;" REPEAT_256(SPACE, "y = 2.3;") "return typeof x;", - {factory->NewStringFromStaticChars("number")}}, + {MakeString("number")}}, {"var y = 2.3;" REPEAT_256(SPACE, "y = 2.3;") "return x = 23;", {factory->NewNumber(23)}}, {"'use strict';" REPEAT_256(SPACE, "y = 2.3;") "return obj.val = 23.456;", @@ -1191,9 +1170,8 @@ TEST(BytecodeGraphBuilderLookupSlotWide) { } } -TEST(BytecodeGraphBuilderCallLookupSlot) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCallLookupSlot) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0> snippets[] = { {"g = function(){ return 2 }; eval(''); return g();", @@ -1218,10 +1196,8 @@ TEST(BytecodeGraphBuilderCallLookupSlot) { } } -TEST(BytecodeGraphBuilderEval) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); - Factory* factory = isolate->factory(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderEval) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0> snippets[] = { {"return eval('1;');", {handle(Smi::FromInt(1), isolate)}}, @@ -1243,18 +1219,16 @@ TEST(BytecodeGraphBuilderEval) { {handle(Smi::FromInt(1), isolate)}}, {"'use strict'; var x = 1; eval('var x = 2;'); return x;", {handle(Smi::FromInt(1), isolate)}}, - {"var x = 10; eval('x + 20;'); return typeof x;", - {factory->NewStringFromStaticChars("number")}}, + {"var x = 10; eval('x + 20;'); return typeof x;", {MakeString("number")}}, {"eval('var y = 10;'); return typeof unallocated;", - {factory->NewStringFromStaticChars("undefined")}}, + {MakeString("undefined")}}, {"'use strict'; eval('var y = 10;'); return typeof unallocated;", - {factory->NewStringFromStaticChars("undefined")}}, - {"eval('var x = 10;'); return typeof x;", - {factory->NewStringFromStaticChars("number")}}, + {MakeString("undefined")}}, + {"eval('var x = 10;'); return typeof x;", {MakeString("number")}}, {"var x = {}; eval('var x = 10;'); return typeof x;", - {factory->NewStringFromStaticChars("number")}}, + {MakeString("number")}}, {"'use strict'; var x = {}; eval('var x = 10;'); return typeof x;", - {factory->NewStringFromStaticChars("object")}}, + {MakeString("object")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -1268,9 +1242,8 @@ TEST(BytecodeGraphBuilderEval) { } } -TEST(BytecodeGraphBuilderEvalParams) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderEvalParams) { + Isolate* isolate = i_isolate(); ExpectedSnippet<1> snippets[] = { {"var x = 10; return eval('x + p1;');", @@ -1295,10 +1268,8 @@ TEST(BytecodeGraphBuilderEvalParams) { } } -TEST(BytecodeGraphBuilderEvalGlobal) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); - Factory* factory = isolate->factory(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderEvalGlobal) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0> snippets[] = { {"function add_global() { eval('function f() { z = 33; }; f()'); };" @@ -1309,7 
+1280,7 @@ TEST(BytecodeGraphBuilderEvalGlobal) { " try { f() } catch(e) {}');\n" "}\n" "function f() { add_global(); return typeof y; } f();", - {factory->NewStringFromStaticChars("undefined")}}, + {MakeString("undefined")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -1368,21 +1339,18 @@ const char* get_code_snippet(Token::Value opcode) { } } -TEST(BytecodeGraphBuilderCompare) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCompare) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); Handle<Object> lhs_values[] = { factory->NewNumberFromInt(10), factory->NewHeapNumber(3.45), - factory->NewStringFromStaticChars("abc"), - factory->NewNumberFromInt(SMI_MAX), factory->NewNumberFromInt(SMI_MIN)}; - Handle<Object> rhs_values[] = {factory->NewNumberFromInt(10), - factory->NewStringFromStaticChars("10"), - factory->NewNumberFromInt(20), - factory->NewStringFromStaticChars("abc"), - factory->NewHeapNumber(3.45), - factory->NewNumberFromInt(SMI_MAX), - factory->NewNumberFromInt(SMI_MIN)}; + MakeString("abc"), factory->NewNumberFromInt(SMI_MAX), + factory->NewNumberFromInt(SMI_MIN)}; + Handle<Object> rhs_values[] = { + factory->NewNumberFromInt(10), MakeString("10"), + factory->NewNumberFromInt(20), MakeString("abc"), + factory->NewHeapNumber(3.45), factory->NewNumberFromInt(SMI_MAX), + factory->NewNumberFromInt(SMI_MIN)}; for (size_t i = 0; i < arraysize(kCompareOperators); i++) { base::ScopedVector<char> script(1024); @@ -1403,36 +1371,29 @@ TEST(BytecodeGraphBuilderCompare) { } } -TEST(BytecodeGraphBuilderTestIn) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderTestIn) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<2> snippets[] = { {"return p2 in p1;", - {factory->true_value(), BytecodeGraphTester::NewObject("({val : 10})"), - factory->NewStringFromStaticChars("val")}}, + {factory->true_value(), RunJS("({val : 10})"), MakeString("val")}}, {"return p2 in p1;", - {factory->true_value(), BytecodeGraphTester::NewObject("[]"), - factory->NewStringFromStaticChars("length")}}, + {factory->true_value(), RunJS("[]"), MakeString("length")}}, {"return p2 in p1;", - {factory->true_value(), BytecodeGraphTester::NewObject("[]"), - factory->NewStringFromStaticChars("toString")}}, + {factory->true_value(), RunJS("[]"), MakeString("toString")}}, {"return p2 in p1;", - {factory->true_value(), BytecodeGraphTester::NewObject("({val : 10})"), - factory->NewStringFromStaticChars("toString")}}, + {factory->true_value(), RunJS("({val : 10})"), MakeString("toString")}}, {"return p2 in p1;", - {factory->false_value(), BytecodeGraphTester::NewObject("({val : 10})"), - factory->NewStringFromStaticChars("abc")}}, + {factory->false_value(), RunJS("({val : 10})"), MakeString("abc")}}, {"return p2 in p1;", - {factory->false_value(), BytecodeGraphTester::NewObject("({val : 10})"), + {factory->false_value(), RunJS("({val : 10})"), factory->NewNumberFromInt(10)}}, {"return p2 in p1;", - {factory->true_value(), BytecodeGraphTester::NewObject("({10 : 'val'})"), + {factory->true_value(), RunJS("({10 : 'val'})"), factory->NewNumberFromInt(10)}}, {"return p2 in p1;", - {factory->false_value(), - BytecodeGraphTester::NewObject("({10 : 'val'})"), + {factory->false_value(), RunJS("({10 : 'val'})"), factory->NewNumberFromInt(1)}}, }; @@ -1450,16 +1411,15 @@ TEST(BytecodeGraphBuilderTestIn) { } } 
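These tests are table-driven: each ExpectedSnippet row pairs a JS fragment with its expected return value and parameters, and a shared loop splices the fragment into a synthetic function before compiling it through BytecodeGraphTester. For orientation, this is the unchanged driver loop that follows each table in this file (the two-parameter variants differ only in the signature and buffer size):

    for (size_t i = 0; i < arraysize(snippets); i++) {
      base::ScopedVector<char> script(1024);
      // Wrap the snippet in a function and call it once so bytecode and type
      // feedback exist before the graph builder runs.
      SNPrintF(script, "function %s(p1) { %s };\n%s({});", kFunctionName,
               snippets[i].code_snippet, kFunctionName);
      BytecodeGraphTester tester(isolate, script.begin());
      auto callable = tester.GetCallable<Handle<Object>>();
      Handle<Object> return_value =
          callable(snippets[i].parameter(0)).ToHandleChecked();
      CHECK(return_value->SameValue(*snippets[i].return_value()));
    }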
-TEST(BytecodeGraphBuilderTestInstanceOf) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderTestInstanceOf) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { {"return p1 instanceof Object;", - {factory->true_value(), BytecodeGraphTester::NewObject("({val : 10})")}}, + {factory->true_value(), RunJS("({val : 10})")}}, {"return p1 instanceof String;", - {factory->false_value(), factory->NewStringFromStaticChars("string")}}, + {factory->false_value(), MakeString("string")}}, {"var cons = function() {};" "var obj = new cons();" "return obj instanceof cons;", @@ -1479,9 +1439,8 @@ TEST(BytecodeGraphBuilderTestInstanceOf) { } } -TEST(BytecodeGraphBuilderTryCatch) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderTryCatch) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0> snippets[] = { {"var a = 1; try { a = 2 } catch(e) { a = 3 }; return a;", @@ -1507,9 +1466,8 @@ TEST(BytecodeGraphBuilderTryCatch) { } } -TEST(BytecodeGraphBuilderTryFinally1) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderTryFinally1) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0> snippets[] = { {"var a = 1; try { a = a + 1; } finally { a = a + 2; }; return a;", @@ -1544,9 +1502,8 @@ TEST(BytecodeGraphBuilderTryFinally1) { } } -TEST(BytecodeGraphBuilderTryFinally2) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderTryFinally2) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0, const char*> snippets[] = { {"var a = 1; try { a = 2; throw 23; } finally { a = 3 }; return a;", @@ -1562,16 +1519,15 @@ TEST(BytecodeGraphBuilderTryFinally2) { BytecodeGraphTester tester(isolate, script.begin()); v8::Local<v8::String> message = tester.CheckThrowsReturnMessage()->Get(); - v8::Local<v8::String> expected_string = v8_str(snippets[i].return_value()); - CHECK( - message->Equals(CcTest::isolate()->GetCurrentContext(), expected_string) - .FromJust()); + v8::Local<v8::String> expected_string = + NewString(snippets[i].return_value()); + CHECK(message->Equals(v8_isolate()->GetCurrentContext(), expected_string) + .FromJust()); } } -TEST(BytecodeGraphBuilderThrow) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderThrow) { + Isolate* isolate = i_isolate(); // TODO(mythria): Add more tests when real try-catch and deoptimization // information are supported. 
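The throw-checking tests in the hunks that follow apply the same migration with one extra substitution: the cctest helpers v8_str() and CcTest::isolate() give way to the fixture's NewString() and v8_isolate(). The assertion itself is untouched; as rewritten in the TryFinally2 and Throw hunks it reads:

    v8::Local<v8::String> message = tester.CheckThrowsReturnMessage()->Get();
    v8::Local<v8::String> expected_string =
        NewString(snippets[i].return_value());
    CHECK(message->Equals(v8_isolate()->GetCurrentContext(), expected_string)
              .FromJust());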
@@ -1590,17 +1546,15 @@ TEST(BytecodeGraphBuilderThrow) { BytecodeGraphTester tester(isolate, script.begin()); v8::Local<v8::String> message = tester.CheckThrowsReturnMessage()->Get(); - v8::Local<v8::String> expected_string = v8_str(snippets[i].return_value()); - CHECK( - message->Equals(CcTest::isolate()->GetCurrentContext(), expected_string) - .FromJust()); + v8::Local<v8::String> expected_string = + NewString(snippets[i].return_value()); + CHECK(message->Equals(v8_isolate()->GetCurrentContext(), expected_string) + .FromJust()); } } -TEST(BytecodeGraphBuilderContext) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); - Factory* factory = isolate->factory(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderContext) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0> snippets[] = { {"var x = 'outer';" @@ -1613,7 +1567,7 @@ TEST(BytecodeGraphBuilderContext) { "return(x);" "}" "f();", - {factory->NewStringFromStaticChars("outer")}}, + {MakeString("outer")}}, {"var x = 'outer';" "function f() {" " 'use strict';" @@ -1624,7 +1578,7 @@ TEST(BytecodeGraphBuilderContext) { "return(innerFunc() + x);" "}" "f();", - {factory->NewStringFromStaticChars("inner outer")}}, + {MakeString("inner outer")}}, {"var x = 'outer';" "function f() {" " 'use strict';" @@ -1640,7 +1594,7 @@ TEST(BytecodeGraphBuilderContext) { " return(innerMostFunc() + x);" "}" "f();", - {factory->NewStringFromStaticChars("innermost inner_changed outer")}}, + {MakeString("innermost inner_changed outer")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -1654,9 +1608,8 @@ TEST(BytecodeGraphBuilderContext) { } } -TEST(BytecodeGraphBuilderLoadContext) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderLoadContext) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { @@ -1718,9 +1671,9 @@ TEST(BytecodeGraphBuilderLoadContext) { } } -TEST(BytecodeGraphBuilderCreateArgumentsNoParameters) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, + BytecodeGraphBuilderCreateArgumentsNoParameters) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -1747,9 +1700,8 @@ TEST(BytecodeGraphBuilderCreateArgumentsNoParameters) { } } -TEST(BytecodeGraphBuilderCreateArguments) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCreateArguments) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<3> snippets[] = { @@ -1792,9 +1744,8 @@ TEST(BytecodeGraphBuilderCreateArguments) { } } -TEST(BytecodeGraphBuilderCreateRestArguments) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderCreateRestArguments) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<3> snippets[] = { @@ -1834,26 +1785,20 @@ TEST(BytecodeGraphBuilderCreateRestArguments) { } } -TEST(BytecodeGraphBuilderRegExpLiterals) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderRegExpLiterals) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { {"return /abd/.exec('cccabbdd');", {factory->null_value()}}, - {"return /ab+d/.exec('cccabbdd')[0];", - 
{factory->NewStringFromStaticChars("abbd")}}, - {"var a = 3.1414;" - REPEAT_256(SPACE, "a = 3.1414;") - "return /ab+d/.exec('cccabbdd')[0];", - {factory->NewStringFromStaticChars("abbd")}}, + {"return /ab+d/.exec('cccabbdd')[0];", {MakeString("abbd")}}, + {"var a = 3.1414;" REPEAT_256( + SPACE, "a = 3.1414;") "return /ab+d/.exec('cccabbdd')[0];", + {MakeString("abbd")}}, {"return /ab+d/.exec('cccabbdd')[1];", {factory->undefined_value()}}, - {"return /AbC/i.exec('ssaBC')[0];", - {factory->NewStringFromStaticChars("aBC")}}, - {"return 'ssaBC'.match(/AbC/i)[0];", - {factory->NewStringFromStaticChars("aBC")}}, - {"return 'ssaBCtAbC'.match(/(AbC)/gi)[1];", - {factory->NewStringFromStaticChars("AbC")}}, + {"return /AbC/i.exec('ssaBC')[0];", {MakeString("aBC")}}, + {"return 'ssaBC'.match(/AbC/i)[0];", {MakeString("aBC")}}, + {"return 'ssaBCtAbC'.match(/(AbC)/gi)[1];", {MakeString("AbC")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -1868,9 +1813,8 @@ TEST(BytecodeGraphBuilderRegExpLiterals) { } } -TEST(BytecodeGraphBuilderArrayLiterals) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderArrayLiterals) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -1878,21 +1822,19 @@ TEST(BytecodeGraphBuilderArrayLiterals) { {"return [1, 3, 2][1];", {factory->NewNumberFromInt(3)}}, {"var a;" REPEAT_256(SPACE, "a = 9.87;") "return [1, 3, 2][1];", {factory->NewNumberFromInt(3)}}, - {"return ['a', 'b', 'c'][2];", {factory->NewStringFromStaticChars("c")}}, + {"return ['a', 'b', 'c'][2];", {MakeString("c")}}, {"var a = 100; return [a, a++, a + 2, a + 3][2];", {factory->NewNumberFromInt(103)}}, {"var a = 100; return [a, ++a, a + 2, a + 3][1];", {factory->NewNumberFromInt(101)}}, - {"var a = 9.2;" - REPEAT_256(SPACE, "a = 9.34;") - "return [a, ++a, a + 2, a + 3][2];", + {"var a = 9.2;" REPEAT_256( + SPACE, "a = 9.34;") "return [a, ++a, a + 2, a + 3][2];", {factory->NewHeapNumber(12.34)}}, - {"return [[1, 2, 3], ['a', 'b', 'c']][1][0];", - {factory->NewStringFromStaticChars("a")}}, + {"return [[1, 2, 3], ['a', 'b', 'c']][1][0];", {MakeString("a")}}, {"var t = 't'; return [[t, t + 'est'], [1 + t]][0][1];", - {factory->NewStringFromStaticChars("test")}}, + {MakeString("test")}}, {"var t = 't'; return [[t, t + 'est'], [1 + t]][1][0];", - {factory->NewStringFromStaticChars("1t")}}}; + {MakeString("1t")}}}; for (size_t i = 0; i < arraysize(snippets); i++) { base::ScopedVector<char> script(4096); @@ -1906,52 +1848,47 @@ TEST(BytecodeGraphBuilderArrayLiterals) { } } -TEST(BytecodeGraphBuilderObjectLiterals) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderObjectLiterals) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { {"return { }.name;", {factory->undefined_value()}}, - {"return { name: 'string', val: 9.2 }.name;", - {factory->NewStringFromStaticChars("string")}}, - {"var a;\n" - REPEAT_256(SPACE, "a = 1.23;\n") - "return { name: 'string', val: 9.2 }.name;", - {factory->NewStringFromStaticChars("string")}}, - {"return { name: 'string', val: 9.2 }['name'];", - {factory->NewStringFromStaticChars("string")}}, + {"return { name: 'string', val: 9.2 }.name;", {MakeString("string")}}, + {"var a;\n" REPEAT_256( + SPACE, "a = 1.23;\n") "return { name: 'string', val: 9.2 }.name;", + {MakeString("string")}}, + {"return { name: 'string', val: 
9.2 }['name'];", {MakeString("string")}}, {"var a = 15; return { name: 'string', val: a }.val;", {factory->NewNumberFromInt(15)}}, - {"var a;" - REPEAT_256(SPACE, "a = 1.23;") - "return { name: 'string', val: a }.val;", + {"var a;" REPEAT_256( + SPACE, "a = 1.23;") "return { name: 'string', val: a }.val;", {factory->NewHeapNumber(1.23)}}, {"var a = 15; var b = 'val'; return { name: 'string', val: a }[b];", {factory->NewNumberFromInt(15)}}, {"var a = 5; return { val: a, val: a + 1 }.val;", {factory->NewNumberFromInt(6)}}, {"return { func: function() { return 'test' } }.func();", - {factory->NewStringFromStaticChars("test")}}, + {MakeString("test")}}, {"return { func(a) { return a + 'st'; } }.func('te');", - {factory->NewStringFromStaticChars("test")}}, + {MakeString("test")}}, {"return { get a() { return 22; } }.a;", {factory->NewNumberFromInt(22)}}, {"var a = { get b() { return this.x + 't'; },\n" " set b(val) { this.x = val + 's' } };\n" "a.b = 'te';\n" "return a.b;", - {factory->NewStringFromStaticChars("test")}}, + {MakeString("test")}}, {"var a = 123; return { 1: a }[1];", {factory->NewNumberFromInt(123)}}, {"return Object.getPrototypeOf({ __proto__: null });", {factory->null_value()}}, {"var a = 'test'; return { [a]: 1 }.test;", {factory->NewNumberFromInt(1)}}, {"var a = 'test'; return { b: a, [a]: a + 'ing' }['test']", - {factory->NewStringFromStaticChars("testing")}}, + {MakeString("testing")}}, {"var a = 'proto_str';\n" "var b = { [a]: 1, __proto__: { var : a } };\n" "return Object.getPrototypeOf(b).var", - {factory->NewStringFromStaticChars("proto_str")}}, + {MakeString("proto_str")}}, {"var n = 'name';\n" "return { [n]: 'val', get a() { return 987 } }['a'];", {factory->NewNumberFromInt(987)}}, @@ -1968,9 +1905,8 @@ TEST(BytecodeGraphBuilderObjectLiterals) { } } -TEST(BytecodeGraphBuilderIf) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderIf) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { @@ -2076,9 +2012,8 @@ TEST(BytecodeGraphBuilderIf) { } } -TEST(BytecodeGraphBuilderConditionalOperator) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderConditionalOperator) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<1> snippets[] = { @@ -2105,9 +2040,8 @@ TEST(BytecodeGraphBuilderConditionalOperator) { } } -TEST(BytecodeGraphBuilderSwitch) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderSwitch) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); const char* switch_code = @@ -2151,9 +2085,8 @@ TEST(BytecodeGraphBuilderSwitch) { } } -TEST(BytecodeGraphBuilderSwitchMerge) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderSwitchMerge) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); const char* switch_code = @@ -2199,9 +2132,8 @@ TEST(BytecodeGraphBuilderSwitchMerge) { } } -TEST(BytecodeGraphBuilderNestedSwitch) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderNestedSwitch) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); const char* switch_code = @@ -2258,9 +2190,8 @@ TEST(BytecodeGraphBuilderNestedSwitch) { } } 
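Many snippets above pad their body with REPEAT_127 or REPEAT_256 filler. The padding is what forces the interpreter to emit wide bytecode variants -- enough repeated statements that constant-pool and feedback-slot operands no longer fit in a single byte -- which is exactly the path tests such as BytecodeGraphBuilderLookupSlotWide want the graph builder to handle. A hedged sketch of how such macros are typically built; the real definitions live in the shared interpreter test headers, not in this excerpt:

    // Illustrative only: doubling token-paste in the spirit of REPEAT_*.
    // Adjacent string literals concatenate, so SPACE() can expand to nothing.
    #define SPACE()
    #define REPEAT_2(SEP, ...) __VA_ARGS__ SEP() __VA_ARGS__
    #define REPEAT_4(SEP, ...) \
      REPEAT_2(SEP, __VA_ARGS__) SEP() REPEAT_2(SEP, __VA_ARGS__)

    // Expands to one long literal containing four copies of the assignment:
    const char* padded =
        "var b;\n" REPEAT_4(SPACE, " b = p1.name; ") "return p1.name;\n";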
-TEST(BytecodeGraphBuilderBreakableBlocks) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderBreakableBlocks) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -2297,9 +2228,8 @@ TEST(BytecodeGraphBuilderBreakableBlocks) { } } -TEST(BytecodeGraphBuilderWhile) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderWhile) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -2344,9 +2274,8 @@ TEST(BytecodeGraphBuilderWhile) { } } -TEST(BytecodeGraphBuilderDo) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderDo) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -2391,9 +2320,8 @@ TEST(BytecodeGraphBuilderDo) { } } -TEST(BytecodeGraphBuilderFor) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderFor) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -2483,9 +2411,8 @@ TEST(BytecodeGraphBuilderFor) { } } -TEST(BytecodeGraphBuilderForIn) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderForIn) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { {"var sum = 0;\n" @@ -2553,10 +2480,8 @@ TEST(BytecodeGraphBuilderForIn) { } } -TEST(BytecodeGraphBuilderForOf) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); - Factory* factory = isolate->factory(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderForOf) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0> snippets[] = { {" var r = 0;\n" " for (var a of [0,6,7,9]) { r += a; }\n" @@ -2565,7 +2490,7 @@ TEST(BytecodeGraphBuilderForOf) { {" var r = '';\n" " for (var a of 'foobar') { r = a + r; }\n" " return r;\n", - {factory->NewStringFromStaticChars("raboof")}}, + {MakeString("raboof")}}, {" var a = [1, 2, 3];\n" " a.name = 4;\n" " var r = 0;\n" @@ -2575,15 +2500,15 @@ TEST(BytecodeGraphBuilderForOf) { {" var r = '';\n" " var data = [1, 2, 3]; \n" " for (a of data) { delete data[0]; r += a; } return r;", - {factory->NewStringFromStaticChars("123")}}, + {MakeString("123")}}, {" var r = '';\n" " var data = [1, 2, 3]; \n" " for (a of data) { delete data[2]; r += a; } return r;", - {factory->NewStringFromStaticChars("12undefined")}}, + {MakeString("12undefined")}}, {" var r = '';\n" " var data = [1, 2, 3]; \n" " for (a of data) { delete data; r += a; } return r;", - {factory->NewStringFromStaticChars("123")}}, + {MakeString("123")}}, {" var r = '';\n" " var input = 'foobar';\n" " for (var a of input) {\n" @@ -2591,7 +2516,7 @@ TEST(BytecodeGraphBuilderForOf) { " r += a;\n" " }\n" " return r;\n", - {factory->NewStringFromStaticChars("foo")}}, + {MakeString("foo")}}, {" var r = '';\n" " var input = 'foobar';\n" " for (var a of input) {\n" @@ -2599,22 +2524,22 @@ TEST(BytecodeGraphBuilderForOf) { " r += a;\n" " }\n" " return r;\n", - {factory->NewStringFromStaticChars("fooar")}}, + {MakeString("fooar")}}, {" var r = '';\n" " var data = [1, 2, 3, 4]; \n" " for (a of data) { data[2] = 567; r += a; }\n" " return r;\n", - 
{factory->NewStringFromStaticChars("125674")}}, + {MakeString("125674")}}, {" var r = '';\n" " var data = [1, 2, 3, 4]; \n" " for (a of data) { data[4] = 567; r += a; }\n" " return r;\n", - {factory->NewStringFromStaticChars("1234567")}}, + {MakeString("1234567")}}, {" var r = '';\n" " var data = [1, 2, 3, 4]; \n" " for (a of data) { data[5] = 567; r += a; }\n" " return r;\n", - {factory->NewStringFromStaticChars("1234undefined567")}}, + {MakeString("1234undefined567")}}, {" var r = '';\n" " var obj = new Object();\n" " obj[Symbol.iterator] = function() { return {\n" @@ -2629,7 +2554,7 @@ TEST(BytecodeGraphBuilderForOf) { " }}\n" " for (a of obj) { r += a }\n" " return r;\n", - {factory->NewStringFromStaticChars("dcba")}}, + {MakeString("dcba")}}, }; for (size_t i = 0; i < arraysize(snippets); i++) { @@ -2644,51 +2569,10 @@ TEST(BytecodeGraphBuilderForOf) { } } -void TestJumpWithConstantsAndWideConstants(size_t shard) { - const int kStep = 46; - int start = static_cast<int>(7 + 17 * shard); - for (int constants = start; constants < 300; constants += kStep) { - std::stringstream filler_os; - // Generate a string that consumes constant pool entries and - // spread out branch distances in script below. - for (int i = 0; i < constants; i++) { - filler_os << "var x_ = 'x_" << i << "';\n"; - } - std::string filler(filler_os.str()); - - std::stringstream script_os; - script_os << "function " << kFunctionName << "(a) {\n"; - script_os << " " << filler; - script_os << " for (var i = a; i < 2; i++) {\n"; - script_os << " " << filler; - script_os << " if (i == 0) { " << filler << "i = 10; continue; }\n"; - script_os << " else if (i == a) { " << filler << "i = 12; break; }\n"; - script_os << " else { " << filler << " }\n"; - script_os << " }\n"; - script_os << " return i;\n"; - script_os << "}\n"; - script_os << kFunctionName << "(0);\n"; - std::string script(script_os.str()); - - HandleAndZoneScope scope; - auto isolate = scope.main_isolate(); - auto factory = isolate->factory(); - BytecodeGraphTester tester(isolate, script.c_str()); - auto callable = tester.GetCallable<Handle<Object>>(); - for (int a = 0; a < 3; a++) { - Handle<Object> return_val = - callable(factory->NewNumberFromInt(a)).ToHandleChecked(); - static const int results[] = {11, 12, 2}; - CHECK_EQ(Handle<Smi>::cast(return_val)->value(), results[a]); - } - } -} - SHARD_TEST_BY_4(JumpWithConstantsAndWideConstants) -TEST(BytecodeGraphBuilderWithStatement) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderWithStatement) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0> snippets[] = { {"with({x:42}) return x;", {handle(Smi::FromInt(42), isolate)}}, @@ -2722,9 +2606,8 @@ TEST(BytecodeGraphBuilderWithStatement) { } } -TEST(BytecodeGraphBuilderConstDeclaration) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderConstDeclaration) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -2780,9 +2663,9 @@ TEST(BytecodeGraphBuilderConstDeclaration) { } } -TEST(BytecodeGraphBuilderConstDeclarationLookupSlots) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, + BytecodeGraphBuilderConstDeclarationLookupSlots) { + Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); ExpectedSnippet<0> snippets[] = { @@ -2821,9 +2704,9 @@ 
TEST(BytecodeGraphBuilderConstDeclarationLookupSlots) { } } -TEST(BytecodeGraphBuilderConstInLookupContextChain) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, + BytecodeGraphBuilderConstInLookupContextChain) { + Isolate* isolate = i_isolate(); const char* prologue = "function OuterMost() {\n" @@ -2868,9 +2751,9 @@ TEST(BytecodeGraphBuilderConstInLookupContextChain) { } } -TEST(BytecodeGraphBuilderIllegalConstDeclaration) { - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); +TEST_F(RunBytecodeGraphBuilderTest, + BytecodeGraphBuilderIllegalConstDeclaration) { + Isolate* isolate = i_isolate(); ExpectedSnippet<0, const char*> illegal_const_decl[] = { {"const x = x = 10 + 3; return x;", @@ -2896,10 +2779,9 @@ TEST(BytecodeGraphBuilderIllegalConstDeclaration) { BytecodeGraphTester tester(isolate, script.begin()); v8::Local<v8::String> message = tester.CheckThrowsReturnMessage()->Get(); v8::Local<v8::String> expected_string = - v8_str(illegal_const_decl[i].return_value()); - CHECK( - message->Equals(CcTest::isolate()->GetCurrentContext(), expected_string) - .FromJust()); + NewString(illegal_const_decl[i].return_value()); + CHECK(message->Equals(v8_isolate()->GetCurrentContext(), expected_string) + .FromJust()); } // Tests for strict mode. @@ -2911,10 +2793,9 @@ TEST(BytecodeGraphBuilderIllegalConstDeclaration) { BytecodeGraphTester tester(isolate, script.begin()); v8::Local<v8::String> message = tester.CheckThrowsReturnMessage()->Get(); v8::Local<v8::String> expected_string = - v8_str(illegal_const_decl[i].return_value()); - CHECK( - message->Equals(CcTest::isolate()->GetCurrentContext(), expected_string) - .FromJust()); + NewString(illegal_const_decl[i].return_value()); + CHECK(message->Equals(v8_isolate()->GetCurrentContext(), expected_string) + .FromJust()); } } @@ -2928,12 +2809,11 @@ class CountBreakDebugDelegate : public v8::debug::DebugDelegate { int debug_break_count = 0; }; -TEST(BytecodeGraphBuilderDebuggerStatement) { +TEST_F(RunBytecodeGraphBuilderTest, BytecodeGraphBuilderDebuggerStatement) { CountBreakDebugDelegate delegate; - HandleAndZoneScope scope; - Isolate* isolate = scope.main_isolate(); + Isolate* isolate = i_isolate(); - v8::debug::SetDebugDelegate(CcTest::isolate(), &delegate); + v8::debug::SetDebugDelegate(v8_isolate(), &delegate); ExpectedSnippet<0> snippet = { "function f() {" @@ -2946,7 +2826,7 @@ TEST(BytecodeGraphBuilderDebuggerStatement) { auto callable = tester.GetCallable<>(); Handle<Object> return_value = callable().ToHandleChecked(); - v8::debug::SetDebugDelegate(CcTest::isolate(), nullptr); + v8::debug::SetDebugDelegate(v8_isolate(), nullptr); CHECK(return_value.is_identical_to(snippet.return_value())); CHECK_EQ(2, delegate.debug_break_count); } diff --git a/deps/v8/test/unittests/compiler/run-deopt-unittest.cc b/deps/v8/test/unittests/compiler/run-deopt-unittest.cc index 8535d7feb144e6..2c75b0455f03ab 100644 --- a/deps/v8/test/unittests/compiler/run-deopt-unittest.cc +++ b/deps/v8/test/unittests/compiler/run-deopt-unittest.cc @@ -31,7 +31,7 @@ class RunDeoptTest : public TestWithContext { }; TEST_F(RunDeoptTest, DeoptSimple) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; FunctionTester T(i_isolate(), "(function f(a) {" @@ -47,7 +47,7 @@ TEST_F(RunDeoptTest, DeoptSimple) { } TEST_F(RunDeoptTest, DeoptSimpleInExpr) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; FunctionTester T(i_isolate(), "(function f(a) {" @@ -64,7 
+64,7 @@ TEST_F(RunDeoptTest, DeoptSimpleInExpr) { } TEST_F(RunDeoptTest, DeoptExceptionHandlerCatch) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; FunctionTester T(i_isolate(), "(function f() {" @@ -82,7 +82,7 @@ TEST_F(RunDeoptTest, DeoptExceptionHandlerCatch) { } TEST_F(RunDeoptTest, DeoptExceptionHandlerFinally) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; FunctionTester T(i_isolate(), "(function f() {" @@ -100,7 +100,7 @@ TEST_F(RunDeoptTest, DeoptExceptionHandlerFinally) { } TEST_F(RunDeoptTest, DeoptTrivial) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; FunctionTester T(i_isolate(), "(function foo() {" diff --git a/deps/v8/test/unittests/compiler/run-jscalls-unittest.cc b/deps/v8/test/unittests/compiler/run-jscalls-unittest.cc index 65135e8344e5d5..b651e8e640e52b 100644 --- a/deps/v8/test/unittests/compiler/run-jscalls-unittest.cc +++ b/deps/v8/test/unittests/compiler/run-jscalls-unittest.cc @@ -137,7 +137,7 @@ TEST_F(RunJSCallsTest, ConstructorCall) { } TEST_F(RunJSCallsTest, RuntimeCall) { - FLAG_allow_natives_syntax = true; + v8_flags.allow_natives_syntax = true; FunctionTester T(i_isolate(), "(function(a) { return %IsJSReceiver(a); })"); T.CheckCall(T.false_value(), T.NewNumber(23), T.undefined()); diff --git a/deps/v8/test/unittests/compiler/scheduler-unittest.cc b/deps/v8/test/unittests/compiler/scheduler-unittest.cc index 2f2a6c3f4c3d83..b7254e39386c89 100644 --- a/deps/v8/test/unittests/compiler/scheduler-unittest.cc +++ b/deps/v8/test/unittests/compiler/scheduler-unittest.cc @@ -37,7 +37,7 @@ class SchedulerTest : public TestWithIsolateAndZone { js_(zone()) {} Schedule* ComputeAndVerifySchedule(size_t expected) { - if (FLAG_trace_turbo) { + if (v8_flags.trace_turbo) { SourcePositionTable table(graph()); NodeOriginTable table2(graph()); StdoutStream{} << AsJSON(*graph(), &table, &table2); @@ -46,7 +46,7 @@ class SchedulerTest : public TestWithIsolateAndZone { Schedule* schedule = Scheduler::ComputeSchedule( zone(), graph(), Scheduler::kSplitNodes, tick_counter(), nullptr); - if (FLAG_trace_turbo_scheduler) { + if (v8_flags.trace_turbo_scheduler) { StdoutStream{} << *schedule << std::endl; } ScheduleVerifier::Run(schedule); diff --git a/deps/v8/test/unittests/compiler/sloppy-equality-unittest.cc b/deps/v8/test/unittests/compiler/sloppy-equality-unittest.cc index a1b13ebb239ef9..b019c58a87cf3f 100644 --- a/deps/v8/test/unittests/compiler/sloppy-equality-unittest.cc +++ b/deps/v8/test/unittests/compiler/sloppy-equality-unittest.cc @@ -61,8 +61,8 @@ class TestSloppyEqualityFactory { }; TEST_F(SloppyEqualityTest, SloppyEqualityTest) { - FlagScope<bool> allow_natives_syntax(&i::FLAG_allow_natives_syntax, true); - FlagScope<bool> always_turbofan(&i::FLAG_always_turbofan, false); + FlagScope<bool> allow_natives_syntax(&i::v8_flags.allow_natives_syntax, true); + FlagScope<bool> always_turbofan(&i::v8_flags.always_turbofan, false); TestSloppyEqualityFactory f(zone()); // TODO(nicohartmann@, v8:5660): Collect more precise feedback for some useful // cases. 
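From here on the section switches to the second mechanical rename in this patch: the per-flag FLAG_foo globals become fields on a single flags struct instance, v8_flags. Call sites change spelling only, and save/restore idioms now take the address of the member instead of the global, as the hunks below show:

    // Before: one global per flag.
    i::FLAG_allow_natives_syntax = true;
    FlagScope<bool> always_turbofan(&i::FLAG_always_turbofan, false);

    // After: the same flags as members of one struct instance.
    i::v8_flags.allow_natives_syntax = true;
    FlagScope<bool> always_turbofan(&i::v8_flags.always_turbofan, false);

Keeping every flag in one object also gives helpers such as SaveFlags (used in the microtask-queue hunk below) a single thing to snapshot and restore, which is presumably part of the motivation for the rename.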
diff --git a/deps/v8/test/unittests/deoptimizer/deoptimization-unittest.cc b/deps/v8/test/unittests/deoptimizer/deoptimization-unittest.cc index 42a5cc0b8627e6..6db03235c8d55e 100644 --- a/deps/v8/test/unittests/deoptimizer/deoptimization-unittest.cc +++ b/deps/v8/test/unittests/deoptimizer/deoptimization-unittest.cc @@ -67,18 +67,18 @@ class DeoptimizationTest : public TestWithContext { class AlwaysOptimizeAllowNativesSyntaxNoInlining { public: AlwaysOptimizeAllowNativesSyntaxNoInlining() - : always_turbofan_(i::FLAG_always_turbofan), - allow_natives_syntax_(i::FLAG_allow_natives_syntax), - turbo_inlining_(i::FLAG_turbo_inlining) { - i::FLAG_always_turbofan = true; - i::FLAG_allow_natives_syntax = true; - i::FLAG_turbo_inlining = false; + : always_turbofan_(i::v8_flags.always_turbofan), + allow_natives_syntax_(i::v8_flags.allow_natives_syntax), + turbo_inlining_(i::v8_flags.turbo_inlining) { + i::v8_flags.always_turbofan = true; + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.turbo_inlining = false; } ~AlwaysOptimizeAllowNativesSyntaxNoInlining() { - i::FLAG_always_turbofan = always_turbofan_; - i::FLAG_allow_natives_syntax = allow_natives_syntax_; - i::FLAG_turbo_inlining = turbo_inlining_; + i::v8_flags.always_turbofan = always_turbofan_; + i::v8_flags.allow_natives_syntax = allow_natives_syntax_; + i::v8_flags.turbo_inlining = turbo_inlining_; } private: @@ -93,15 +93,15 @@ class AlwaysOptimizeAllowNativesSyntaxNoInlining { class AllowNativesSyntaxNoInlining { public: AllowNativesSyntaxNoInlining() - : allow_natives_syntax_(i::FLAG_allow_natives_syntax), - turbo_inlining_(i::FLAG_turbo_inlining) { - i::FLAG_allow_natives_syntax = true; - i::FLAG_turbo_inlining = false; + : allow_natives_syntax_(i::v8_flags.allow_natives_syntax), + turbo_inlining_(i::v8_flags.turbo_inlining) { + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.turbo_inlining = false; } ~AllowNativesSyntaxNoInlining() { - i::FLAG_allow_natives_syntax = allow_natives_syntax_; - i::FLAG_turbo_inlining = turbo_inlining_; + i::v8_flags.allow_natives_syntax = allow_natives_syntax_; + i::v8_flags.turbo_inlining = turbo_inlining_; } private: @@ -418,7 +418,7 @@ class DeoptimizationDisableConcurrentRecompilationTest " if (deopt) { count++; %DeoptimizeFunction(f); } return 8" "};"); } - static void SetUpTestSuite() { i::FLAG_concurrent_recompilation = false; } + static void SetUpTestSuite() { i::v8_flags.concurrent_recompilation = false; } void TestDeoptimizeBinaryOp(const char* binary_op) { v8::base::EmbeddedVector<char, SMALL_STRING_BUFFER_SIZE> f_source_buffer; v8::base::SNPrintF(f_source_buffer, "function f(x, y) { return x %s y; };", @@ -428,7 +428,7 @@ class DeoptimizationDisableConcurrentRecompilationTest AllowNativesSyntaxNoInlining options; // Compile function f and collect to type feedback to insert binary op stub // call in the optimized code. - i::FLAG_prepare_always_turbofan = true; + i::v8_flags.prepare_always_turbofan = true; CompileConstructorWithDeoptimizingValueOf(); RunJS(f_source); RunJS( @@ -437,7 +437,7 @@ class DeoptimizationDisableConcurrentRecompilationTest "};"); // Compile an optimized version of f. - i::FLAG_always_turbofan = true; + i::v8_flags.always_turbofan = true; RunJS(f_source); RunJS("f(7, new X());"); CHECK(!i_isolate()->use_optimizer() || @@ -464,7 +464,7 @@ TEST_F(DeoptimizationDisableConcurrentRecompilationTest, { // Compile function f and collect to type feedback to insert binary op // stub call in the optimized code. 
- i::FLAG_prepare_always_turbofan = true; + i::v8_flags.prepare_always_turbofan = true; RunJS( "var count = 0;" "var result = 0;" @@ -480,7 +480,7 @@ TEST_F(DeoptimizationDisableConcurrentRecompilationTest, "};"); // Compile an optimized version of f. - i::FLAG_always_turbofan = true; + i::v8_flags.always_turbofan = true; RunJS(f_source); RunJS("f('a+', new X());"); CHECK(!i_isolate()->use_optimizer() || @@ -629,7 +629,7 @@ TEST_F(DeoptimizationDisableConcurrentRecompilationTest, DeoptimizeCompare) { AllowNativesSyntaxNoInlining options; // Compile function f and collect to type feedback to insert compare ic // call in the optimized code. - i::FLAG_prepare_always_turbofan = true; + i::v8_flags.prepare_always_turbofan = true; RunJS( "var count = 0;" "var result = 0;" @@ -645,7 +645,7 @@ TEST_F(DeoptimizationDisableConcurrentRecompilationTest, DeoptimizeCompare) { "};"); // Compile an optimized version of f. - i::FLAG_always_turbofan = true; + i::v8_flags.always_turbofan = true; RunJS(f_source); RunJS("f('a', new X());"); CHECK(!i_isolate()->use_optimizer() || @@ -688,7 +688,7 @@ TEST_F(DeoptimizationDisableConcurrentRecompilationTest, AllowNativesSyntaxNoInlining options; // Compile functions and collect to type feedback to insert ic // calls in the optimized code. - i::FLAG_prepare_always_turbofan = true; + i::v8_flags.prepare_always_turbofan = true; RunJS( "var count = 0;" "var result = 0;" @@ -721,7 +721,7 @@ TEST_F(DeoptimizationDisableConcurrentRecompilationTest, "};"); // Compile an optimized version of the functions. - i::FLAG_always_turbofan = true; + i::v8_flags.always_turbofan = true; RunJS(f1_source); RunJS(g1_source); RunJS(f2_source); @@ -780,7 +780,7 @@ TEST_F(DeoptimizationDisableConcurrentRecompilationTest, AllowNativesSyntaxNoInlining options; // Compile functions and collect to type feedback to insert ic // calls in the optimized code. - i::FLAG_prepare_always_turbofan = true; + i::v8_flags.prepare_always_turbofan = true; RunJS( "var count = 0;" "var result = 0;" @@ -817,7 +817,7 @@ TEST_F(DeoptimizationDisableConcurrentRecompilationTest, "};"); // Compile an optimized version of the functions. 
- i::FLAG_always_turbofan = true; + i::v8_flags.always_turbofan = true; RunJS(f1_source); RunJS(g1_source); RunJS(f2_source); diff --git a/deps/v8/test/unittests/execution/microtask-queue-unittest.cc b/deps/v8/test/unittests/execution/microtask-queue-unittest.cc index 55e36c9f444d91..c8b3f4a3ce6b21 100644 --- a/deps/v8/test/unittests/execution/microtask-queue-unittest.cc +++ b/deps/v8/test/unittests/execution/microtask-queue-unittest.cc @@ -42,8 +42,8 @@ class WithFinalizationRegistryMixin : public TMixin { static void SetUpTestSuite() { CHECK_NULL(save_flags_); save_flags_ = new SaveFlags(); - FLAG_expose_gc = true; - FLAG_allow_natives_syntax = true; + v8_flags.expose_gc = true; + v8_flags.allow_natives_syntax = true; TMixin::SetUpTestSuite(); } diff --git a/deps/v8/test/unittests/execution/thread-termination-unittest.cc b/deps/v8/test/unittests/execution/thread-termination-unittest.cc index f9634b4a53d7e3..c05cb4c96f1a00 100644 --- a/deps/v8/test/unittests/execution/thread-termination-unittest.cc +++ b/deps/v8/test/unittests/execution/thread-termination-unittest.cc @@ -236,7 +236,7 @@ TEST_F(ThreadTerminationTest, TerminateBigIntMultiplication) { } TEST_F(ThreadTerminationTest, TerminateOptimizedBigIntMultiplication) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; TestTerminatingFromCurrentThread( "function foo(a, b) { return a * b; }" "%PrepareFunctionForOptimization(foo);" @@ -261,7 +261,7 @@ TEST_F(ThreadTerminationTest, TerminateBigIntDivision) { } TEST_F(ThreadTerminationTest, TerminateOptimizedBigIntDivision) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; TestTerminatingFromCurrentThread( "function foo(a, b) { return a / b; }" "%PrepareFunctionForOptimization(foo);" @@ -823,7 +823,7 @@ TEST_F(ThreadTerminationTest, TerminationInInnerTryCall) { } TEST_F(ThreadTerminationTest, TerminateAndTryCall) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; HandleScope scope(isolate()); Local<ObjectTemplate> global = CreateGlobalTemplate( isolate(), TerminateCurrentThread, DoLoopCancelTerminate); @@ -864,7 +864,7 @@ class ConsoleImpl : public debug::ConsoleDelegate { }; TEST_F(ThreadTerminationTest, TerminateConsole) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; ConsoleImpl console; debug::SetConsoleDelegate(isolate(), &console); HandleScope scope(isolate()); @@ -977,12 +977,12 @@ class TerminatorSleeperThread : public base::Thread { }; TEST_F(ThreadTerminationTest, TerminateRegExp) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; // We want to be stuck regexp execution, so no fallback to linear-time // engine. // TODO(mbid,v8:10765): Find a way to test interrupt support of the // experimental engine. 
- i::FLAG_enable_experimental_regexp_engine_on_excessive_backtracks = false; + i::v8_flags.enable_experimental_regexp_engine_on_excessive_backtracks = false; HandleScope scope(isolate()); Local<ObjectTemplate> global = CreateGlobalTemplate( diff --git a/deps/v8/test/unittests/flags/flag-definitions-unittest.cc b/deps/v8/test/unittests/flags/flag-definitions-unittest.cc index ee18fe00bdea9f..89022cc95e7301 100644 --- a/deps/v8/test/unittests/flags/flag-definitions-unittest.cc +++ b/deps/v8/test/unittests/flags/flag-definitions-unittest.cc @@ -41,10 +41,10 @@ class FlagDefinitionsTest : public ::testing::Test { }; void TestDefault() { - CHECK(FLAG_testing_bool_flag); - CHECK_EQ(13, FLAG_testing_int_flag); - CHECK_EQ(2.5, FLAG_testing_float_flag); - CHECK_EQ(0, strcmp(FLAG_testing_string_flag, "Hello, world!")); + CHECK(v8_flags.testing_bool_flag); + CHECK_EQ(13, v8_flags.testing_int_flag); + CHECK_EQ(2.5, v8_flags.testing_float_flag); + CHECK_EQ(0, strcmp(v8_flags.testing_string_flag, "Hello, world!")); } // This test must be executed first! @@ -65,12 +65,12 @@ TEST_F(FlagDefinitionsTest, Flags2) { CHECK_EQ(0, FlagList::SetFlagsFromCommandLine(&argc, const_cast<char**>(argv), false)); CHECK_EQ(8, argc); - CHECK(!FLAG_testing_bool_flag); - CHECK(FLAG_testing_maybe_bool_flag.value().has_value()); - CHECK(!FLAG_testing_maybe_bool_flag.value().value()); - CHECK_EQ(77, FLAG_testing_int_flag); - CHECK_EQ(.25, FLAG_testing_float_flag); - CHECK_EQ(0, strcmp(FLAG_testing_string_flag, "no way!")); + CHECK(!v8_flags.testing_bool_flag); + CHECK(v8_flags.testing_maybe_bool_flag.value().has_value()); + CHECK(!v8_flags.testing_maybe_bool_flag.value().value()); + CHECK_EQ(77, v8_flags.testing_int_flag); + CHECK_EQ(.25, v8_flags.testing_float_flag); + CHECK_EQ(0, strcmp(v8_flags.testing_string_flag, "no way!")); } TEST_F(FlagDefinitionsTest, Flags2b) { @@ -80,12 +80,12 @@ TEST_F(FlagDefinitionsTest, Flags2b) { "-testing_float_flag=.25 " "--testing_string_flag no_way! 
"; CHECK_EQ(0, FlagList::SetFlagsFromString(str, strlen(str))); - CHECK(!FLAG_testing_bool_flag); - CHECK(FLAG_testing_maybe_bool_flag.value().has_value()); - CHECK(!FLAG_testing_maybe_bool_flag.value().value()); - CHECK_EQ(77, FLAG_testing_int_flag); - CHECK_EQ(.25, FLAG_testing_float_flag); - CHECK_EQ(0, strcmp(FLAG_testing_string_flag, "no_way!")); + CHECK(!v8_flags.testing_bool_flag); + CHECK(v8_flags.testing_maybe_bool_flag.value().has_value()); + CHECK(!v8_flags.testing_maybe_bool_flag.value().value()); + CHECK_EQ(77, v8_flags.testing_int_flag); + CHECK_EQ(.25, v8_flags.testing_float_flag); + CHECK_EQ(0, strcmp(v8_flags.testing_string_flag, "no_way!")); } TEST_F(FlagDefinitionsTest, Flags3) { @@ -102,12 +102,12 @@ TEST_F(FlagDefinitionsTest, Flags3) { CHECK_EQ(0, FlagList::SetFlagsFromCommandLine(&argc, const_cast<char**>(argv), true)); CHECK_EQ(2, argc); - CHECK(FLAG_testing_bool_flag); - CHECK(FLAG_testing_maybe_bool_flag.value().has_value()); - CHECK(FLAG_testing_maybe_bool_flag.value().value()); - CHECK_EQ(-666, FLAG_testing_int_flag); - CHECK_EQ(-12E10, FLAG_testing_float_flag); - CHECK_EQ(0, strcmp(FLAG_testing_string_flag, "foo-bar")); + CHECK(v8_flags.testing_bool_flag); + CHECK(v8_flags.testing_maybe_bool_flag.value().has_value()); + CHECK(v8_flags.testing_maybe_bool_flag.value().value()); + CHECK_EQ(-666, v8_flags.testing_int_flag); + CHECK_EQ(-12E10, v8_flags.testing_float_flag); + CHECK_EQ(0, strcmp(v8_flags.testing_string_flag, "foo-bar")); } TEST_F(FlagDefinitionsTest, Flags3b) { @@ -117,12 +117,12 @@ TEST_F(FlagDefinitionsTest, Flags3b) { "--testing_float_flag -12E10 " "-testing-string-flag=foo-bar"; CHECK_EQ(0, FlagList::SetFlagsFromString(str, strlen(str))); - CHECK(FLAG_testing_bool_flag); - CHECK(FLAG_testing_maybe_bool_flag.value().has_value()); - CHECK(FLAG_testing_maybe_bool_flag.value().value()); - CHECK_EQ(-666, FLAG_testing_int_flag); - CHECK_EQ(-12E10, FLAG_testing_float_flag); - CHECK_EQ(0, strcmp(FLAG_testing_string_flag, "foo-bar")); + CHECK(v8_flags.testing_bool_flag); + CHECK(v8_flags.testing_maybe_bool_flag.value().has_value()); + CHECK(v8_flags.testing_maybe_bool_flag.value().value()); + CHECK_EQ(-666, v8_flags.testing_int_flag); + CHECK_EQ(-12E10, v8_flags.testing_float_flag); + CHECK_EQ(0, strcmp(v8_flags.testing_string_flag, "foo-bar")); } TEST_F(FlagDefinitionsTest, Flags4) { @@ -131,13 +131,13 @@ TEST_F(FlagDefinitionsTest, Flags4) { CHECK_EQ(0, FlagList::SetFlagsFromCommandLine(&argc, const_cast<char**>(argv), true)); CHECK_EQ(2, argc); - CHECK(!FLAG_testing_maybe_bool_flag.value().has_value()); + CHECK(!v8_flags.testing_maybe_bool_flag.value().has_value()); } TEST_F(FlagDefinitionsTest, Flags4b) { const char* str = "--testing_bool_flag --foo"; CHECK_EQ(2, FlagList::SetFlagsFromString(str, strlen(str))); - CHECK(!FLAG_testing_maybe_bool_flag.value().has_value()); + CHECK(!v8_flags.testing_maybe_bool_flag.value().has_value()); } TEST_F(FlagDefinitionsTest, Flags5) { @@ -179,44 +179,45 @@ TEST_F(FlagDefinitionsTest, FlagsRemoveIncomplete) { } TEST_F(FlagDefinitionsTest, FlagsJitlessImplications) { - if (FLAG_jitless) { + if (v8_flags.jitless) { // Double-check implications work as expected. Our implication system is // fairly primitive and can break easily depending on the implication // definition order in flag-definitions.h. 
- CHECK(!FLAG_turbofan); - CHECK(!FLAG_maglev); - CHECK(!FLAG_sparkplug); + CHECK(!v8_flags.turbofan); + CHECK(!v8_flags.maglev); + CHECK(!v8_flags.sparkplug); #if V8_ENABLE_WEBASSEMBLY - CHECK(!FLAG_validate_asm); - CHECK(!FLAG_asm_wasm_lazy_compilation); - CHECK(!FLAG_wasm_lazy_compilation); + CHECK(!v8_flags.validate_asm); + CHECK(!v8_flags.asm_wasm_lazy_compilation); + CHECK(!v8_flags.wasm_lazy_compilation); #endif // V8_ENABLE_WEBASSEMBLY } } TEST_F(FlagDefinitionsTest, FreezeFlags) { // Before freezing, we can arbitrarily change values. - CHECK_EQ(13, FLAG_testing_int_flag); // Initial (default) value. - FLAG_testing_int_flag = 27; - CHECK_EQ(27, FLAG_testing_int_flag); + CHECK_EQ(13, v8_flags.testing_int_flag); // Initial (default) value. + v8_flags.testing_int_flag = 27; + CHECK_EQ(27, v8_flags.testing_int_flag); // Get a direct pointer to the flag storage. - static_assert(sizeof(FLAG_testing_int_flag) == sizeof(int)); - int* direct_testing_int_ptr = reinterpret_cast<int*>(&FLAG_testing_int_flag); + static_assert(sizeof(v8_flags.testing_int_flag) == sizeof(int)); + int* direct_testing_int_ptr = + reinterpret_cast<int*>(&v8_flags.testing_int_flag); CHECK_EQ(27, *direct_testing_int_ptr); *direct_testing_int_ptr = 42; - CHECK_EQ(42, FLAG_testing_int_flag); + CHECK_EQ(42, v8_flags.testing_int_flag); // Now freeze flags. Accesses via the API and via the direct pointer should // both crash. FlagList::FreezeFlags(); // Accessing via the API fails with a CHECK. - ASSERT_DEATH_IF_SUPPORTED(FLAG_testing_int_flag = 41, + ASSERT_DEATH_IF_SUPPORTED(v8_flags.testing_int_flag = 41, "Check failed: !IsFrozen\\(\\)"); // Writing to the memory directly results in a segfault. ASSERT_DEATH_IF_SUPPORTED(*direct_testing_int_ptr = 41, ""); // We can still read the old value. - CHECK_EQ(42, FLAG_testing_int_flag); + CHECK_EQ(42, v8_flags.testing_int_flag); CHECK_EQ(42, *direct_testing_int_ptr); } diff --git a/deps/v8/test/unittests/heap/base/basic-slot-set-unittest.cc b/deps/v8/test/unittests/heap/base/basic-slot-set-unittest.cc new file mode 100644 index 00000000000000..ea493de3f02af0 --- /dev/null +++ b/deps/v8/test/unittests/heap/base/basic-slot-set-unittest.cc @@ -0,0 +1,198 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
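// Editorial aside (not part of the patch): the new basic-slot-set-unittest.cc
// covers BasicSlotSet<Granularity>, the template the heap's slot sets are
// built on. Sizing arithmetic, assuming the usual bucket geometry of 32 cells
// of 32 bits each (an assumption; see basic-slot-set.h) and an 8-byte
// kTestGranularity on a 64-bit build:
//   slots per test page = (1 << 17) / 8 = 16384
//   slots per bucket    = 32 * 32       = 1024
//   kBucketsTestPage    = 16384 / 1024  = 16
// The tests below allocate, populate, iterate, and free sets of that size.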
+ +#include "src/heap/base/basic-slot-set.h" + +#include <limits> +#include <map> + +#include "testing/gtest/include/gtest/gtest.h" + +namespace heap { +namespace base { + +static constexpr size_t kTestGranularity = sizeof(void*); +using TestSlotSet = ::heap::base::BasicSlotSet<kTestGranularity>; +static constexpr size_t kTestPageSize = 1 << 17; +static constexpr size_t kBucketsTestPage = + TestSlotSet::BucketsForSize(kTestPageSize); + +TEST(BasicSlotSet, InsertAndLookup1) { + TestSlotSet* set = TestSlotSet::Allocate(kBucketsTestPage); + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + EXPECT_FALSE(set->Lookup(i)); + } + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + set->Insert<TestSlotSet::AccessMode::ATOMIC>(i); + } + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + EXPECT_TRUE(set->Lookup(i)); + } + TestSlotSet::Delete(set, kBucketsTestPage); +} + +TEST(BasicSlotSet, InsertAndLookup2) { + TestSlotSet* set = TestSlotSet::Allocate(kBucketsTestPage); + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i % 7 == 0) { + set->Insert<TestSlotSet::AccessMode::ATOMIC>(i); + } + } + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i % 7 == 0) { + EXPECT_TRUE(set->Lookup(i)); + } else { + EXPECT_FALSE(set->Lookup(i)); + } + } + TestSlotSet::Delete(set, kBucketsTestPage); +} + +TEST(BasicSlotSet, Iterate) { + TestSlotSet* set = TestSlotSet::Allocate(kBucketsTestPage); + + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i % 7 == 0) { + set->Insert<TestSlotSet::AccessMode::ATOMIC>(i); + } + } + + set->Iterate( + 0, 0, kBucketsTestPage, + [](uintptr_t slot) { + if (slot % 3 == 0) { + return KEEP_SLOT; + } else { + return REMOVE_SLOT; + } + }, + TestSlotSet::KEEP_EMPTY_BUCKETS); + + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i % 21 == 0) { + EXPECT_TRUE(set->Lookup(i)); + } else { + EXPECT_FALSE(set->Lookup(i)); + } + } + + TestSlotSet::Delete(set, kBucketsTestPage); +} + +TEST(BasicSlotSet, IterateFromHalfway) { + TestSlotSet* set = TestSlotSet::Allocate(kBucketsTestPage); + + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i % 7 == 0) { + set->Insert<TestSlotSet::AccessMode::ATOMIC>(i); + } + } + + set->Iterate( + 0, kBucketsTestPage / 2, kBucketsTestPage, + [](uintptr_t slot) { + if (slot % 3 == 0) { + return KEEP_SLOT; + } else { + return REMOVE_SLOT; + } + }, + TestSlotSet::KEEP_EMPTY_BUCKETS); + + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i < kTestPageSize / 2 && i % 7 == 0) { + EXPECT_TRUE(set->Lookup(i)); + } else if (i >= kTestPageSize / 2 && i % 21 == 0) { + EXPECT_TRUE(set->Lookup(i)); + } else { + EXPECT_FALSE(set->Lookup(i)); + } + } + + TestSlotSet::Delete(set, kBucketsTestPage); +} + +TEST(BasicSlotSet, Remove) { + TestSlotSet* set = TestSlotSet::Allocate(kBucketsTestPage); + + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i % 7 == 0) { + set->Insert<TestSlotSet::AccessMode::ATOMIC>(i); + } + } + + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i % 3 != 0) { + set->Remove(i); + } + } + + for (size_t i = 0; i < kTestPageSize; i += kTestGranularity) { + if (i % 21 == 0) { + EXPECT_TRUE(set->Lookup(i)); + } else { + EXPECT_FALSE(set->Lookup(i)); + } + } + + TestSlotSet::Delete(set, kBucketsTestPage); +} + +namespace { +void CheckRemoveRangeOn(uint32_t start, uint32_t end) { + TestSlotSet* set = TestSlotSet::Allocate(kBucketsTestPage); + uint32_t first = start == 0 ? 
0 : start - kTestGranularity; + uint32_t last = end == kTestPageSize ? end - kTestGranularity : end; + for (const auto mode : + {TestSlotSet::FREE_EMPTY_BUCKETS, TestSlotSet::KEEP_EMPTY_BUCKETS}) { + for (uint32_t i = first; i <= last; i += kTestGranularity) { + set->Insert<TestSlotSet::AccessMode::ATOMIC>(i); + } + set->RemoveRange(start, end, kBucketsTestPage, mode); + if (first != start) { + EXPECT_TRUE(set->Lookup(first)); + } + if (last == end) { + EXPECT_TRUE(set->Lookup(last)); + } + for (size_t i = start; i < end; i += kTestGranularity) { + EXPECT_FALSE(set->Lookup(i)); + } + } + TestSlotSet::Delete(set, kBucketsTestPage); +} +} // namespace + +TEST(BasicSlotSet, RemoveRange) { + CheckRemoveRangeOn(0, kTestPageSize); + CheckRemoveRangeOn(1 * kTestGranularity, 1023 * kTestGranularity); + for (uint32_t start = 0; start <= 32; start++) { + CheckRemoveRangeOn(start * kTestGranularity, + (start + 1) * kTestGranularity); + CheckRemoveRangeOn(start * kTestGranularity, + (start + 2) * kTestGranularity); + const uint32_t kEnds[] = {32, 64, 100, 128, 1024, 1500, 2048}; + for (size_t i = 0; i < sizeof(kEnds) / sizeof(uint32_t); i++) { + for (int k = -3; k <= 3; k++) { + uint32_t end = (kEnds[i] + k); + if (start < end) { + CheckRemoveRangeOn(start * kTestGranularity, end * kTestGranularity); + } + } + } + } + TestSlotSet* set = TestSlotSet::Allocate(kBucketsTestPage); + for (const auto mode : + {TestSlotSet::FREE_EMPTY_BUCKETS, TestSlotSet::KEEP_EMPTY_BUCKETS}) { + set->Insert<TestSlotSet::AccessMode::ATOMIC>(kTestPageSize / 2); + set->RemoveRange(0, kTestPageSize, kBucketsTestPage, mode); + for (uint32_t i = 0; i < kTestPageSize; i += kTestGranularity) { + EXPECT_FALSE(set->Lookup(i)); + } + } + TestSlotSet::Delete(set, kBucketsTestPage); +} + +} // namespace base +} // namespace heap diff --git a/deps/v8/test/unittests/heap/conservative-stack-visitor-unittest.cc b/deps/v8/test/unittests/heap/conservative-stack-visitor-unittest.cc new file mode 100644 index 00000000000000..937b58161841c0 --- /dev/null +++ b/deps/v8/test/unittests/heap/conservative-stack-visitor-unittest.cc @@ -0,0 +1,230 @@ +// Copyright 2022 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#include "src/heap/conservative-stack-visitor.h" + +#include "test/unittests/heap/heap-utils.h" +#include "test/unittests/test-utils.h" + +namespace v8 { +namespace internal { + +namespace { + +class RecordingVisitor final : public RootVisitor { + public: + V8_NOINLINE explicit RecordingVisitor(Isolate* isolate) { + // Allocate the object. 
+ auto h = isolate->factory()->NewFixedArray(256, AllocationType::kOld); + the_object_ = h->GetHeapObject(); + base_address_ = the_object_.address(); + tagged_address_ = the_object_.ptr(); + inner_address_ = base_address_ + 42 * kTaggedSize; +#ifdef V8_COMPRESS_POINTERS + compr_address_ = static_cast<uint32_t>( + V8HeapCompressionScheme::CompressTagged(base_address_)); + compr_inner_ = static_cast<uint32_t>( + V8HeapCompressionScheme::CompressTagged(inner_address_)); +#else + compr_address_ = static_cast<uint32_t>(base_address_); + compr_inner_ = static_cast<uint32_t>(inner_address_); +#endif + } + + void VisitRootPointers(Root root, const char* description, + FullObjectSlot start, FullObjectSlot end) override { + for (FullObjectSlot current = start; current != end; ++current) { + if (*current == the_object_) found_ = true; + } + } + + void Reset() { found_ = false; } + bool found() const { return found_; } + + Address base_address() const { return base_address_; } + Address tagged_address() const { return tagged_address_; } + Address inner_address() const { return inner_address_; } + uint32_t compr_address() const { return compr_address_; } + uint32_t compr_inner() const { return compr_inner_; } + + private: + // Some heap object that we want to check if it is visited or not. + HeapObject the_object_; + + // Addresses of this object. + Address base_address_; // Uncompressed base address + Address tagged_address_; // Tagged uncompressed base address + Address inner_address_; // Some inner address + uint32_t compr_address_; // Compressed base address + uint32_t compr_inner_; // Compressed inner address + + // Has the object been found? + bool found_ = false; +}; + +} // namespace + +using ConservativeStackVisitorTest = TestWithHeapInternalsAndContext; + +// In the following, we avoid negative tests, i.e., tests checking that objects +// are not visited when there are no pointers to them on the stack. Such tests +// are generally fragile and could fail on some platforms because of unforeseen +// compiler optimizations. In general we cannot ensure in a portable way that +// no pointer remained on the stack (or in some register) after the +// initialization of RecordingVisitor and until the invocation of +// Stack::IteratePointers. + +TEST_F(ConservativeStackVisitorTest, DirectBasePointer) { + auto recorder = std::make_unique<RecordingVisitor>(isolate()); + + // Ensure the heap is iterable before CSS. + SafepointScope safepoint_scope(heap()); + heap()->MakeHeapIterable(); + + { + volatile Address ptr = recorder->base_address(); + + ConservativeStackVisitor stack_visitor(isolate(), recorder.get()); + isolate()->heap()->stack().IteratePointers(&stack_visitor); + + // Make sure to keep the pointer alive. + EXPECT_NE(kNullAddress, ptr); + } + + // The object should have been visited. + EXPECT_TRUE(recorder->found()); +} + +TEST_F(ConservativeStackVisitorTest, TaggedBasePointer) { + auto recorder = std::make_unique<RecordingVisitor>(isolate()); + + // Ensure the heap is iterable before CSS. + SafepointScope safepoint_scope(heap()); + heap()->MakeHeapIterable(); + + { + volatile Address ptr = recorder->tagged_address(); + + ConservativeStackVisitor stack_visitor(isolate(), recorder.get()); + isolate()->heap()->stack().IteratePointers(&stack_visitor); + + // Make sure to keep the pointer alive. + EXPECT_NE(kNullAddress, ptr); + } + + // The object should have been visited. 
+ EXPECT_TRUE(recorder->found()); +} + +TEST_F(ConservativeStackVisitorTest, InnerPointer) { + auto recorder = std::make_unique<RecordingVisitor>(isolate()); + + // Ensure the heap is iterable before CSS. + SafepointScope safepoint_scope(heap()); + heap()->MakeHeapIterable(); + + { + volatile Address ptr = recorder->inner_address(); + + ConservativeStackVisitor stack_visitor(isolate(), recorder.get()); + isolate()->heap()->stack().IteratePointers(&stack_visitor); + + // Make sure to keep the pointer alive. + EXPECT_NE(kNullAddress, ptr); + } + + // The object should have been visited. + EXPECT_TRUE(recorder->found()); +} + +#ifdef V8_COMPRESS_POINTERS + +TEST_F(ConservativeStackVisitorTest, HalfWord1) { + auto recorder = std::make_unique<RecordingVisitor>(isolate()); + + // Ensure the heap is iterable before CSS. + SafepointScope safepoint_scope(heap()); + heap()->MakeHeapIterable(); + + { + volatile uint32_t ptr[] = {recorder->compr_address(), 0}; + + ConservativeStackVisitor stack_visitor(isolate(), recorder.get()); + isolate()->heap()->stack().IteratePointers(&stack_visitor); + + // Make sure to keep the pointer alive. + EXPECT_NE(static_cast<uint32_t>(0), ptr[0]); + } + + // The object should have been visited. + EXPECT_TRUE(recorder->found()); +} + +TEST_F(ConservativeStackVisitorTest, HalfWord2) { + auto recorder = std::make_unique<RecordingVisitor>(isolate()); + + // Ensure the heap is iterable before CSS. + SafepointScope safepoint_scope(heap()); + heap()->MakeHeapIterable(); + + { + volatile uint32_t ptr[] = {0, recorder->compr_address()}; + + ConservativeStackVisitor stack_visitor(isolate(), recorder.get()); + isolate()->heap()->stack().IteratePointers(&stack_visitor); + + // Make sure to keep the pointer alive. + EXPECT_NE(static_cast<uint32_t>(0), ptr[1]); + } + + // The object should have been visited. + EXPECT_TRUE(recorder->found()); +} + +TEST_F(ConservativeStackVisitorTest, InnerHalfWord1) { + auto recorder = std::make_unique<RecordingVisitor>(isolate()); + + // Ensure the heap is iterable before CSS. + SafepointScope safepoint_scope(heap()); + heap()->MakeHeapIterable(); + + { + volatile uint32_t ptr[] = {recorder->compr_inner(), 0}; + + ConservativeStackVisitor stack_visitor(isolate(), recorder.get()); + isolate()->heap()->stack().IteratePointers(&stack_visitor); + + // Make sure to keep the pointer alive. + EXPECT_NE(static_cast<uint32_t>(0), ptr[0]); + } + + // The object should have been visited. + EXPECT_TRUE(recorder->found()); +} + +TEST_F(ConservativeStackVisitorTest, InnerHalfWord2) { + auto recorder = std::make_unique<RecordingVisitor>(isolate()); + + // Ensure the heap is iterable before CSS. + SafepointScope safepoint_scope(heap()); + heap()->MakeHeapIterable(); + + { + volatile uint32_t ptr[] = {0, recorder->compr_inner()}; + + ConservativeStackVisitor stack_visitor(isolate(), recorder.get()); + isolate()->heap()->stack().IteratePointers(&stack_visitor); + + // Make sure to keep the pointer alive. + EXPECT_NE(static_cast<uint32_t>(0), ptr[1]); + } + + // The object should have been visited. 
+ EXPECT_TRUE(recorder->found()); +} + +#endif // V8_COMPRESS_POINTERS + +} // namespace internal +} // namespace v8 diff --git a/deps/v8/test/unittests/heap/cppgc-js/traced-reference-unittest.cc b/deps/v8/test/unittests/heap/cppgc-js/traced-reference-unittest.cc index 3521f848a2bbe2..eec4069ad14446 100644 --- a/deps/v8/test/unittests/heap/cppgc-js/traced-reference-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc-js/traced-reference-unittest.cc @@ -7,6 +7,7 @@ #include "src/api/api-inl.h" #include "src/handles/global-handles.h" #include "src/heap/cppgc/visitor.h" +#include "src/heap/marking-state-inl.h" #include "test/unittests/heap/heap-utils.h" #include "test/unittests/test-utils.h" #include "testing/gtest/include/gtest/gtest.h" diff --git a/deps/v8/test/unittests/heap/cppgc-js/unified-heap-unittest.cc b/deps/v8/test/unittests/heap/cppgc-js/unified-heap-unittest.cc index e904f95a55f667..4be30638239100 100644 --- a/deps/v8/test/unittests/heap/cppgc-js/unified-heap-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc-js/unified-heap-unittest.cc @@ -389,5 +389,36 @@ TEST_F(UnifiedHeapTest, InConstructionObjectReferringToGlobalHandle) { } } +namespace { + +class ResetReferenceInDestructorObject final + : public cppgc::GarbageCollected<ResetReferenceInDestructorObject> { + public: + ResetReferenceInDestructorObject(Heap* heap, v8::Local<v8::Object> wrapper) + : wrapper_(reinterpret_cast<v8::Isolate*>(heap->isolate()), wrapper) {} + ~ResetReferenceInDestructorObject() { wrapper_.Reset(); } + + void Trace(cppgc::Visitor* visitor) const { visitor->Trace(wrapper_); } + + private: + TracedReference<v8::Object> wrapper_; +}; + +} // namespace + +TEST_F(UnifiedHeapTest, ResetReferenceInDestructor) { + v8::HandleScope handle_scope(v8_isolate()); + v8::Local<v8::Context> context = v8::Context::New(v8_isolate()); + v8::Context::Scope context_scope(context); + { + v8::HandleScope inner_handle_scope(v8_isolate()); + auto local = v8::Object::New(v8_isolate()); + cppgc::MakeGarbageCollected<ResetReferenceInDestructorObject>( + allocation_handle(), + reinterpret_cast<i::Isolate*>(v8_isolate())->heap(), local); + } + CollectGarbageWithoutEmbedderStack(cppgc::Heap::SweepingType::kAtomic); +} + } // namespace internal } // namespace v8 diff --git a/deps/v8/test/unittests/heap/cppgc/compactor-unittest.cc b/deps/v8/test/unittests/heap/cppgc/compactor-unittest.cc index a1482cf7097a6d..1df053ae64e9af 100644 --- a/deps/v8/test/unittests/heap/cppgc/compactor-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/compactor-unittest.cc @@ -74,9 +74,8 @@ class CompactorTest : public testing::TestWithPlatform { void StartCompaction() { compactor().EnableForNextGCForTesting(); - compactor().InitializeIfShouldCompact( - GarbageCollector::Config::MarkingType::kIncremental, - GarbageCollector::Config::StackState::kNoHeapPointers); + compactor().InitializeIfShouldCompact(GCConfig::MarkingType::kIncremental, + StackState::kNoHeapPointers); EXPECT_TRUE(compactor().IsEnabledForTesting()); } @@ -86,18 +85,17 @@ class CompactorTest : public testing::TestWithPlatform { CompactableGCed::g_destructor_callcount = 0u; StartCompaction(); heap()->StartIncrementalGarbageCollection( - GarbageCollector::Config::PreciseIncrementalConfig()); + GCConfig::PreciseIncrementalConfig()); } void EndGC() { - heap()->marker()->FinishMarking( - GarbageCollector::Config::StackState::kNoHeapPointers); + heap()->marker()->FinishMarking(StackState::kNoHeapPointers); heap()->GetMarkerRefForTesting().reset(); FinishCompaction(); // Sweeping also verifies the 
object start bitmap. - const Sweeper::SweepingConfig sweeping_config{ - Sweeper::SweepingConfig::SweepingType::kAtomic, - Sweeper::SweepingConfig::CompactableSpaceHandling::kIgnore}; + const SweepingConfig sweeping_config{ + SweepingConfig::SweepingType::kAtomic, + SweepingConfig::CompactableSpaceHandling::kIgnore}; heap()->sweeper().Start(sweeping_config); } @@ -125,13 +123,12 @@ namespace internal { TEST_F(CompactorTest, NothingToCompact) { StartCompaction(); heap()->stats_collector()->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + CollectionType::kMajor, GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); heap()->stats_collector()->NotifyMarkingCompleted(0); FinishCompaction(); heap()->stats_collector()->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + GCConfig::SweepingType::kAtomic); } TEST_F(CompactorTest, NonEmptySpaceAllLive) { diff --git a/deps/v8/test/unittests/heap/cppgc/concurrent-marking-unittest.cc b/deps/v8/test/unittests/heap/cppgc/concurrent-marking-unittest.cc index 3a8d0307d02b71..617d78e253d298 100644 --- a/deps/v8/test/unittests/heap/cppgc/concurrent-marking-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/concurrent-marking-unittest.cc @@ -27,20 +27,15 @@ class ConcurrentMarkingTest : public testing::TestWithHeap { static constexpr int kNumStep = 10; #endif // defined(THREAD_SANITIZER) - using Config = Heap::Config; - static constexpr Config ConcurrentPreciseConfig = { - Config::CollectionType::kMajor, Config::StackState::kNoHeapPointers, - Config::MarkingType::kIncrementalAndConcurrent, - Config::SweepingType::kIncrementalAndConcurrent}; - void StartConcurrentGC() { Heap* heap = Heap::From(GetHeap()); heap->DisableHeapGrowingForTesting(); - heap->StartIncrementalGarbageCollection(ConcurrentPreciseConfig); + heap->StartIncrementalGarbageCollection( + GCConfig::PreciseConcurrentConfig()); heap->marker()->SetMainThreadMarkingDisabledForTesting(true); } - bool SingleStep(Config::StackState stack_state) { + bool SingleStep(StackState stack_state) { MarkerBase* marker = Heap::From(GetHeap())->marker(); DCHECK(marker); return marker->IncrementalMarkingStepForTesting(stack_state); @@ -50,14 +45,10 @@ class ConcurrentMarkingTest : public testing::TestWithHeap { Heap* heap = Heap::From(GetHeap()); heap->marker()->SetMainThreadMarkingDisabledForTesting(false); heap->FinalizeIncrementalGarbageCollectionIfRunning( - ConcurrentPreciseConfig); + GCConfig::PreciseConcurrentConfig()); } }; -// static -constexpr ConcurrentMarkingTest::Config - ConcurrentMarkingTest::ConcurrentPreciseConfig; - template <typename T> struct GCedHolder : public GarbageCollected<GCedHolder<T>> { void Trace(cppgc::Visitor* visitor) const { visitor->Trace(object); } @@ -110,7 +101,7 @@ TEST_F(ConcurrentMarkingTest, MarkingObjects) { last_object = &(*last_object)->child_; } // Use SingleStep to re-post concurrent jobs. - SingleStep(Config::StackState::kNoHeapPointers); + SingleStep(StackState::kNoHeapPointers); } FinishGC(); } @@ -129,7 +120,7 @@ TEST_F(ConcurrentMarkingTest, MarkingInConstructionObjects) { }); } // Use SingleStep to re-post concurrent jobs. - SingleStep(Config::StackState::kNoHeapPointers); + SingleStep(StackState::kNoHeapPointers); } FinishGC(); } @@ -145,7 +136,7 @@ TEST_F(ConcurrentMarkingTest, MarkingMixinObjects) { last_object = &(*last_object)->child_; } // Use SingleStep to re-post concurrent jobs. 
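// Editorial aside (not part of the patch): from this file onward the cppgc
// hunks apply one uniform refactor -- the nested configuration types were
// hoisted to top-level names, so qualified spellings shrink without any
// behavior change, e.g.:
//   GarbageCollector::Config             -> GCConfig
//   Marker::MarkingConfig                -> MarkingConfig
//   Sweeper::SweepingConfig              -> SweepingConfig
//   Config::StackState::kNoHeapPointers -> StackState::kNoHeapPointers
//   Config::CollectionType::kMajor      -> CollectionType::kMajor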
- SingleStep(Config::StackState::kNoHeapPointers); + SingleStep(StackState::kNoHeapPointers); } FinishGC(); } diff --git a/deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc b/deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc index 3e417f7a5480b1..d2ea7390165dde 100644 --- a/deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc @@ -73,14 +73,13 @@ class ConcurrentSweeperTest : public testing::TestWithHeap { // Pretend do finish marking as StatsCollector verifies that Notify* // methods are called in the right order. heap->stats_collector()->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + CollectionType::kMajor, GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); heap->stats_collector()->NotifyMarkingCompleted(0); Sweeper& sweeper = heap->sweeper(); - const Sweeper::SweepingConfig sweeping_config{ - Sweeper::SweepingConfig::SweepingType::kIncrementalAndConcurrent, - Sweeper::SweepingConfig::CompactableSpaceHandling::kSweep}; + const SweepingConfig sweeping_config{ + SweepingConfig::SweepingType::kIncrementalAndConcurrent, + SweepingConfig::CompactableSpaceHandling::kSweep}; sweeper.Start(sweeping_config); } diff --git a/deps/v8/test/unittests/heap/cppgc/ephemeron-pair-unittest.cc b/deps/v8/test/unittests/heap/cppgc/ephemeron-pair-unittest.cc index fd04531f3a5716..41c98bc64f5bd5 100644 --- a/deps/v8/test/unittests/heap/cppgc/ephemeron-pair-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/ephemeron-pair-unittest.cc @@ -49,11 +49,8 @@ class EphemeronHolderTraceEphemeron }; class EphemeronPairTest : public testing::TestWithHeap { - using MarkingConfig = Marker::MarkingConfig; - - static constexpr Marker::MarkingConfig IncrementalPreciseMarkingConfig = { - MarkingConfig::CollectionType::kMajor, - MarkingConfig::StackState::kNoHeapPointers, + static constexpr MarkingConfig IncrementalPreciseMarkingConfig = { + CollectionType::kMajor, StackState::kNoHeapPointers, MarkingConfig::MarkingType::kIncremental}; public: @@ -63,11 +60,11 @@ class EphemeronPairTest : public testing::TestWithHeap { } void FinishMarking() { - marker_->FinishMarking(MarkingConfig::StackState::kNoHeapPointers); + marker_->FinishMarking(StackState::kNoHeapPointers); // Pretend do finish sweeping as StatsCollector verifies that Notify* // methods are called in the right order. 
Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kIncremental); + GCConfig::SweepingType::kIncremental); } void InitializeMarker(HeapBase& heap, cppgc::Platform* platform) { @@ -81,15 +78,14 @@ class EphemeronPairTest : public testing::TestWithHeap { private: bool SingleStep() { return marker_->IncrementalMarkingStepForTesting( - MarkingConfig::StackState::kNoHeapPointers); + StackState::kNoHeapPointers); } std::unique_ptr<Marker> marker_; }; // static -constexpr Marker::MarkingConfig - EphemeronPairTest::IncrementalPreciseMarkingConfig; +constexpr MarkingConfig EphemeronPairTest::IncrementalPreciseMarkingConfig; } // namespace diff --git a/deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc b/deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc index 529a42aef697e7..301612feb1612e 100644 --- a/deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc @@ -18,9 +18,8 @@ namespace { class MockGarbageCollector : public GarbageCollector { public: - MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override)); - MOCK_METHOD(void, StartIncrementalGarbageCollection, - (GarbageCollector::Config), (override)); + MOCK_METHOD(void, CollectGarbage, (GCConfig), (override)); + MOCK_METHOD(void, StartIncrementalGarbageCollection, (GCConfig), (override)); MOCK_METHOD(size_t, epoch, (), (const, override)); MOCK_METHOD(const EmbedderStackState*, override_stack_state, (), (const, override)); @@ -73,9 +72,8 @@ TEST(GCInvokerTest, PrecideGCIsInvokedSynchronously) { GCInvoker invoker(&gc, &platform, cppgc::Heap::StackSupport::kNoConservativeStackScan); EXPECT_CALL(gc, CollectGarbage(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kNoHeapPointers))); - invoker.CollectGarbage(GarbageCollector::Config::PreciseAtomicConfig()); + &GCConfig::stack_state, StackState::kNoHeapPointers))); + invoker.CollectGarbage(GCConfig::PreciseAtomicConfig()); } TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) { @@ -85,9 +83,8 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) { cppgc::Heap::StackSupport::kSupportsConservativeStackScan); EXPECT_CALL( gc, CollectGarbage(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kMayContainHeapPointers))); - invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig()); + &GCConfig::stack_state, StackState::kMayContainHeapPointers))); + invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig()); } TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) { @@ -100,7 +97,7 @@ TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) { EXPECT_CALL(gc, epoch).WillOnce(::testing::Return(0)); EXPECT_CALL(*static_cast<MockTaskRunner*>(runner.get()), PostNonNestableTask(::testing::_)); - invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig()); + invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig()); } TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) { @@ -110,7 +107,7 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) { cppgc::Heap::StackSupport::kNoConservativeStackScan); EXPECT_CALL(gc, epoch).WillRepeatedly(::testing::Return(0)); EXPECT_CALL(gc, CollectGarbage); - invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig()); + invoker.CollectGarbage(GCConfig::ConservativeAtomicConfig()); 
platform.RunAllForegroundTasks(); } @@ -125,20 +122,18 @@ TEST(GCInvokerTest, IncrementalGCIsStarted) { cppgc::Heap::StackSupport::kSupportsConservativeStackScan); EXPECT_CALL( gc, StartIncrementalGarbageCollection(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kMayContainHeapPointers))); + &GCConfig::stack_state, StackState::kMayContainHeapPointers))); invoker_with_support.StartIncrementalGarbageCollection( - GarbageCollector::Config::ConservativeIncrementalConfig()); + GCConfig::ConservativeIncrementalConfig()); // Conservative stack scanning *not* supported. GCInvoker invoker_without_support( &gc, &platform, cppgc::Heap::StackSupport::kNoConservativeStackScan); - EXPECT_CALL( - gc, StartIncrementalGarbageCollection(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kMayContainHeapPointers))) + EXPECT_CALL(gc, + StartIncrementalGarbageCollection(::testing::Field( + &GCConfig::stack_state, StackState::kMayContainHeapPointers))) .Times(0); invoker_without_support.StartIncrementalGarbageCollection( - GarbageCollector::Config::ConservativeIncrementalConfig()); + GCConfig::ConservativeIncrementalConfig()); } } // namespace internal diff --git a/deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc b/deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc index c980791af99969..5901482e0be840 100644 --- a/deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc @@ -22,19 +22,16 @@ class FakeGarbageCollector : public GarbageCollector { void SetLiveBytes(size_t live_bytes) { live_bytes_ = live_bytes; } - void CollectGarbage(GarbageCollector::Config config) override { - stats_collector_->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + void CollectGarbage(GCConfig config) override { + stats_collector_->NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); stats_collector_->NotifyMarkingCompleted(live_bytes_); - stats_collector_->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats_collector_->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); callcount_++; } - void StartIncrementalGarbageCollection( - GarbageCollector::Config config) override { + void StartIncrementalGarbageCollection(GCConfig config) override { UNREACHABLE(); } @@ -51,9 +48,8 @@ class FakeGarbageCollector : public GarbageCollector { class MockGarbageCollector : public GarbageCollector { public: - MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override)); - MOCK_METHOD(void, StartIncrementalGarbageCollection, - (GarbageCollector::Config), (override)); + MOCK_METHOD(void, CollectGarbage, (GCConfig), (override)); + MOCK_METHOD(void, StartIncrementalGarbageCollection, (GCConfig), (override)); MOCK_METHOD(size_t, epoch, (), (const, override)); MOCK_METHOD(const EmbedderStackState*, override_stack_state, (), (const, override)); @@ -79,8 +75,7 @@ TEST(HeapGrowingTest, ConservativeGCInvoked) { cppgc::Heap::SweepingType::kIncrementalAndConcurrent); EXPECT_CALL( gc, CollectGarbage(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kMayContainHeapPointers))); + &GCConfig::stack_state, StackState::kMayContainHeapPointers))); FakeAllocate(&stats_collector, 100 * kMB); } @@ -97,8 
+92,7 @@ TEST(HeapGrowingTest, InitialHeapSize) { FakeAllocate(&stats_collector, kObjectSize - 1); EXPECT_CALL( gc, CollectGarbage(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kMayContainHeapPointers))); + &GCConfig::stack_state, StackState::kMayContainHeapPointers))); FakeAllocate(&stats_collector, kObjectSize); } @@ -146,9 +140,8 @@ TEST(HeapGrowingTest, IncrementalGCStarted) { cppgc::Heap::MarkingType::kIncrementalAndConcurrent, cppgc::Heap::SweepingType::kIncrementalAndConcurrent); EXPECT_CALL( - gc, CollectGarbage(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kMayContainHeapPointers))) + gc, CollectGarbage(::testing::Field(&GCConfig::stack_state, + StackState::kMayContainHeapPointers))) .Times(0); EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)); // Allocate 1 byte less the limit for atomic gc to trigger incremental gc. @@ -163,9 +156,8 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) { cppgc::Heap::MarkingType::kIncrementalAndConcurrent, cppgc::Heap::SweepingType::kIncrementalAndConcurrent); EXPECT_CALL( - gc, CollectGarbage(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kMayContainHeapPointers))) + gc, CollectGarbage(::testing::Field(&GCConfig::stack_state, + StackState::kMayContainHeapPointers))) .Times(0); EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)); // Allocate 1 byte less the limit for atomic gc to trigger incremental gc. @@ -174,8 +166,7 @@ TEST(HeapGrowingTest, IncrementalGCFinalized) { ::testing::Mock::VerifyAndClearExpectations(&gc); EXPECT_CALL( gc, CollectGarbage(::testing::Field( - &GarbageCollector::Config::stack_state, - GarbageCollector::Config::StackState::kMayContainHeapPointers))); + &GCConfig::stack_state, StackState::kMayContainHeapPointers))); EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)).Times(0); // Allocate the rest needed to trigger atomic gc (). 
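// (Editorial note: the growing heuristic is driven in two steps -- the
// earlier allocation stops one byte short of the atomic-GC limit, which only
// starts an incremental GC, and the StatsCollector::kAllocationThresholdBytes
// top-up below crosses the limit so the atomic CollectGarbage expectation
// fires.)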
FakeAllocate(&stats_collector, StatsCollector::kAllocationThresholdBytes); diff --git a/deps/v8/test/unittests/heap/cppgc/heap-unittest.cc b/deps/v8/test/unittests/heap/cppgc/heap-unittest.cc index 7dd850c5f6fa8b..c2021fe6eacc77 100644 --- a/deps/v8/test/unittests/heap/cppgc/heap-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/heap-unittest.cc @@ -27,11 +27,11 @@ class GCHeapTest : public testing::TestWithHeap { public: void ConservativeGC() { internal::Heap::From(GetHeap())->CollectGarbage( - Heap::Config::ConservativeAtomicConfig()); + GCConfig::ConservativeAtomicConfig()); } void PreciseGC() { internal::Heap::From(GetHeap())->CollectGarbage( - Heap::Config::PreciseAtomicConfig()); + GCConfig::PreciseAtomicConfig()); } }; @@ -74,7 +74,7 @@ namespace { const void* ConservativeGCReturningObject(cppgc::Heap* heap, const void* object) { internal::Heap::From(heap)->CollectGarbage( - Heap::Config::ConservativeAtomicConfig()); + GCConfig::ConservativeAtomicConfig()); return object; } @@ -113,7 +113,7 @@ class LargeObjectGCDuringCtor final : child_(MakeGarbageCollected<GCedWithFinalizer>( heap->GetAllocationHandle())) { internal::Heap::From(heap)->CollectGarbage( - Heap::Config::ConservativeAtomicConfig()); + GCConfig::ConservativeAtomicConfig()); } void Trace(Visitor* visitor) const { visitor->Trace(child_); } @@ -235,8 +235,8 @@ TEST_F(GCHeapTest, IsGarbageCollectionAllowed) { } TEST_F(GCHeapTest, IsMarking) { - GarbageCollector::Config config = GarbageCollector::Config:: - PreciseIncrementalMarkingConcurrentSweepingConfig(); + GCConfig config = + GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig(); auto* heap = Heap::From(GetHeap()); EXPECT_FALSE(subtle::HeapState::IsMarking(*heap)); heap->StartIncrementalGarbageCollection(config); @@ -248,8 +248,8 @@ TEST_F(GCHeapTest, IsMarking) { } TEST_F(GCHeapTest, IsSweeping) { - GarbageCollector::Config config = GarbageCollector::Config:: - PreciseIncrementalMarkingConcurrentSweepingConfig(); + GCConfig config = + GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig(); auto* heap = Heap::From(GetHeap()); EXPECT_FALSE(subtle::HeapState::IsSweeping(*heap)); heap->StartIncrementalGarbageCollection(config); @@ -280,8 +280,8 @@ class GCedExpectSweepingOnOwningThread final } // namespace TEST_F(GCHeapTest, IsSweepingOnOwningThread) { - GarbageCollector::Config config = GarbageCollector::Config:: - PreciseIncrementalMarkingConcurrentSweepingConfig(); + GCConfig config = + GCConfig::PreciseIncrementalMarkingConcurrentSweepingConfig(); auto* heap = Heap::From(GetHeap()); MakeGarbageCollected<GCedExpectSweepingOnOwningThread>( heap->GetAllocationHandle(), *heap); @@ -316,8 +316,7 @@ class ExpectAtomicPause final : public GarbageCollected<ExpectAtomicPause> { } // namespace TEST_F(GCHeapTest, IsInAtomicPause) { - GarbageCollector::Config config = - GarbageCollector::Config::PreciseIncrementalConfig(); + GCConfig config = GCConfig::PreciseIncrementalConfig(); auto* heap = Heap::From(GetHeap()); MakeGarbageCollected<ExpectAtomicPause>(heap->object_allocator(), *heap); EXPECT_FALSE(subtle::HeapState::IsInAtomicPause(*heap)); diff --git a/deps/v8/test/unittests/heap/cppgc/marker-unittest.cc b/deps/v8/test/unittests/heap/cppgc/marker-unittest.cc index 0149cac755beeb..2572e4a65c3c90 100644 --- a/deps/v8/test/unittests/heap/cppgc/marker-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/marker-unittest.cc @@ -25,18 +25,15 @@ namespace internal { namespace { class MarkerTest : public testing::TestWithHeap { public: - using MarkingConfig = 
Marker::MarkingConfig; - - void DoMarking(MarkingConfig::StackState stack_state) { - const MarkingConfig config = {MarkingConfig::CollectionType::kMajor, - stack_state}; + void DoMarking(StackState stack_state) { + const MarkingConfig config = {CollectionType::kMajor, stack_state}; auto* heap = Heap::From(GetHeap()); InitializeMarker(*heap, GetPlatformHandle().get(), config); marker_->FinishMarking(stack_state); // Pretend do finish sweeping as StatsCollector verifies that Notify* // methods are called in the right order. heap->stats_collector()->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + GCConfig::SweepingType::kAtomic); } void InitializeMarker(HeapBase& heap, cppgc::Platform* platform, @@ -80,7 +77,7 @@ TEST_F(MarkerTest, PersistentIsMarked) { Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle()); HeapObjectHeader& header = HeapObjectHeader::FromObject(object); EXPECT_FALSE(header.IsMarked()); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_TRUE(header.IsMarked()); } @@ -89,7 +86,7 @@ TEST_F(MarkerTest, ReachableMemberIsMarked) { parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); HeapObjectHeader& header = HeapObjectHeader::FromObject(parent->child()); EXPECT_FALSE(header.IsMarked()); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_TRUE(header.IsMarked()); } @@ -97,14 +94,14 @@ TEST_F(MarkerTest, UnreachableMemberIsNotMarked) { Member<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle()); HeapObjectHeader& header = HeapObjectHeader::FromObject(object); EXPECT_FALSE(header.IsMarked()); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_FALSE(header.IsMarked()); } TEST_F(MarkerTest, ObjectReachableFromStackIsMarked) { GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle()); EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked()); - DoMarking(MarkingConfig::StackState::kMayContainHeapPointers); + DoMarking(StackState::kMayContainHeapPointers); EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked()); access(object); } @@ -113,7 +110,7 @@ TEST_F(MarkerTest, ObjectReachableOnlyFromStackIsNotMarkedIfStackIsEmpty) { GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle()); HeapObjectHeader& header = HeapObjectHeader::FromObject(object); EXPECT_FALSE(header.IsMarked()); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_FALSE(header.IsMarked()); access(object); } @@ -123,14 +120,14 @@ TEST_F(MarkerTest, WeakReferenceToUnreachableObjectIsCleared) { WeakPersistent<GCed> weak_object = MakeGarbageCollected<GCed>(GetAllocationHandle()); EXPECT_TRUE(weak_object); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_FALSE(weak_object); } { Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle()); parent->SetWeakChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); EXPECT_TRUE(parent->weak_child()); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_FALSE(parent->weak_child()); } } @@ -141,7 +138,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { Persistent<GCed> object = MakeGarbageCollected<GCed>(GetAllocationHandle()); WeakPersistent<GCed> weak_object(object); EXPECT_TRUE(weak_object); - 
DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_TRUE(weak_object); } { @@ -149,7 +146,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle()); parent->SetWeakChild(object); EXPECT_TRUE(parent->weak_child()); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_TRUE(parent->weak_child()); } // Reachable from Member @@ -159,7 +156,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { MakeGarbageCollected<GCed>(GetAllocationHandle())); parent->SetChild(weak_object); EXPECT_TRUE(weak_object); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_TRUE(weak_object); } { @@ -167,7 +164,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { parent->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); parent->SetWeakChild(parent->child()); EXPECT_TRUE(parent->weak_child()); - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_TRUE(parent->weak_child()); } // Reachable from stack @@ -175,7 +172,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { GCed* object = MakeGarbageCollected<GCed>(GetAllocationHandle()); WeakPersistent<GCed> weak_object(object); EXPECT_TRUE(weak_object); - DoMarking(MarkingConfig::StackState::kMayContainHeapPointers); + DoMarking(StackState::kMayContainHeapPointers); EXPECT_TRUE(weak_object); access(object); } @@ -184,7 +181,7 @@ TEST_F(MarkerTest, WeakReferenceToReachableObjectIsNotCleared) { Persistent<GCed> parent = MakeGarbageCollected<GCed>(GetAllocationHandle()); parent->SetWeakChild(object); EXPECT_TRUE(parent->weak_child()); - DoMarking(MarkingConfig::StackState::kMayContainHeapPointers); + DoMarking(StackState::kMayContainHeapPointers); EXPECT_TRUE(parent->weak_child()); access(object); } @@ -199,7 +196,7 @@ TEST_F(MarkerTest, DeepHierarchyIsMarked) { parent->SetWeakChild(parent->child()); parent = parent->child(); } - DoMarking(MarkingConfig::StackState::kNoHeapPointers); + DoMarking(StackState::kNoHeapPointers); EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked()); parent = root; for (int i = 0; i < kHierarchyDepth; ++i) { @@ -213,7 +210,7 @@ TEST_F(MarkerTest, NestedObjectsOnStackAreMarked) { GCed* root = MakeGarbageCollected<GCed>(GetAllocationHandle()); root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); root->child()->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); - DoMarking(MarkingConfig::StackState::kMayContainHeapPointers); + DoMarking(StackState::kMayContainHeapPointers); EXPECT_TRUE(HeapObjectHeader::FromObject(root).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()).IsMarked()); EXPECT_TRUE(HeapObjectHeader::FromObject(root->child()->child()).IsMarked()); @@ -244,9 +241,8 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> { } // namespace TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) { - static const Marker::MarkingConfig config = { - MarkingConfig::CollectionType::kMajor, - MarkingConfig::StackState::kMayContainHeapPointers}; + static const MarkingConfig config = {CollectionType::kMajor, + StackState::kMayContainHeapPointers}; InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>( GetAllocationHandle(), [marker = 
marker()](GCedWithCallback* obj) { @@ -254,22 +250,20 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) { marker->Visitor().Trace(member); }); EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked()); - marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers); + marker()->FinishMarking(StackState::kMayContainHeapPointers); EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked()); } TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) { - static const Marker::MarkingConfig config = { - MarkingConfig::CollectionType::kMajor, - MarkingConfig::StackState::kMayContainHeapPointers}; + static const MarkingConfig config = {CollectionType::kMajor, + StackState::kMayContainHeapPointers}; InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); MakeGarbageCollected<GCedWithCallback>( GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) { Member<GCedWithCallback> member(obj); marker->Visitor().Trace(member); EXPECT_FALSE(HeapObjectHeader::FromObject(obj).IsMarked()); - marker->FinishMarking( - MarkingConfig::StackState::kMayContainHeapPointers); + marker->FinishMarking(StackState::kMayContainHeapPointers); EXPECT_TRUE(HeapObjectHeader::FromObject(obj).IsMarked()); }); } @@ -318,36 +312,34 @@ V8_NOINLINE void RegisterInConstructionObject( TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedDifferentNonEmptyStack) { - static const Marker::MarkingConfig config = { - MarkingConfig::CollectionType::kMajor, - MarkingConfig::StackState::kMayContainHeapPointers}; + static const MarkingConfig config = {CollectionType::kMajor, + StackState::kMayContainHeapPointers}; InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); GCObliviousObjectStorage storage; RegisterInConstructionObject(GetAllocationHandle(), marker()->Visitor(), storage); EXPECT_FALSE(HeapObjectHeader::FromObject(storage.object()).IsMarked()); - marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers); + marker()->FinishMarking(StackState::kMayContainHeapPointers); EXPECT_TRUE(HeapObjectHeader::FromObject(storage.object()).IsMarked()); } TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) { - static const Marker::MarkingConfig config = { - MarkingConfig::CollectionType::kMajor, - MarkingConfig::StackState::kNoHeapPointers, + static const MarkingConfig config = { + CollectionType::kMajor, StackState::kNoHeapPointers, MarkingConfig::MarkingType::kIncremental}; Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle()); auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle()); root->SetWeakChild(tmp); InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); while (!marker()->IncrementalMarkingStepForTesting( - MarkingConfig::StackState::kNoHeapPointers)) { + StackState::kNoHeapPointers)) { } // {root} object must be marked at this point because we do not allow // encountering kSentinelPointer in WeakMember on regular Trace() calls. 
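// (Editorial note: kSentinelPointer is cppgc's marker value for a
// deliberately cleared reference; the test plants it in the WeakMember only
// after {root} has been visited, then checks that FinishMarking leaves the
// sentinel intact instead of nulling it during weak-reference processing.)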
ASSERT_TRUE(HeapObjectHeader::FromObject(root.Get()).IsMarked()); root->SetWeakChild(kSentinelPointer); - marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers); + marker()->FinishMarking(StackState::kNoHeapPointers); EXPECT_EQ(kSentinelPointer, root->weak_child()); } @@ -383,15 +375,14 @@ class ObjectWithEphemeronPair final } // namespace TEST_F(MarkerTest, MarkerProcessesAllEphemeronPairs) { - static const Marker::MarkingConfig config = { - MarkingConfig::CollectionType::kMajor, - MarkingConfig::StackState::kNoHeapPointers, - MarkingConfig::MarkingType::kAtomic}; + static const MarkingConfig config = {CollectionType::kMajor, + StackState::kNoHeapPointers, + MarkingConfig::MarkingType::kAtomic}; Persistent<ObjectWithEphemeronPair> obj = MakeGarbageCollected<ObjectWithEphemeronPair>(GetAllocationHandle(), GetAllocationHandle()); InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config); - marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers); + marker()->FinishMarking(StackState::kNoHeapPointers); ResetMarker(); } @@ -399,26 +390,22 @@ TEST_F(MarkerTest, MarkerProcessesAllEphemeronPairs) { class IncrementalMarkingTest : public testing::TestWithHeap { public: - using MarkingConfig = Marker::MarkingConfig; - static constexpr MarkingConfig IncrementalPreciseMarkingConfig = { - MarkingConfig::CollectionType::kMajor, - MarkingConfig::StackState::kNoHeapPointers, + CollectionType::kMajor, StackState::kNoHeapPointers, MarkingConfig::MarkingType::kIncremental}; - void FinishSteps(MarkingConfig::StackState stack_state) { + void FinishSteps(StackState stack_state) { while (!SingleStep(stack_state)) { } } void FinishMarking() { - GetMarkerRef()->FinishMarking( - MarkingConfig::StackState::kMayContainHeapPointers); + GetMarkerRef()->FinishMarking(StackState::kMayContainHeapPointers); // Pretend do finish sweeping as StatsCollector verifies that Notify* // methods are called in the right order. 
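// Editorial aside (not part of the patch): the fixtures in this file all
// drive incremental marking the same way -- step until the marker reports
// completion, then finalize. Condensed from the helpers above:
//   while (!marker->IncrementalMarkingStepForTesting(
//       StackState::kNoHeapPointers)) {
//   }
//   marker->FinishMarking(StackState::kNoHeapPointers);
// The NotifySweepingCompleted() call that follows keeps the StatsCollector's
// ordering checks satisfied.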
GetMarkerRef().reset(); Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kIncremental); + GCConfig::SweepingType::kIncremental); } void InitializeMarker(HeapBase& heap, cppgc::Platform* platform, @@ -430,13 +417,12 @@ class IncrementalMarkingTest : public testing::TestWithHeap { MarkerBase* marker() const { return Heap::From(GetHeap())->marker(); } private: - bool SingleStep(MarkingConfig::StackState stack_state) { + bool SingleStep(StackState stack_state) { return GetMarkerRef()->IncrementalMarkingStepForTesting(stack_state); } }; -constexpr IncrementalMarkingTest::MarkingConfig - IncrementalMarkingTest::IncrementalPreciseMarkingConfig; +constexpr MarkingConfig IncrementalMarkingTest::IncrementalPreciseMarkingConfig; TEST_F(IncrementalMarkingTest, RootIsMarkedAfterMarkingStarted) { Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle()); @@ -454,7 +440,7 @@ TEST_F(IncrementalMarkingTest, MemberIsMarkedAfterMarkingSteps) { EXPECT_FALSE(header.IsMarked()); InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), IncrementalPreciseMarkingConfig); - FinishSteps(MarkingConfig::StackState::kNoHeapPointers); + FinishSteps(StackState::kNoHeapPointers); EXPECT_TRUE(header.IsMarked()); FinishMarking(); } @@ -465,7 +451,7 @@ TEST_F(IncrementalMarkingTest, InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), IncrementalPreciseMarkingConfig); root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle())); - FinishSteps(MarkingConfig::StackState::kNoHeapPointers); + FinishSteps(StackState::kNoHeapPointers); HeapObjectHeader& header = HeapObjectHeader::FromObject(root->child()); EXPECT_TRUE(header.IsMarked()); FinishMarking(); @@ -491,10 +477,10 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) { header = &HeapObjectHeader::FromObject(obj); holder->member_ = obj; EXPECT_FALSE(header->IsMarked()); - FinishSteps(MarkingConfig::StackState::kMayContainHeapPointers); + FinishSteps(StackState::kMayContainHeapPointers); EXPECT_FALSE(header->IsMarked()); }); - FinishSteps(MarkingConfig::StackState::kNoHeapPointers); + FinishSteps(StackState::kNoHeapPointers); EXPECT_TRUE(header->IsMarked()); FinishMarking(); } @@ -502,7 +488,7 @@ TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) { TEST_F(IncrementalMarkingTest, MarkingRunsOutOfWorkEventually) { InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), IncrementalPreciseMarkingConfig); - FinishSteps(MarkingConfig::StackState::kNoHeapPointers); + FinishSteps(StackState::kNoHeapPointers); FinishMarking(); } diff --git a/deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc b/deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc index 3e2236abdfbed9..39909ab7bc6c29 100644 --- a/deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc @@ -20,12 +20,10 @@ namespace { class MarkingVerifierTest : public testing::TestWithHeap { public: - using StackState = Heap::Config::StackState; - V8_NOINLINE void VerifyMarking(HeapBase& heap, StackState stack_state, size_t expected_marked_bytes) { Heap::From(GetHeap())->object_allocator().ResetLinearAllocationBuffers(); - MarkingVerifier verifier(heap, Heap::Config::CollectionType::kMajor); + MarkingVerifier verifier(heap, CollectionType::kMajor); verifier.Run(stack_state, v8::base::Stack::GetCurrentStackPosition(), expected_marked_bytes); } @@ -140,16 +138,14 @@ TEST_F(MarkingVerifierTest, 
DoesntDieOnInConstructionObjectWithWriteBarrier) { Persistent<Holder<GCedWithCallbackAndChild>> persistent = MakeGarbageCollected<Holder<GCedWithCallbackAndChild>>( GetAllocationHandle()); - GarbageCollector::Config config = - GarbageCollector::Config::PreciseIncrementalConfig(); + GCConfig config = GCConfig::PreciseIncrementalConfig(); Heap::From(GetHeap())->StartIncrementalGarbageCollection(config); MakeGarbageCollected<GCedWithCallbackAndChild>( GetAllocationHandle(), MakeGarbageCollected<GCed>(GetAllocationHandle()), [&persistent](GCedWithCallbackAndChild* obj) { persistent->object = obj; }); - GetMarkerRef()->IncrementalMarkingStepForTesting( - GarbageCollector::Config::StackState::kNoHeapPointers); + GetMarkerRef()->IncrementalMarkingStepForTesting(StackState::kNoHeapPointers); Heap::From(GetHeap())->FinalizeIncrementalGarbageCollectionIfRunning(config); } diff --git a/deps/v8/test/unittests/heap/cppgc/metric-recorder-unittest.cc b/deps/v8/test/unittests/heap/cppgc/metric-recorder-unittest.cc index 841d400143c0ef..cf02f8a9facf7c 100644 --- a/deps/v8/test/unittests/heap/cppgc/metric-recorder-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/metric-recorder-unittest.cc @@ -51,15 +51,13 @@ class MetricRecorderTest : public testing::TestWithHeap { } void StartGC() { - stats->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kIncremental, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats->NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kIncremental, + GCConfig::IsForcedGC::kNotForced); } void EndGC(size_t marked_bytes) { stats->NotifyMarkingCompleted(marked_bytes); - stats->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kIncremental); + stats->NotifySweepingCompleted(GCConfig::SweepingType::kIncremental); } StatsCollector* stats; @@ -308,8 +306,7 @@ TEST_F(MetricRecorderTest, ObjectSizeMetricsWithAllocations) { stats->NotifyAllocation(150); stats->NotifyAllocatedMemory(1000); stats->NotifyFreedMemory(400); - stats->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); EXPECT_EQ(1300u, MetricRecorderImpl::GCCycle_event.objects.before_bytes); EXPECT_EQ(800, MetricRecorderImpl::GCCycle_event.objects.after_bytes); EXPECT_EQ(500u, MetricRecorderImpl::GCCycle_event.objects.freed_bytes); diff --git a/deps/v8/test/unittests/heap/cppgc/minor-gc-unittest.cc b/deps/v8/test/unittests/heap/cppgc/minor-gc-unittest.cc index bc50f0deed3c14..6ec873b71f1a82 100644 --- a/deps/v8/test/unittests/heap/cppgc/minor-gc-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/minor-gc-unittest.cc @@ -13,6 +13,7 @@ #include "include/cppgc/internal/caged-heap-local-data.h" #include "include/cppgc/persistent.h" #include "src/heap/cppgc/heap-object-header.h" +#include "src/heap/cppgc/heap-visitor.h" #include "src/heap/cppgc/heap.h" #include "test/unittests/heap/cppgc/tests.h" #include "testing/gtest/include/gtest/gtest.h" @@ -87,6 +88,46 @@ void ExpectPageOld(BasePage& page) { CagedHeap::OffsetFromAddress(page.PayloadEnd()))); } +class RememberedSetExtractor : HeapVisitor<RememberedSetExtractor> { + friend class HeapVisitor<RememberedSetExtractor>; + + public: + static std::set<void*> Extract(cppgc::Heap* heap) { + RememberedSetExtractor extractor; + extractor.Traverse(Heap::From(heap)->raw_heap()); + return std::move(extractor.slots_); + } + + private: + void VisitPage(BasePage& page) { + auto* slot_set = 
page.slot_set(); + if (!slot_set) return; + + const uintptr_t page_start = reinterpret_cast<uintptr_t>(&page); + const size_t buckets_size = SlotSet::BucketsForSize(page.AllocatedSize()); + + slot_set->Iterate( + page_start, 0, buckets_size, + [this](SlotSet::Address slot) { + slots_.insert(reinterpret_cast<void*>(slot)); + return heap::base::KEEP_SLOT; + }, + SlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS); + } + + bool VisitNormalPage(NormalPage& page) { + VisitPage(page); + return true; + } + + bool VisitLargePage(LargePage& page) { + VisitPage(page); + return true; + } + + std::set<void*> slots_; +}; + } // namespace class MinorGCTest : public testing::TestWithHeap { @@ -107,16 +148,11 @@ class MinorGCTest : public testing::TestWithHeap { } void CollectMinor() { - Heap::From(GetHeap())->CollectGarbage( - Heap::Config::MinorPreciseAtomicConfig()); + Heap::From(GetHeap())->CollectGarbage(GCConfig::MinorPreciseAtomicConfig()); } void CollectMajor() { - Heap::From(GetHeap())->CollectGarbage(Heap::Config::PreciseAtomicConfig()); - } - - const auto& RememberedSlots() const { - return Heap::From(GetHeap())->remembered_set().remembered_slots_; + Heap::From(GetHeap())->CollectGarbage(GCConfig::PreciseAtomicConfig()); } const auto& RememberedSourceObjects() const { @@ -145,75 +181,72 @@ struct ExpectRememberedSlotsAdded final { ExpectRememberedSlotsAdded( const MinorGCTest& test, std::initializer_list<void*> slots_expected_to_be_remembered) - : remembered_slots_(test.RememberedSlots()), + : test_(test), slots_expected_to_be_remembered_(slots_expected_to_be_remembered), - initial_number_of_slots_(remembered_slots_.size()) { + initial_slots_(RememberedSetExtractor::Extract(test.GetHeap())) { // Check that the remembered set doesn't contain specified slots. - EXPECT_FALSE(std::includes(remembered_slots_.begin(), - remembered_slots_.end(), + EXPECT_FALSE(std::includes(initial_slots_.begin(), initial_slots_.end(), slots_expected_to_be_remembered_.begin(), slots_expected_to_be_remembered_.end())); } ~ExpectRememberedSlotsAdded() { - const size_t current_number_of_slots = remembered_slots_.size(); - EXPECT_EQ( - initial_number_of_slots_ + slots_expected_to_be_remembered_.size(), - current_number_of_slots); - EXPECT_TRUE(std::includes(remembered_slots_.begin(), - remembered_slots_.end(), + const auto current_slots = RememberedSetExtractor::Extract(test_.GetHeap()); + EXPECT_EQ(initial_slots_.size() + slots_expected_to_be_remembered_.size(), + current_slots.size()); + EXPECT_TRUE(std::includes(current_slots.begin(), current_slots.end(), slots_expected_to_be_remembered_.begin(), slots_expected_to_be_remembered_.end())); } private: - const std::set<void*>& remembered_slots_; + const MinorGCTest& test_; std::set<void*> slots_expected_to_be_remembered_; - const size_t initial_number_of_slots_ = 0; + std::set<void*> initial_slots_; }; struct ExpectRememberedSlotsRemoved final { ExpectRememberedSlotsRemoved( const MinorGCTest& test, std::initializer_list<void*> slots_expected_to_be_removed) - : remembered_slots_(test.RememberedSlots()), + : test_(test), slots_expected_to_be_removed_(slots_expected_to_be_removed), - initial_number_of_slots_(remembered_slots_.size()) { - DCHECK_GE(initial_number_of_slots_, slots_expected_to_be_removed_.size()); + initial_slots_(RememberedSetExtractor::Extract(test.GetHeap())) { + DCHECK_GE(initial_slots_.size(), slots_expected_to_be_removed_.size()); // Check that the remembered set does contain specified slots to be removed. 
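// (initial_slots_ is a by-value snapshot: RememberedSetExtractor::Extract()
// walks every page's SlotSet and copies the slot addresses out, so the
// snapshot stays stable for the destructor's comparison even though the
// underlying remembered set changes in between.)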
- EXPECT_TRUE(std::includes(remembered_slots_.begin(), - remembered_slots_.end(), + EXPECT_TRUE(std::includes(initial_slots_.begin(), initial_slots_.end(), slots_expected_to_be_removed_.begin(), slots_expected_to_be_removed_.end())); } ~ExpectRememberedSlotsRemoved() { - const size_t current_number_of_slots = remembered_slots_.size(); - EXPECT_EQ(initial_number_of_slots_ - slots_expected_to_be_removed_.size(), - current_number_of_slots); - EXPECT_FALSE(std::includes(remembered_slots_.begin(), - remembered_slots_.end(), + const auto current_slots = RememberedSetExtractor::Extract(test_.GetHeap()); + EXPECT_EQ(initial_slots_.size() - slots_expected_to_be_removed_.size(), + current_slots.size()); + EXPECT_FALSE(std::includes(current_slots.begin(), current_slots.end(), slots_expected_to_be_removed_.begin(), slots_expected_to_be_removed_.end())); } private: - const std::set<void*>& remembered_slots_; + const MinorGCTest& test_; std::set<void*> slots_expected_to_be_removed_; - const size_t initial_number_of_slots_ = 0; + std::set<void*> initial_slots_; }; struct ExpectNoRememberedSlotsAdded final { explicit ExpectNoRememberedSlotsAdded(const MinorGCTest& test) - : remembered_slots_(test.RememberedSlots()), - initial_remembered_slots_(remembered_slots_) {} + : test_(test), + initial_remembered_slots_( + RememberedSetExtractor::Extract(test.GetHeap())) {} ~ExpectNoRememberedSlotsAdded() { - EXPECT_EQ(initial_remembered_slots_, remembered_slots_); + EXPECT_EQ(initial_remembered_slots_, + RememberedSetExtractor::Extract(test_.GetHeap())); } private: - const std::set<void*>& remembered_slots_; + const MinorGCTest& test_; std::set<void*> initial_remembered_slots_; }; @@ -298,19 +331,23 @@ void InterGenerationalPointerTest(MinorGCTest* test, cppgc::Heap* heap) { } } - const auto& set = test->RememberedSlots(); - auto set_size_before = set.size(); + auto remembered_set_size_before_barrier = + RememberedSetExtractor::Extract(test->GetHeap()).size(); // Issue generational barrier. old->next = young; - EXPECT_EQ(set_size_before + 1u, set.size()); + auto remembered_set_size_after_barrier = + RememberedSetExtractor::Extract(test->GetHeap()).size(); + + EXPECT_EQ(remembered_set_size_before_barrier + 1u, + remembered_set_size_after_barrier); // Check that the remembered set is visited. test->CollectMinor(); EXPECT_EQ(0u, MinorGCTest::DestructedObjects()); - EXPECT_TRUE(set.empty()); + EXPECT_TRUE(RememberedSetExtractor::Extract(test->GetHeap()).empty()); for (size_t i = 0; i < 64; ++i) { EXPECT_FALSE(HeapObjectHeader::FromObject(young).IsFree()); @@ -428,8 +465,8 @@ TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) { auto* young = MakeGarbageCollected<Small>(GetAllocationHandle()); - const auto& set = RememberedSlots(); - const size_t set_size_before_barrier = set.size(); + const size_t remembered_set_size_before_barrier = + RememberedSetExtractor::Extract(GetHeap()).size(); // Issue the generational barriers. for (size_t i = kFirstMemberToInvalidate; i < kLastMemberToInvalidate; ++i) { @@ -439,17 +476,23 @@ TEST_F(MinorGCTest, RememberedSetInvalidationOnShrink) { get_member(i) = young; } + const auto remembered_set_size_after_barrier = + RememberedSetExtractor::Extract(GetHeap()).size(); + // Check that barriers hit (kLastMemberToInvalidate - // kFirstMemberToInvalidate) times. - EXPECT_EQ(set_size_before_barrier + + EXPECT_EQ(remembered_set_size_before_barrier + (kLastMemberToInvalidate - kFirstMemberToInvalidate), - set.size()); + remembered_set_size_after_barrier); // Shrink the buffer for old object. 
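// Resize() releases the tail of the inline buffer; remembered-set entries
// pointing into the released area are expected to be dropped along with it,
// which the size comparison below verifies.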
subtle::Resize(*old, AdditionalBytes(kBytesToAllocate / 2)); + const auto remembered_set_after_shrink = + RememberedSetExtractor::Extract(GetHeap()).size(); + // Check that the reference was invalidated. - EXPECT_EQ(set_size_before_barrier, set.size()); + EXPECT_EQ(remembered_set_size_before_barrier, remembered_set_after_shrink); // Visiting remembered slots must not fail. CollectMinor(); diff --git a/deps/v8/test/unittests/heap/cppgc/stats-collector-scopes-unittest.cc b/deps/v8/test/unittests/heap/cppgc/stats-collector-scopes-unittest.cc index 59dcfabd98ba31..131841564a5b0c 100644 --- a/deps/v8/test/unittests/heap/cppgc/stats-collector-scopes-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/stats-collector-scopes-unittest.cc @@ -4,6 +4,7 @@ #if CPPGC_IS_STANDALONE +#include "src/heap/cppgc/heap-config.h" #include "src/heap/cppgc/stats-collector.h" #include "test/unittests/heap/cppgc/tests.h" #include "testing/gtest/include/gtest/gtest.h" @@ -57,17 +58,14 @@ std::vector<uint8_t> DelegatingTracingControllerImpl::stored_arg_types; std::vector<uint64_t> DelegatingTracingControllerImpl::stored_arg_values; class V8_NODISCARD CppgcTracingScopesTest : public testing::TestWithHeap { - using Config = Marker::MarkingConfig; - public: CppgcTracingScopesTest() { SetTracingController(std::make_unique<DelegatingTracingControllerImpl>()); } void StartGC() { - Config config = {Config::CollectionType::kMajor, - Config::StackState::kNoHeapPointers, - Config::MarkingType::kIncremental}; + MarkingConfig config = {CollectionType::kMajor, StackState::kNoHeapPointers, + GCConfig::MarkingType::kIncremental}; GetMarkerRef() = std::make_unique<Marker>( Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config); GetMarkerRef()->StartMarking(); @@ -76,10 +74,10 @@ class V8_NODISCARD CppgcTracingScopesTest : public testing::TestWithHeap { void EndGC() { DelegatingTracingControllerImpl::check_expectations = false; - GetMarkerRef()->FinishMarking(Config::StackState::kNoHeapPointers); + GetMarkerRef()->FinishMarking(StackState::kNoHeapPointers); GetMarkerRef().reset(); Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + GCConfig::SweepingType::kAtomic); } void ResetDelegatingTracingController(const char* expected_name = nullptr) { @@ -228,13 +226,11 @@ TEST_F(CppgcTracingScopesTest, CheckScopeArgs) { TEST_F(CppgcTracingScopesTest, InitalScopesAreZero) { StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector(); - stats_collector->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats_collector->NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); stats_collector->NotifyMarkingCompleted(0); - stats_collector->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats_collector->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); const StatsCollector::Event& event = stats_collector->GetPreviousEventForTesting(); for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) { @@ -249,10 +245,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) { for (int scope_id = 0; scope_id < StatsCollector::kNumHistogramScopeIds; ++scope_id) { StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector(); - stats_collector->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - 
GarbageCollector::Config::MarkingType::kIncremental, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats_collector->NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kIncremental, + GCConfig::IsForcedGC::kNotForced); DelegatingTracingControllerImpl::check_expectations = false; { StatsCollector::EnabledScope scope( @@ -265,7 +260,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualScopes) { } stats_collector->NotifyMarkingCompleted(0); stats_collector->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kIncremental); + GCConfig::SweepingType::kIncremental); const StatsCollector::Event& event = stats_collector->GetPreviousEventForTesting(); for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) { @@ -284,10 +279,9 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) { for (int scope_id = 0; scope_id < StatsCollector::kNumHistogramConcurrentScopeIds; ++scope_id) { StatsCollector* stats_collector = Heap::From(GetHeap())->stats_collector(); - stats_collector->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats_collector->NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); DelegatingTracingControllerImpl::check_expectations = false; { StatsCollector::EnabledConcurrentScope scope( @@ -299,8 +293,7 @@ TEST_F(CppgcTracingScopesTest, TestIndividualConcurrentScopes) { } } stats_collector->NotifyMarkingCompleted(0); - stats_collector->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats_collector->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); const StatsCollector::Event& event = stats_collector->GetPreviousEventForTesting(); for (int i = 0; i < StatsCollector::kNumHistogramScopeIds; ++i) { diff --git a/deps/v8/test/unittests/heap/cppgc/stats-collector-unittest.cc b/deps/v8/test/unittests/heap/cppgc/stats-collector-unittest.cc index 765456d9df5038..c28b69b09267cb 100644 --- a/deps/v8/test/unittests/heap/cppgc/stats-collector-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/stats-collector-unittest.cc @@ -38,23 +38,21 @@ class StatsCollectorTest : public ::testing::Test { } // namespace TEST_F(StatsCollectorTest, NoMarkedBytes) { - stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats.NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); stats.NotifyMarkingCompleted(kNoMarkedBytes); - stats.NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); auto event = stats.GetPreviousEventForTesting(); EXPECT_EQ(0u, event.marked_bytes); } TEST_F(StatsCollectorTest, EventPrevGCMarkedObjectSize) { - stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats.NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); stats.NotifyMarkingCompleted(1024); - stats.NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); auto event = stats.GetPreviousEventForTesting(); 
EXPECT_EQ(1024u, event.marked_bytes); } @@ -74,54 +72,50 @@ TEST_F(StatsCollectorTest, AlllocationReportAboveAllocationThresholdBytes) { } TEST_F(StatsCollectorTest, InitialAllocatedObjectSize) { - stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats.NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); EXPECT_EQ(0u, stats.allocated_object_size()); stats.NotifyMarkingCompleted(kNoMarkedBytes); EXPECT_EQ(0u, stats.allocated_object_size()); - stats.NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); EXPECT_EQ(0u, stats.allocated_object_size()); } TEST_F(StatsCollectorTest, AllocatedObjectSize) { - stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats.NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); FakeAllocate(kMinReportedSize); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); stats.NotifyMarkingCompleted(kMinReportedSize); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); - stats.NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); } TEST_F(StatsCollectorTest, AllocatedObjectSizeNoMarkedBytes) { - stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats.NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); FakeAllocate(kMinReportedSize); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); stats.NotifyMarkingCompleted(kNoMarkedBytes); EXPECT_EQ(0u, stats.allocated_object_size()); - stats.NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); EXPECT_EQ(0u, stats.allocated_object_size()); } TEST_F(StatsCollectorTest, AllocatedObjectSizeAllocateAfterMarking) { - stats.NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + stats.NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); FakeAllocate(kMinReportedSize); EXPECT_EQ(kMinReportedSize, stats.allocated_object_size()); stats.NotifyMarkingCompleted(kMinReportedSize); FakeAllocate(kMinReportedSize); EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size()); - stats.NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats.NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); EXPECT_EQ(2 * kMinReportedSize, stats.allocated_object_size()); } @@ -153,12 +147,11 @@ TEST_F(StatsCollectorTest, ObserveAllocatedObjectSizeIncreaseAndDecrease) { namespace { void FakeGC(StatsCollector* stats, size_t marked_bytes) { - stats->NotifyMarkingStarted(GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + 
stats->NotifyMarkingStarted(CollectionType::kMajor, + GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); stats->NotifyMarkingCompleted(marked_bytes); - stats->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + stats->NotifySweepingCompleted(GCConfig::SweepingType::kAtomic); } } // namespace diff --git a/deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc b/deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc index 3f5c01ac001b04..b93c7b6bc040b1 100644 --- a/deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc @@ -14,6 +14,7 @@ #include "src/heap/cppgc/heap-page.h" #include "src/heap/cppgc/heap-visitor.h" #include "src/heap/cppgc/heap.h" +#include "src/heap/cppgc/object-view.h" #include "src/heap/cppgc/page-memory.h" #include "src/heap/cppgc/stats-collector.h" #include "test/unittests/heap/cppgc/tests.h" @@ -48,13 +49,12 @@ class SweeperTest : public testing::TestWithHeap { // Pretend to finish marking as StatsCollector verifies that Notify* // methods are called in the right order. heap->stats_collector()->NotifyMarkingStarted( - GarbageCollector::Config::CollectionType::kMajor, - GarbageCollector::Config::MarkingType::kAtomic, - GarbageCollector::Config::IsForcedGC::kNotForced); + CollectionType::kMajor, GCConfig::MarkingType::kAtomic, + GCConfig::IsForcedGC::kNotForced); heap->stats_collector()->NotifyMarkingCompleted(0); - const Sweeper::SweepingConfig sweeping_config{ - Sweeper::SweepingConfig::SweepingType::kAtomic, - Sweeper::SweepingConfig::CompactableSpaceHandling::kSweep}; + const SweepingConfig sweeping_config{ + SweepingConfig::SweepingType::kAtomic, + SweepingConfig::CompactableSpaceHandling::kSweep}; sweeper.Start(sweeping_config); sweeper.FinishIfRunning(); } @@ -226,8 +226,7 @@ class GCInDestructor final : public GarbageCollected<GCInDestructor> { ~GCInDestructor() { // Instead of directly calling GC, allocations should be supported here as // well. - heap_->CollectGarbage( - internal::GarbageCollector::Config::ConservativeAtomicConfig()); + heap_->CollectGarbage(internal::GCConfig::ConservativeAtomicConfig()); } void Trace(Visitor*) const {} @@ -272,7 +271,16 @@ TEST_F(SweeperTest, UnmarkObjects) { } TEST_F(SweeperTest, LazySweepingDuringAllocation) { - using GCedObject = GCed<256>; + // The test allocates objects in such a way that the object with its header is + // a power of two. This is to make sure that if there is some padding at the end + // of the page, it will go to a different freelist bucket. To get that, + // subtract vptr and object-header-size from a power-of-two.
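+ // Worked example (the 8-byte vptr and 8-byte HeapObjectHeader here are
+ // assumptions for illustration; the static_assert below checks the
+ // power-of-two property for the actual sizes): 256 - 8 - 8 = 240 payload
+ // bytes, so sizeof(GCedObject) = 8 + 240 = 248 and, including the header,
+ // 248 + 8 = 256, a power of two.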
+ static constexpr size_t kGCObjectSize = + 256 - sizeof(void*) - sizeof(HeapObjectHeader); + using GCedObject = GCed<kGCObjectSize>; + static_assert(v8::base::bits::IsPowerOfTwo(sizeof(GCedObject) + + sizeof(HeapObjectHeader))); + static const size_t kObjectsPerPage = NormalPage::PayloadSize() / (sizeof(GCedObject) + sizeof(HeapObjectHeader)); @@ -299,11 +307,10 @@ TEST_F(SweeperTest, LazySweepingDuringAllocation) { testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope( GetPlatformHandle().get()); g_destructor_callcount = 0; - static constexpr Heap::Config config = { - Heap::Config::CollectionType::kMajor, - Heap::Config::StackState::kNoHeapPointers, - Heap::Config::MarkingType::kAtomic, - Heap::Config::SweepingType::kIncrementalAndConcurrent}; + static constexpr GCConfig config = { + CollectionType::kMajor, StackState::kNoHeapPointers, + GCConfig::MarkingType::kAtomic, + GCConfig::SweepingType::kIncrementalAndConcurrent}; Heap::From(GetHeap())->CollectGarbage(config); // Incremental sweeping is active and the space should have two pages with // no room for an additional GCedObject. Allocating a new GCedObject should @@ -334,11 +341,13 @@ TEST_F(SweeperTest, LazySweepingNormalPages) { PreciseGC(); EXPECT_EQ(0u, g_destructor_callcount); MakeGarbageCollected<GCedObject>(GetAllocationHandle()); - static constexpr Heap::Config config = { - Heap::Config::CollectionType::kMajor, - Heap::Config::StackState::kNoHeapPointers, - Heap::Config::MarkingType::kAtomic, - Heap::Config::SweepingType::kIncrementalAndConcurrent}; + static constexpr GCConfig config = { + CollectionType::kMajor, StackState::kNoHeapPointers, + GCConfig::MarkingType::kAtomic, + // Sweeping type must not include concurrent as that could lead to the + // concurrent sweeper holding onto pages in rare cases which delays + // reclamation of objects. + GCConfig::SweepingType::kIncremental}; Heap::From(GetHeap())->CollectGarbage(config); EXPECT_EQ(0u, g_destructor_callcount); MakeGarbageCollected<GCedObject>(GetAllocationHandle()); @@ -439,10 +448,9 @@ TEST_F(SweeperTest, CrossThreadPersistentCanBeClearedFromOtherThread) { testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope( GetPlatformHandle().get()); Heap::From(GetHeap())->CollectGarbage( - {Heap::Config::CollectionType::kMajor, - Heap::Config::StackState::kNoHeapPointers, - Heap::Config::MarkingType::kAtomic, - Heap::Config::SweepingType::kIncrementalAndConcurrent}); + {CollectionType::kMajor, StackState::kNoHeapPointers, + GCConfig::MarkingType::kAtomic, + GCConfig::SweepingType::kIncrementalAndConcurrent}); // `holder` is unreachable (as the stack is not scanned) and will be // reclaimed. Its payload memory is generally poisoned at this point. The // CrossThreadPersistent slot should be unpoisoned. @@ -467,11 +475,10 @@ TEST_F(SweeperTest, WeakCrossThreadPersistentCanBeClearedFromOtherThread) { testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope( GetPlatformHandle().get()); - static constexpr Heap::Config config = { - Heap::Config::CollectionType::kMajor, - Heap::Config::StackState::kNoHeapPointers, - Heap::Config::MarkingType::kAtomic, - Heap::Config::SweepingType::kIncrementalAndConcurrent}; + static constexpr GCConfig config = { + CollectionType::kMajor, StackState::kNoHeapPointers, + GCConfig::MarkingType::kAtomic, + GCConfig::SweepingType::kIncrementalAndConcurrent}; Heap::From(GetHeap())->CollectGarbage(config); // `holder` is unreachable (as the stack is not scanned) and will be // reclaimed. 
Its payload memory is generally poisoned at this point. The @@ -480,10 +487,9 @@ TEST_F(SweeperTest, WeakCrossThreadPersistentCanBeClearedFromOtherThread) { // GC in the remote heap should also clear `holder->weak_ref`. The slot for // `weak_ref` should be unpoisoned by the GC. Heap::From(remote_heap.get()) - ->CollectGarbage({Heap::Config::CollectionType::kMajor, - Heap::Config::StackState::kNoHeapPointers, - Heap::Config::MarkingType::kAtomic, - Heap::Config::SweepingType::kAtomic}); + ->CollectGarbage({CollectionType::kMajor, StackState::kNoHeapPointers, + GCConfig::MarkingType::kAtomic, + GCConfig::SweepingType::kAtomic}); // Finish the sweeper which will find the CrossThreadPersistent in cleared // state. @@ -491,5 +497,41 @@ EXPECT_EQ(1u, Holder::destructor_callcount); } +TEST_F(SweeperTest, SweepOnAllocationTakeLastFreeListEntry) { + // The test allocates the following layout: + // |--object-A--|-object-B-|--object-A--|---free-space---| + // Objects A are reachable, whereas object B is not. sizeof(B) is smaller than + // that of A. The test starts garbage collection with lazy sweeping, then + // tries to allocate object A, expecting the allocation to end up in the + // free space on the same page. + using GCedA = GCed<256>; + using GCedB = GCed<240>; + + PreciseGC(); + + // Allocate the layout. + Persistent<GCedA> a1 = MakeGarbageCollected<GCedA>(GetAllocationHandle()); + MakeGarbageCollected<GCedB>(GetAllocationHandle()); + Persistent<GCedA> a2 = MakeGarbageCollected<GCedA>(GetAllocationHandle()); + ConstAddress free_space_start = + ObjectView<>(HeapObjectHeader::FromObject(a2.Get())).End(); + + // Start the GC without sweeping. + testing::TestPlatform::DisableBackgroundTasksScope no_concurrent_sweep_scope( + GetPlatformHandle().get()); + static constexpr GCConfig config = { + CollectionType::kMajor, StackState::kNoHeapPointers, + GCConfig::MarkingType::kAtomic, + GCConfig::SweepingType::kIncrementalAndConcurrent}; + Heap::From(GetHeap())->CollectGarbage(config); + + // Allocate and sweep.
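+ // The allocation below triggers sweeping on allocation; the swept page's
+ // last free-list entry should satisfy the request, so the new object is
+ // expected to start exactly at free_space_start (checked by the EXPECT_EQ).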
+ const GCedA* allocated_after_sweeping = + MakeGarbageCollected<GCedA>(GetAllocationHandle()); + EXPECT_EQ(free_space_start, + reinterpret_cast<ConstAddress>( + &HeapObjectHeader::FromObject(allocated_after_sweeping))); +} + } // namespace internal } // namespace cppgc diff --git a/deps/v8/test/unittests/heap/cppgc/testing-unittest.cc b/deps/v8/test/unittests/heap/cppgc/testing-unittest.cc index 70f971e62f3393..7700a55fc6ab1e 100644 --- a/deps/v8/test/unittests/heap/cppgc/testing-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/testing-unittest.cc @@ -28,7 +28,7 @@ TEST_F(TestingTest, auto* gced = MakeGarbageCollected<GCed>(GetHeap()->GetAllocationHandle()); WeakPersistent<GCed> weak{gced}; internal::Heap::From(GetHeap())->CollectGarbage( - Heap::Config::PreciseAtomicConfig()); + GCConfig::PreciseAtomicConfig()); EXPECT_FALSE(weak); } { @@ -38,7 +38,7 @@ TEST_F(TestingTest, GetHeap()->GetHeapHandle(), EmbedderStackState::kMayContainHeapPointers); internal::Heap::From(GetHeap())->CollectGarbage( - Heap::Config::PreciseAtomicConfig()); + GCConfig::PreciseAtomicConfig()); EXPECT_FALSE(weak); } { @@ -47,7 +47,7 @@ TEST_F(TestingTest, cppgc::testing::OverrideEmbedderStackStateScope override_stack( GetHeap()->GetHeapHandle(), EmbedderStackState::kNoHeapPointers); internal::Heap::From(GetHeap())->CollectGarbage( - Heap::Config::ConservativeAtomicConfig()); + GCConfig::ConservativeAtomicConfig()); EXPECT_TRUE(weak); } } diff --git a/deps/v8/test/unittests/heap/cppgc/tests.h b/deps/v8/test/unittests/heap/cppgc/tests.h index aa6db51e422bb0..5a9536b048395e 100644 --- a/deps/v8/test/unittests/heap/cppgc/tests.h +++ b/deps/v8/test/unittests/heap/cppgc/tests.h @@ -87,10 +87,9 @@ class TestWithHeap : public TestWithPlatform { // size of the heap and corresponding pages. 
void ConservativeMemoryDiscardingGC() { internal::Heap::From(GetHeap())->CollectGarbage( - {GarbageCollector::Config::CollectionType::kMajor, - Heap::StackState::kMayContainHeapPointers, + {CollectionType::kMajor, Heap::StackState::kMayContainHeapPointers, cppgc::Heap::MarkingType::kAtomic, cppgc::Heap::SweepingType::kAtomic, - GarbageCollector::Config::FreeMemoryHandling::kDiscardWherePossible}); + GCConfig::FreeMemoryHandling::kDiscardWherePossible}); } cppgc::Heap* GetHeap() const { return heap_.get(); } diff --git a/deps/v8/test/unittests/heap/cppgc/weak-container-unittest.cc b/deps/v8/test/unittests/heap/cppgc/weak-container-unittest.cc index 5fa8b1a884fcd4..192f4b7052c854 100644 --- a/deps/v8/test/unittests/heap/cppgc/weak-container-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/weak-container-unittest.cc @@ -18,26 +18,23 @@ namespace internal { namespace { class WeakContainerTest : public testing::TestWithHeap { public: - using Config = Marker::MarkingConfig; - void StartMarking() { CHECK_EQ(0u, Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes()); - Config config = {Config::CollectionType::kMajor, - Config::StackState::kNoHeapPointers, - Config::MarkingType::kIncremental}; + MarkingConfig config = {CollectionType::kMajor, StackState::kNoHeapPointers, + MarkingConfig::MarkingType::kIncremental}; GetMarkerRef() = std::make_unique<Marker>( Heap::From(GetHeap())->AsBase(), GetPlatformHandle().get(), config); GetMarkerRef()->StartMarking(); } - void FinishMarking(Config::StackState stack_state) { + void FinishMarking(StackState stack_state) { GetMarkerRef()->FinishMarking(stack_state); marked_bytes_ = Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes(); GetMarkerRef().reset(); Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted( - GarbageCollector::Config::SweepingType::kAtomic); + GCConfig::SweepingType::kAtomic); } size_t GetMarkedBytes() const { return marked_bytes_; } @@ -96,7 +93,7 @@ TEST_F(WeakContainerTest, TraceableGCedTraced) { obj->n_trace_calls = 0u; StartMarking(); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); - FinishMarking(Config::StackState::kNoHeapPointers); + FinishMarking(StackState::kNoHeapPointers); EXPECT_NE(0u, obj->n_trace_calls); EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes()); } @@ -107,7 +104,7 @@ TEST_F(WeakContainerTest, NonTraceableGCedNotTraced) { obj->n_trace_calls = 0u; StartMarking(); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); - FinishMarking(Config::StackState::kNoHeapPointers); + FinishMarking(StackState::kNoHeapPointers); EXPECT_EQ(0u, obj->n_trace_calls); EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes()); } @@ -118,7 +115,7 @@ TEST_F(WeakContainerTest, NonTraceableGCedNotTracedConservatively) { obj->n_trace_calls = 0u; StartMarking(); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); - FinishMarking(Config::StackState::kMayContainHeapPointers); + FinishMarking(StackState::kMayContainHeapPointers); EXPECT_NE(0u, obj->n_trace_calls); EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes()); } @@ -129,7 +126,7 @@ TEST_F(WeakContainerTest, PreciseGCTracesWeakContainerWhenTraced) { obj->n_trace_calls = 0u; StartMarking(); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); - FinishMarking(Config::StackState::kNoHeapPointers); + FinishMarking(StackState::kNoHeapPointers); EXPECT_EQ(1u, obj->n_trace_calls); EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes()); } @@ -140,7 
+137,7 @@ TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainer) { obj->n_trace_calls = 0u; StartMarking(); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); - FinishMarking(Config::StackState::kMayContainHeapPointers); + FinishMarking(StackState::kMayContainHeapPointers); EXPECT_EQ(2u, obj->n_trace_calls); EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes()); } @@ -155,7 +152,7 @@ TEST_F(WeakContainerTest, ConservativeGCTracesWeakContainerOnce) { obj->n_trace_calls = 0u; StartMarking(); GetMarkerRef()->Visitor().TraceWeakContainer(obj, EmptyWeakCallback, nullptr); - FinishMarking(Config::StackState::kMayContainHeapPointers); + FinishMarking(StackState::kMayContainHeapPointers); EXPECT_EQ(1u, obj->n_trace_calls); EXPECT_EQ(SizeOf<NonTraceableGCed>(), GetMarkedBytes()); } @@ -183,7 +180,7 @@ TEST_F(WeakContainerTest, WeakContainerWeakCallbackCalled) { StartMarking(); GetMarkerRef()->Visitor().TraceWeakContainer(obj, WeakCallback::callback, obj); - FinishMarking(Config::StackState::kMayContainHeapPointers); + FinishMarking(StackState::kMayContainHeapPointers); EXPECT_NE(0u, WeakCallback::n_callback_called); EXPECT_EQ(SizeOf<TraceableGCed>(), GetMarkedBytes()); EXPECT_EQ(obj, WeakCallback::obj); diff --git a/deps/v8/test/unittests/heap/cppgc/workloads-unittest.cc b/deps/v8/test/unittests/heap/cppgc/workloads-unittest.cc index 820bac967aa090..2b3c5b0841f41b 100644 --- a/deps/v8/test/unittests/heap/cppgc/workloads-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/workloads-unittest.cc @@ -26,11 +26,11 @@ class WorkloadsTest : public testing::TestWithHeap { public: void ConservativeGC() { internal::Heap::From(GetHeap())->CollectGarbage( - Heap::Config::ConservativeAtomicConfig()); + GCConfig::ConservativeAtomicConfig()); } void PreciseGC() { internal::Heap::From(GetHeap())->CollectGarbage( - Heap::Config::PreciseAtomicConfig()); + GCConfig::PreciseAtomicConfig()); } }; diff --git a/deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc b/deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc index 845e580ccdfba6..2d6df97180de73 100644 --- a/deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc +++ b/deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc @@ -29,16 +29,15 @@ class V8_NODISCARD IncrementalMarkingScope { marker_->FinishMarking(kIncrementalConfig.stack_state); } - static constexpr Marker::MarkingConfig kIncrementalConfig{ - Marker::MarkingConfig::CollectionType::kMajor, - Marker::MarkingConfig::StackState::kNoHeapPointers, - Marker::MarkingConfig::MarkingType::kIncremental}; + static constexpr MarkingConfig kIncrementalConfig{ + CollectionType::kMajor, StackState::kNoHeapPointers, + MarkingConfig::MarkingType::kIncremental}; private: MarkerBase* marker_; }; -constexpr Marker::MarkingConfig IncrementalMarkingScope::kIncrementalConfig; +constexpr MarkingConfig IncrementalMarkingScope::kIncrementalConfig; class V8_NODISCARD ExpectWriteBarrierFires final : private IncrementalMarkingScope { diff --git a/deps/v8/test/unittests/heap/embedder-tracing-unittest.cc b/deps/v8/test/unittests/heap/embedder-tracing-unittest.cc index 6391c7dce2568e..5c60de51ce7040 100644 --- a/deps/v8/test/unittests/heap/embedder-tracing-unittest.cc +++ b/deps/v8/test/unittests/heap/embedder-tracing-unittest.cc @@ -457,9 +457,9 @@ TEST_F(EmbedderTracingTest, FinalizeTracingWhenMarking) { // Finalize a potentially running garbage collection. 
CollectGarbage(OLD_SPACE); - if (heap->mark_compact_collector()->sweeping_in_progress()) { - heap->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } heap->tracer()->StopFullCycleIfNeeded(); EXPECT_TRUE(heap->incremental_marking()->IsStopped()); diff --git a/deps/v8/test/unittests/heap/global-handles-unittest.cc b/deps/v8/test/unittests/heap/global-handles-unittest.cc index 65ad38f0d48205..99e9f23d980593 100644 --- a/deps/v8/test/unittests/heap/global-handles-unittest.cc +++ b/deps/v8/test/unittests/heap/global-handles-unittest.cc @@ -30,6 +30,7 @@ #include "include/v8-function.h" #include "src/api/api-inl.h" #include "src/execution/isolate.h" +#include "src/flags/flags.h" #include "src/heap/factory.h" #include "src/heap/heap-inl.h" #include "src/objects/objects-inl.h" @@ -496,7 +497,8 @@ TEST_F(GlobalHandlesTest, GCFromWeakCallbacks) { if (v8_flags.single_generation) { FlagAndGlobal fp; ConstructJSApiObject(isolate, context, &fp); - CHECK(!InYoungGeneration(isolate, fp.handle)); + CHECK_IMPLIES(!v8_flags.single_generation, + !InYoungGeneration(isolate, fp.handle)); fp.flag = false; fp.handle.SetWeak(&fp, &ForceMarkSweep1, v8::WeakCallbackType::kParameter); CollectAllGarbage(); diff --git a/deps/v8/test/unittests/heap/global-safepoint-unittest.cc b/deps/v8/test/unittests/heap/global-safepoint-unittest.cc index 57e94e15dfee64..16cb6ea64ab736 100644 --- a/deps/v8/test/unittests/heap/global-safepoint-unittest.cc +++ b/deps/v8/test/unittests/heap/global-safepoint-unittest.cc @@ -22,7 +22,7 @@ namespace { class IsolateWithContextWrapper { public: - explicit IsolateWithContextWrapper() + IsolateWithContextWrapper() : isolate_wrapper_(kNoCounters), isolate_scope_(isolate_wrapper_.isolate()), handle_scope_(isolate_wrapper_.isolate()), @@ -125,8 +125,9 @@ TEST_F(GlobalSafepointTest, Interrupt) { // as of FeedbackVectors, and we wouldn't be testing the interrupt check. base::OS::Sleep(base::TimeDelta::FromMilliseconds(500)); GlobalSafepointScope global_safepoint(i_main_isolate); - i_main_isolate->shared_isolate()->global_safepoint()->IterateClientIsolates( - [](Isolate* client) { + i_main_isolate->shared_heap_isolate() + ->global_safepoint() + ->IterateClientIsolates([](Isolate* client) { client->stack_guard()->RequestTerminateExecution(); }); } diff --git a/deps/v8/test/unittests/heap/heap-unittest.cc b/deps/v8/test/unittests/heap/heap-unittest.cc index 300602ee5cdede..32d1d4525a3fb4 100644 --- a/deps/v8/test/unittests/heap/heap-unittest.cc +++ b/deps/v8/test/unittests/heap/heap-unittest.cc @@ -10,8 +10,10 @@ #include "include/v8-isolate.h" #include "include/v8-object.h" +#include "src/flags/flags.h" #include "src/handles/handles-inl.h" #include "src/heap/gc-tracer.h" +#include "src/heap/marking-state-inl.h" #include "src/heap/memory-chunk.h" #include "src/heap/remembered-set.h" #include "src/heap/safepoint.h" @@ -179,6 +181,37 @@ TEST_F(HeapTest, HeapLayout) { } #endif // V8_COMPRESS_POINTERS +namespace { +void ShrinkNewSpace(NewSpace* new_space) { + if (!v8_flags.minor_mc) { + new_space->Shrink(); + return; + } + // MinorMC shrinks the space as part of sweeping. 
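+ // Note: the GCTracer Start*/Stop* bracketing below emulates an atomic GC
+ // pause around page release; this mirrors the production sweeping path (an
+ // assumption about where releasing pages is legal, not a documented API
+ // contract).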
+ PagedNewSpace* paged_new_space = PagedNewSpace::From(new_space); + GCTracer* tracer = paged_new_space->heap()->tracer(); + tracer->StartObservablePause(); + tracer->StartCycle(GarbageCollector::MARK_COMPACTOR, + GarbageCollectionReason::kTesting, "heap unittest", + GCTracer::MarkingType::kAtomic); + tracer->StartAtomicPause(); + paged_new_space->StartShrinking(); + for (Page* page = paged_new_space->first_page(); + page != paged_new_space->last_page() && + (paged_new_space->ShouldReleasePage());) { + Page* current_page = page; + page = page->next_page(); + if (current_page->allocated_bytes() == 0) { + paged_new_space->ReleasePage(current_page); + } + } + paged_new_space->FinishShrinking(); + tracer->StopAtomicPause(); + tracer->StopObservablePause(); + tracer->NotifySweepingCompleted(); +} +} // namespace + TEST_F(HeapTest, GrowAndShrinkNewSpace) { if (v8_flags.single_generation) return; { @@ -197,7 +230,7 @@ TEST_F(HeapTest, GrowAndShrinkNewSpace) { // Make sure we're in a consistent state to start out. CollectAllGarbage(); CollectAllGarbage(); - new_space->Shrink(); + ShrinkNewSpace(new_space); // Explicitly growing should double the space capacity. size_t old_capacity, new_capacity; @@ -216,7 +249,7 @@ TEST_F(HeapTest, GrowAndShrinkNewSpace) { // Explicitly shrinking should not affect space capacity. old_capacity = new_space->TotalCapacity(); - new_space->Shrink(); + ShrinkNewSpace(new_space); new_capacity = new_space->TotalCapacity(); CHECK_EQ(old_capacity, new_capacity); @@ -226,7 +259,7 @@ TEST_F(HeapTest, GrowAndShrinkNewSpace) { // Explicitly shrinking should halve the space capacity. old_capacity = new_space->TotalCapacity(); - new_space->Shrink(); + ShrinkNewSpace(new_space); new_capacity = new_space->TotalCapacity(); if (v8_flags.minor_mc) { // Shrinking may not be able to remove any pages if all contain live @@ -238,9 +271,9 @@ TEST_F(HeapTest, GrowAndShrinkNewSpace) { // Consecutive shrinking should not affect space capacity. old_capacity = new_space->TotalCapacity(); - new_space->Shrink(); - new_space->Shrink(); - new_space->Shrink(); + ShrinkNewSpace(new_space); + ShrinkNewSpace(new_space); + ShrinkNewSpace(new_space); new_capacity = new_space->TotalCapacity(); CHECK_EQ(old_capacity, new_capacity); } @@ -396,7 +429,7 @@ TEST_F(HeapTest, Regress978156) { marking->Start(GarbageCollector::MARK_COMPACTOR, i::GarbageCollectionReason::kTesting); } - MarkingState* marking_state = marking->marking_state(); + MarkingState* marking_state = heap->marking_state(); // 6. Mark the filler black to access its two markbits. This triggers // an out-of-bounds access of the marking bitmap in a bad case. 
marking_state->WhiteToGrey(filler); diff --git a/deps/v8/test/unittests/heap/heap-utils.cc b/deps/v8/test/unittests/heap/heap-utils.cc index ea7d2a8002d0b7..880b03400224de 100644 --- a/deps/v8/test/unittests/heap/heap-utils.cc +++ b/deps/v8/test/unittests/heap/heap-utils.cc @@ -13,7 +13,6 @@ #include "src/heap/new-spaces.h" #include "src/heap/safepoint.h" #include "src/objects/free-space-inl.h" -#include "v8-internal.h" namespace v8 { namespace internal { @@ -23,19 +22,18 @@ void HeapInternalsBase::SimulateIncrementalMarking(Heap* heap, constexpr double kStepSizeInMs = 100; CHECK(v8_flags.incremental_marking); i::IncrementalMarking* marking = heap->incremental_marking(); - i::MarkCompactCollector* collector = heap->mark_compact_collector(); - if (collector->sweeping_in_progress()) { + if (heap->sweeping_in_progress()) { SafepointScope scope(heap); - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } if (marking->IsStopped()) { heap->StartIncrementalMarking(i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting); } - CHECK(marking->IsMarking()); + CHECK(marking->IsMajorMarking()); if (!force_completion) return; while (!marking->IsMajorMarkingComplete()) { @@ -153,11 +151,11 @@ void HeapInternalsBase::SimulateFullSpace(v8::internal::PagedSpace* space) { // v8_flags.stress_concurrent_allocation = false; // Background thread allocating concurrently interferes with this function. CHECK(!v8_flags.stress_concurrent_allocation); - CodePageCollectionMemoryModificationScopeForTesting code_scope(space->heap()); - i::MarkCompactCollector* collector = space->heap()->mark_compact_collector(); - if (collector->sweeping_in_progress()) { - collector->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + Heap* heap = space->heap(); + CodePageCollectionMemoryModificationScopeForTesting code_scope(heap); + if (heap->sweeping_in_progress()) { + heap->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } space->FreeLinearAllocationArea(); space->ResetFreeList(); diff --git a/deps/v8/test/unittests/heap/heap-utils.h b/deps/v8/test/unittests/heap/heap-utils.h index f4bd609d01315e..6a111aaa80ae0d 100644 --- a/deps/v8/test/unittests/heap/heap-utils.h +++ b/deps/v8/test/unittests/heap/heap-utils.h @@ -78,8 +78,8 @@ class WithHeapInternals : public TMixin, HeapInternalsBase { CHECK(!v8_flags.stress_concurrent_allocation); FullGC(); FullGC(); - heap()->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap()->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); heap()->old_space()->FreeLinearAllocationArea(); for (Page* page : *heap()->old_space()) { page->MarkNeverAllocateForTesting(); @@ -88,10 +88,10 @@ class WithHeapInternals : public TMixin, HeapInternalsBase { void GcAndSweep(i::AllocationSpace space) { heap()->CollectGarbage(space, GarbageCollectionReason::kTesting); - if (heap()->mark_compact_collector()->sweeping_in_progress()) { + if (heap()->sweeping_in_progress()) { SafepointScope scope(heap()); - heap()->mark_compact_collector()->EnsureSweepingCompleted( - MarkCompactCollector::SweepingForcedFinalizationMode::kV8Only); + heap()->EnsureSweepingCompleted( + Heap::SweepingForcedFinalizationMode::kV8Only); } } }; diff --git a/deps/v8/test/unittests/heap/marking-inner-pointer-resolution-unittest.cc 
b/deps/v8/test/unittests/heap/marking-inner-pointer-resolution-unittest.cc index 2ab6589a5b354b..1dd205574f5731 100644 --- a/deps/v8/test/unittests/heap/marking-inner-pointer-resolution-unittest.cc +++ b/deps/v8/test/unittests/heap/marking-inner-pointer-resolution-unittest.cc @@ -2,15 +2,19 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#include "src/heap/gc-tracer.h" #include "src/heap/mark-compact.h" +#include "test/unittests/heap/heap-utils.h" #include "test/unittests/test-utils.h" namespace v8 { namespace internal { -#ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_MB namespace { +constexpr int Tagged = kTaggedSize; +constexpr int FullCell = Bitmap::kBitsPerCell * Tagged; + class InnerPointerResolutionTest : public TestWithIsolate { public: struct ObjectRequest { @@ -99,7 +103,7 @@ class InnerPointerResolutionTest : public TestWithIsolate { Address ptr = page->area_start(); for (auto object : objects) { DCHECK_NE(ObjectRequest::LARGE, object.type); - DCHECK_EQ(0, object.size % kTaggedSize); + DCHECK_EQ(0, object.size % Tagged); // Check if padding is needed. int index_in_cell = Bitmap::IndexInCell(page->AddressToMarkbitIndex(ptr)); @@ -111,7 +115,7 @@ class InnerPointerResolutionTest : public TestWithIsolate { const int needed_padding_size = ((Bitmap::kBitsPerCell + object.index_in_cell - index_in_cell) % Bitmap::kBitsPerCell) * - Bitmap::kBytesPerCell; + Tagged; if (needed_padding_size > 0) { ObjectRequest pad{needed_padding_size, ObjectRequest::FREE, @@ -181,13 +185,13 @@ class InnerPointerResolutionTest : public TestWithIsolate { switch (object.type) { case ObjectRequest::REGULAR: case ObjectRequest::LARGE: { - DCHECK_LE(2 * kTaggedSize, object.size); + DCHECK_LE(2 * Tagged, object.size); ReadOnlyRoots roots(heap()); HeapObject heap_object(HeapObject::FromAddress(object.address)); heap_object.set_map_after_allocation(roots.unchecked_fixed_array_map(), SKIP_WRITE_BARRIER); FixedArray arr(FixedArray::cast(heap_object)); - arr.set_length((object.size - FixedArray::SizeFor(0)) / kTaggedSize); + arr.set_length((object.size - FixedArray::SizeFor(0)) / Tagged); DCHECK_EQ(object.size, arr.AllocatedSize()); break; } @@ -201,17 +205,17 @@ class InnerPointerResolutionTest : public TestWithIsolate { case ObjectRequest::WHITE: break; case ObjectRequest::GREY: - collector()->marking_state()->WhiteToGrey( + heap()->marking_state()->WhiteToGrey( HeapObject::FromAddress(object.address)); break; case ObjectRequest::BLACK: - DCHECK_LE(2 * kTaggedSize, object.size); - collector()->marking_state()->WhiteToBlack( + DCHECK_LE(2 * Tagged, object.size); + heap()->marking_state()->WhiteToBlack( HeapObject::FromAddress(object.address)); break; case ObjectRequest::BLACK_AREA: { MemoryChunk* page = LookupPage(object.page_id); - collector()->marking_state()->bitmap(page)->SetRange( + heap()->marking_state()->bitmap(page)->SetRange( page->AddressToMarkbitIndex(object.address), page->AddressToMarkbitIndex(object.address + object.size)); break; @@ -229,8 +233,8 @@ class InnerPointerResolutionTest : public TestWithIsolate { !IsPageAlive(object.page_id) || (object.type == ObjectRequest::FREE) || (object.type == ObjectRequest::REGULAR && (object.marked == ObjectRequest::BLACK_AREA || - (object.marked == ObjectRequest::BLACK && offset < 2 * kTaggedSize) || - (object.marked == ObjectRequest::GREY && offset < kTaggedSize))); + (object.marked == ObjectRequest::BLACK && offset < 2 * Tagged) || + (object.marked == ObjectRequest::GREY && offset < Tagged))); if 
(should_return_null) EXPECT_EQ(kNullAddress, base_ptr); else @@ -279,15 +283,15 @@ TEST_F(InnerPointerResolutionTest, EmptyPage) { TEST_F(InnerPointerResolutionTest, NothingMarked) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {64}, - {48}, - {52}, - {512}, - {4, ObjectRequest::FREE}, - {60}, - {8, ObjectRequest::FREE}, - {8}, - {42176}, + {16 * Tagged}, + {12 * Tagged}, + {13 * Tagged}, + {128 * Tagged}, + {1 * Tagged, ObjectRequest::FREE}, + {15 * Tagged}, + {2 * Tagged, ObjectRequest::FREE}, + {2 * Tagged}, + {10544 * Tagged}, }); TestAll(); } @@ -295,15 +299,15 @@ TEST_F(InnerPointerResolutionTest, NothingMarked) { TEST_F(InnerPointerResolutionTest, AllMarked) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {64, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {48, ObjectRequest::REGULAR, ObjectRequest::GREY}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {512, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {4, ObjectRequest::FREE, ObjectRequest::GREY}, - {60, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {8, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {42176, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {12 * Tagged, ObjectRequest::REGULAR, ObjectRequest::GREY}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {1 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {2 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {10544 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, }); TestAll(); } @@ -311,15 +315,15 @@ TEST_F(InnerPointerResolutionTest, AllMarked) { TEST_F(InnerPointerResolutionTest, SomeMarked) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {48, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {512, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {4, ObjectRequest::FREE, ObjectRequest::GREY}, - {60, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {8, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {42176, ObjectRequest::REGULAR, ObjectRequest::GREY}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {12 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {1 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {2 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {10544 * Tagged, ObjectRequest::REGULAR, ObjectRequest::GREY}, }); TestAll(); } @@ -327,15 +331,15 @@ TEST_F(InnerPointerResolutionTest, SomeMarked) { TEST_F(InnerPointerResolutionTest, BlackAreas) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {48, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {512, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {4, ObjectRequest::FREE, ObjectRequest::GREY}, - {60, 
ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {8, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {42176, ObjectRequest::REGULAR, ObjectRequest::GREY}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {12 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {1 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {2 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {10544 * Tagged, ObjectRequest::REGULAR, ObjectRequest::GREY}, }); TestAll(); } @@ -347,16 +351,16 @@ TEST_F(InnerPointerResolutionTest, ThreeMarkedObjectsInSameCell) { CreateObjectsInPage({ // Some initial large unmarked object, followed by a small marked object // towards the end of the cell. - {512}, - {20, ObjectRequest::REGULAR, ObjectRequest::BLACK, 20, + {128 * Tagged}, + {5 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK, 20, ObjectRequest::PAD_WHITE}, // Then three marked objects in the same cell. - {32, ObjectRequest::REGULAR, ObjectRequest::BLACK, 3, + {8 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK, 3, ObjectRequest::PAD_WHITE}, - {48, ObjectRequest::REGULAR, ObjectRequest::BLACK, 11}, - {20, ObjectRequest::REGULAR, ObjectRequest::BLACK, 23}, + {12 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK, 11}, + {5 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK, 23}, // This marked object is in the next cell. - {64, ObjectRequest::REGULAR, ObjectRequest::BLACK, 17, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK, 17, ObjectRequest::PAD_WHITE}, }); TestAll(); @@ -367,16 +371,16 @@ TEST_F(InnerPointerResolutionTest, ThreeBlackAreasInSameCell) { CreateObjectsInPage({ // Some initial large unmarked object, followed by a small black area // towards the end of the cell. - {512}, - {20, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 20, + {128 * Tagged}, + {5 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 20, ObjectRequest::PAD_WHITE}, // Then three black areas in the same cell. - {32, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 3, + {8 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 3, ObjectRequest::PAD_WHITE}, - {48, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 11}, - {20, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 23}, + {12 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 11}, + {5 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 23}, // This black area is in the next cell. 
- {64, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 17, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 17, ObjectRequest::PAD_WHITE}, }); TestAll(); @@ -385,7 +389,7 @@ TEST_F(InnerPointerResolutionTest, ThreeBlackAreasInSameCell) { TEST_F(InnerPointerResolutionTest, SmallBlackAreaAtPageStart) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 30, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE, 30, ObjectRequest::PAD_BLACK}, }); TestAll(); @@ -394,8 +398,8 @@ TEST_F(InnerPointerResolutionTest, SmallBlackAreaAtPageStart) { TEST_F(InnerPointerResolutionTest, SmallBlackAreaAtPageStartUntilCellBoundary) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {8, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, + {2 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, ObjectRequest::PAD_BLACK}, }); TestAll(); @@ -404,9 +408,8 @@ TEST_F(InnerPointerResolutionTest, SmallBlackAreaAtPageStartUntilCellBoundary) { TEST_F(InnerPointerResolutionTest, LargeBlackAreaAtPageStart) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {42 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell, - ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 30, + {42 * FullCell, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE, 30, ObjectRequest::PAD_BLACK}, }); TestAll(); @@ -415,9 +418,8 @@ TEST_F(InnerPointerResolutionTest, LargeBlackAreaAtPageStart) { TEST_F(InnerPointerResolutionTest, LargeBlackAreaAtPageStartUntilCellBoundary) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {42 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell, - ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, + {42 * FullCell, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, ObjectRequest::PAD_BLACK}, }); TestAll(); @@ -426,8 +428,8 @@ TEST_F(InnerPointerResolutionTest, LargeBlackAreaAtPageStartUntilCellBoundary) { TEST_F(InnerPointerResolutionTest, SmallBlackAreaStartingAtCellBoundary) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {512}, - {20, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, + {128 * Tagged}, + {5 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, ObjectRequest::PAD_WHITE}, }); TestAll(); @@ -436,10 +438,9 @@ TEST_F(InnerPointerResolutionTest, SmallBlackAreaStartingAtCellBoundary) { TEST_F(InnerPointerResolutionTest, LargeBlackAreaStartingAtCellBoundary) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {512}, - {42 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell + 64, - ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, - ObjectRequest::PAD_WHITE}, + {128 * Tagged}, + {42 * FullCell + 16 * Tagged, ObjectRequest::REGULAR, + ObjectRequest::BLACK_AREA, 0, ObjectRequest::PAD_WHITE}, }); TestAll(); } @@ -447,10 +448,10 @@ TEST_F(InnerPointerResolutionTest, LargeBlackAreaStartingAtCellBoundary) { TEST_F(InnerPointerResolutionTest, SmallBlackAreaEndingAtCellBoundary) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {512}, - {8, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 13, + {128 * Tagged}, + {2 * Tagged, 
ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 13, ObjectRequest::PAD_WHITE}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, ObjectRequest::PAD_BLACK}, }); TestAll(); @@ -459,11 +460,10 @@ TEST_F(InnerPointerResolutionTest, SmallBlackAreaEndingAtCellBoundary) { TEST_F(InnerPointerResolutionTest, LargeBlackAreaEndingAtCellBoundary) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {512}, - {42 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell + 64, - ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, - ObjectRequest::PAD_WHITE}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, + {128 * Tagged}, + {42 * FullCell + 16 * Tagged, ObjectRequest::REGULAR, + ObjectRequest::BLACK_AREA, 0, ObjectRequest::PAD_WHITE}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, ObjectRequest::PAD_BLACK}, }); TestAll(); @@ -472,12 +472,12 @@ TEST_F(InnerPointerResolutionTest, LargeBlackAreaEndingAtCellBoundary) { TEST_F(InnerPointerResolutionTest, TwoSmallBlackAreasAtCellBoundaries) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {512}, - {24, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, + {128 * Tagged}, + {6 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, ObjectRequest::PAD_WHITE}, - {8, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 25, + {2 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 25, ObjectRequest::PAD_WHITE}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE, 0, ObjectRequest::PAD_BLACK}, }); TestAll(); @@ -486,9 +486,9 @@ TEST_F(InnerPointerResolutionTest, TwoSmallBlackAreasAtCellBoundaries) { TEST_F(InnerPointerResolutionTest, BlackAreaOfOneCell) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {512}, - {Bitmap::kBitsPerCell * Bitmap::kBytesPerCell, ObjectRequest::REGULAR, - ObjectRequest::BLACK_AREA, 0, ObjectRequest::PAD_WHITE}, + {128 * Tagged}, + {1 * FullCell, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, + ObjectRequest::PAD_WHITE}, }); TestAll(); } @@ -496,9 +496,8 @@ TEST_F(InnerPointerResolutionTest, BlackAreaOfOneCell) { TEST_F(InnerPointerResolutionTest, BlackAreaOfManyCells) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {512}, - {17 * Bitmap::kBitsPerCell * Bitmap::kBytesPerCell, - ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, + {128 * Tagged}, + {17 * FullCell, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA, 0, ObjectRequest::PAD_WHITE}, }); TestAll(); @@ -509,21 +508,21 @@ TEST_F(InnerPointerResolutionTest, BlackAreaOfManyCells) { TEST_F(InnerPointerResolutionTest, TwoPages) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {512, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {60, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {42176, ObjectRequest::REGULAR, ObjectRequest::GREY}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {10544 * Tagged, ObjectRequest::REGULAR, ObjectRequest::GREY}, }); CreateObjectsInPage({ - {512, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {48, 
ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {4, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {60, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {12 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {1 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, }); TestAll(); } @@ -548,21 +547,21 @@ TEST_F(InnerPointerResolutionTest, SeveralLargePages) { TEST_F(InnerPointerResolutionTest, PagesOfBothKind) { if (v8_flags.enable_third_party_heap) return; CreateObjectsInPage({ - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {512, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {60, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {42176, ObjectRequest::REGULAR, ObjectRequest::GREY}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {10544 * Tagged, ObjectRequest::REGULAR, ObjectRequest::GREY}, }); CreateObjectsInPage({ - {512, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {48, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {4, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {60, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {12 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {1 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, }); CreateLargeObjects({ {1 * MB, ObjectRequest::LARGE, ObjectRequest::WHITE}, @@ -574,21 +573,21 @@ TEST_F(InnerPointerResolutionTest, PagesOfBothKind) { TEST_F(InnerPointerResolutionTest, FreePages) { if (v8_flags.enable_third_party_heap) return; int some_normal_page = CreateObjectsInPage({ - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {512, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {60, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {42176, ObjectRequest::REGULAR, ObjectRequest::GREY}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {10544 * Tagged, ObjectRequest::REGULAR, ObjectRequest::GREY}, }); CreateObjectsInPage({ - {512, ObjectRequest::REGULAR, 
ObjectRequest::BLACK_AREA}, - {64, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {48, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, - {52, ObjectRequest::REGULAR, ObjectRequest::BLACK}, - {4, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::FREE, ObjectRequest::GREY}, - {8, ObjectRequest::REGULAR, ObjectRequest::WHITE}, - {60, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {128 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {16 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {12 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK_AREA}, + {13 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, + {1 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::FREE, ObjectRequest::GREY}, + {2 * Tagged, ObjectRequest::REGULAR, ObjectRequest::WHITE}, + {15 * Tagged, ObjectRequest::REGULAR, ObjectRequest::BLACK}, }); auto large_pages = CreateLargeObjects({ {1 * MB, ObjectRequest::LARGE, ObjectRequest::WHITE}, @@ -601,7 +600,262 @@ TEST_F(InnerPointerResolutionTest, FreePages) { TestAll(); } -#endif // V8_ENABLE_INNER_POINTER_RESOLUTION_MB +using InnerPointerResolutionHeapTest = TestWithHeapInternalsAndContext; + +TEST_F(InnerPointerResolutionHeapTest, UnusedRegularYoungPages) { + ManualGCScope manual_gc_scope(isolate()); + v8_flags.page_promotion = false; + + Persistent<v8::FixedArray> weak1, weak2, strong; + Address inner_ptr1, inner_ptr2, inner_ptr3, outside_ptr1, outside_ptr2; + Page *page1, *page2; + + { + PtrComprCageBase cage_base{isolate()}; + HandleScope scope(isolate()); + + // Allocate two objects, large enough that they fall in two different young + // generation pages. Keep weak references to these objects. + const int length = + (heap()->MaxRegularHeapObjectSize(AllocationType::kYoung) - + FixedArray::SizeFor(0)) / + Tagged; + auto h1 = factory()->NewFixedArray(length, AllocationType::kYoung); + auto h2 = factory()->NewFixedArray(length, AllocationType::kYoung); + weak1.Reset(v8_isolate(), Utils::FixedArrayToLocal(h1)); + weak2.Reset(v8_isolate(), Utils::FixedArrayToLocal(h2)); + weak1.SetWeak(); + weak2.SetWeak(); + auto obj1 = h1->GetHeapObject(); + auto obj2 = h2->GetHeapObject(); + page1 = Page::FromHeapObject(obj1); + EXPECT_TRUE(!page1->IsLargePage()); + EXPECT_TRUE(v8_flags.minor_mc || page1->IsToPage()); + page2 = Page::FromHeapObject(obj2); + EXPECT_TRUE(!page2->IsLargePage()); + EXPECT_TRUE(v8_flags.minor_mc || page2->IsToPage()); + EXPECT_NE(page1, page2); + + // Allocate one more object, small enough that it fits in page2. + // Keep a strong reference to this object. + auto h3 = factory()->NewFixedArray(16, AllocationType::kYoung); + strong.Reset(v8_isolate(), Utils::FixedArrayToLocal(h3)); + auto obj3 = h3->GetHeapObject(); + EXPECT_EQ(page2, Page::FromHeapObject(obj3)); + EXPECT_EQ(obj3.address(), obj2.address() + obj2.Size(cage_base)); + + // Keep inner pointers to all objects. + inner_ptr1 = obj1.address() + 17 * Tagged; + inner_ptr2 = obj2.address() + 37 * Tagged; + inner_ptr3 = obj3.address() + 7 * Tagged; + + // Keep pointers to the end of the pages, after the objects. + outside_ptr1 = page1->area_end() - 3 * Tagged; + outside_ptr2 = page2->area_end() - 2 * Tagged; + EXPECT_LE(obj1.address() + obj1.Size(cage_base), outside_ptr1); + EXPECT_LE(obj2.address() + obj2.Size(cage_base), outside_ptr2); + EXPECT_LE(obj3.address() + obj3.Size(cage_base), outside_ptr2); + + // Ensure the young generation space is iterable. 
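// (Why this step matters: the unused tail of the current linear allocation
// buffer is unparsable raw memory until a filler object is written over it,
// so an object walk, and with it inner pointer resolution, would otherwise
// run off the last allocation into uninitialized memory.)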
+ heap()->new_space()->MakeLinearAllocationAreaIterable(); + + // Inner pointer resolution should work now, finding the objects in the + // case of the inner pointers. + EXPECT_EQ( + obj1.address(), + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr1)); + EXPECT_EQ( + obj2.address(), + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr2)); + EXPECT_EQ( + obj3.address(), + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr3)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(outside_ptr1)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(outside_ptr2)); + + // Start incremental marking and mark the third object. + i::IncrementalMarking* marking = heap()->incremental_marking(); + if (marking->IsStopped()) { + SafepointScope scope(heap()); + heap()->tracer()->StartCycle( + GarbageCollector::MARK_COMPACTOR, GarbageCollectionReason::kTesting, + "unit test", GCTracer::MarkingType::kIncremental); + marking->Start(GarbageCollector::MARK_COMPACTOR, + i::GarbageCollectionReason::kTesting); + } + MarkingState* marking_state = heap()->marking_state(); + marking_state->WhiteToGrey(obj3); + marking_state->GreyToBlack(obj3); + } + + // Garbage collection should reclaim the two large objects with the weak + // references, but not the small one with the strong reference. + CollectGarbage(NEW_SPACE); + EXPECT_TRUE(weak1.IsEmpty()); + EXPECT_TRUE(weak2.IsEmpty()); + EXPECT_TRUE(!strong.IsEmpty()); + // The two pages should still be around, in the new space. + EXPECT_EQ(page1, heap()->memory_allocator()->LookupChunkContainingAddress( + inner_ptr1)); + EXPECT_EQ(page2, heap()->memory_allocator()->LookupChunkContainingAddress( + inner_ptr2)); + EXPECT_EQ(AllocationSpace::NEW_SPACE, page1->owner_identity()); + EXPECT_EQ(AllocationSpace::NEW_SPACE, page2->owner_identity()); + EXPECT_TRUE(v8_flags.minor_mc || page1->IsFromPage()); + EXPECT_TRUE(v8_flags.minor_mc || page2->IsFromPage()); + + // Inner pointer resolution should work with pointers to unused young + // generation pages (in case of the scavenger, the two pages are now in the + // "from" semispace). There are no objects to be found. + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr1)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr2)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr3)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(outside_ptr1)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(outside_ptr2)); + + // Garbage collection once more. + CollectGarbage(NEW_SPACE); + EXPECT_EQ(AllocationSpace::NEW_SPACE, page1->owner_identity()); + EXPECT_EQ(AllocationSpace::NEW_SPACE, page2->owner_identity()); + // The two pages should still be around, in the new space. + EXPECT_EQ(page1, heap()->memory_allocator()->LookupChunkContainingAddress( + inner_ptr1)); + EXPECT_EQ(page2, heap()->memory_allocator()->LookupChunkContainingAddress( + inner_ptr2)); + EXPECT_TRUE(v8_flags.minor_mc || page1->IsToPage()); + EXPECT_TRUE(v8_flags.minor_mc || page2->IsToPage()); + + // Inner pointer resolution should work with pointers to unused young + // generation pages (in case of the scavenger, the two pages are now in the + // "to" semispace). There are no objects to be found. 
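// The mapping exercised by these checks, from an arbitrary interior address
// to the base of the object containing it (or kNullAddress when there is
// none), can be illustrated with a self-contained toy marking structure.
// This is a minimal sketch with hypothetical names, not V8's implementation:

#include <cassert>
#include <cstdint>
#include <map>

constexpr uintptr_t kToyNull = 0;
constexpr int kToyTagged = 8;  // one mark entry per 8-byte slot

struct ToyPage {
  uintptr_t area_start;
  std::map<size_t, size_t> marked;  // start slot of a marked object -> slots

  // Assumes maybe_inner_ptr lies within this page's area.
  uintptr_t FindBasePtr(uintptr_t maybe_inner_ptr) const {
    size_t slot = (maybe_inner_ptr - area_start) / kToyTagged;
    auto it = marked.upper_bound(slot);  // first object starting after slot
    if (it == marked.begin()) return kToyNull;
    --it;  // closest marked object starting at or before slot
    if (slot < it->first + it->second)
      return area_start + it->first * kToyTagged;
    return kToyNull;  // the pointer lies past that object's end
  }
};

int main() {
  ToyPage page{0x10000, {{16, 8}}};  // one 8-slot object at slot 16
  // An inner pointer resolves to the object's base address.
  assert(page.FindBasePtr(0x10000 + 19 * kToyTagged) ==
         0x10000 + 16 * kToyTagged);
  // A pointer past the object's end resolves to null, as in the test above.
  assert(page.FindBasePtr(0x10000 + 30 * kToyTagged) == kToyNull);
}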
+ EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr1)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr2)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr3)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(outside_ptr1)); + EXPECT_EQ( + kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(outside_ptr2)); +} + +TEST_F(InnerPointerResolutionHeapTest, UnusedLargeYoungPage) { + ManualGCScope manual_gc_scope(isolate()); + v8_flags.page_promotion = false; + + Global<v8::FixedArray> weak; + Address inner_ptr; + Page* page; + + { + PtrComprCageBase cage_base{isolate()}; + HandleScope scope(isolate()); + + // Allocate a large object in the young generation. + const int length = + std::max(1 << kPageSizeBits, + 2 * heap()->MaxRegularHeapObjectSize(AllocationType::kYoung)) / + Tagged; + auto h = factory()->NewFixedArray(length, AllocationType::kYoung); + weak.Reset(v8_isolate(), Utils::FixedArrayToLocal(h)); + weak.SetWeak(); + auto obj = h->GetHeapObject(); + page = Page::FromHeapObject(obj); + EXPECT_TRUE(page->IsLargePage()); + EXPECT_EQ(AllocationSpace::NEW_LO_SPACE, page->owner_identity()); + EXPECT_TRUE(v8_flags.minor_mc || page->IsToPage()); + + // Keep inner pointer. + inner_ptr = obj.address() + 17 * Tagged; + + // Inner pointer resolution should work now, finding the object. + EXPECT_EQ( + obj.address(), + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr)); + } + + // Garbage collection should reclaim the object. + CollectGarbage(NEW_SPACE); + EXPECT_TRUE(weak.IsEmpty()); + + // Inner pointer resolution should work with a pointer to an unused young + // generation large page. There is no object to be found. + EXPECT_EQ(kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr)); +} + +TEST_F(InnerPointerResolutionHeapTest, RegularPageAfterEnd) { + // Allocate a regular page. + OldSpace* old_space = heap()->old_space(); + DCHECK_NE(nullptr, old_space); + auto* page = heap()->memory_allocator()->AllocatePage( + MemoryAllocator::AllocationMode::kRegular, old_space, NOT_EXECUTABLE); + EXPECT_NE(nullptr, page); + + // The end of the page area is expected not to coincide with the beginning of + // the next page. + const int size = (1 << kPageSizeBits) / 2; + const Address mark = page->area_start() + size; + heap()->CreateFillerObjectAt(page->area_start(), size); + heap()->CreateFillerObjectAt(mark, static_cast<int>(page->area_end() - mark)); + Page::UpdateHighWaterMark(mark); + page->ShrinkToHighWaterMark(); + EXPECT_FALSE(Page::IsAlignedToPageSize(page->area_end())); + + // Inner pointer resolution after the end of the page area should work. + Address inner_ptr = page->area_end() + Tagged; + EXPECT_FALSE(Page::IsAlignedToPageSize(inner_ptr)); + EXPECT_EQ(kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr)); + + // Deallocate the page. + heap()->memory_allocator()->Free(MemoryAllocator::FreeMode::kImmediately, + page); +} + +TEST_F(InnerPointerResolutionHeapTest, LargePageAfterEnd) { + // Allocate a large page. 
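// (Each page in a large-object space backs at most one object, so inner
// pointer resolution there amounts to a bounds check against the page; a
// pointer past area_end, as constructed below, must resolve to kNullAddress.)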
+ OldLargeObjectSpace* lo_space = heap()->lo_space(); + EXPECT_NE(nullptr, lo_space); + const int size = 3 * (1 << kPageSizeBits) / 2; + LargePage* page = heap()->memory_allocator()->AllocateLargePage( + lo_space, size, NOT_EXECUTABLE); + EXPECT_NE(nullptr, page); + + // The end of the page area is expected not to coincide with the beginning of + // the next page. + EXPECT_FALSE(Page::IsAlignedToPageSize(page->area_end())); + + // Inner pointer resolution after the end of the page area should work. + Address inner_ptr = page->area_end() + Tagged; + EXPECT_FALSE(Page::IsAlignedToPageSize(inner_ptr)); + EXPECT_EQ(kNullAddress, + heap()->mark_compact_collector()->FindBasePtrForMarking(inner_ptr)); + + // Deallocate the page. + heap()->memory_allocator()->Free(MemoryAllocator::FreeMode::kImmediately, + page); +} } // namespace internal } // namespace v8 diff --git a/deps/v8/test/unittests/heap/page-promotion-unittest.cc b/deps/v8/test/unittests/heap/page-promotion-unittest.cc index 36c08091c0c8f7..515a6b98197815 100644 --- a/deps/v8/test/unittests/heap/page-promotion-unittest.cc +++ b/deps/v8/test/unittests/heap/page-promotion-unittest.cc @@ -3,8 +3,8 @@ // found in the LICENSE file. #include "src/execution/isolate.h" +#include "src/heap/marking-state-inl.h" #include "test/unittests/heap/heap-utils.h" -#include "v8-isolate.h" namespace v8 { namespace internal { @@ -65,8 +65,7 @@ TEST_F(PagePromotionTest, PagePromotion_NewToOld) { const int threshold_bytes = static_cast<int>( v8_flags.page_promotion_threshold * MemoryChunkLayout::AllocatableMemoryInDataPage() / 100); - CHECK_GE(heap->incremental_marking()->marking_state()->live_bytes( - to_be_promoted_page), + CHECK_GE(heap->marking_state()->live_bytes(to_be_promoted_page), threshold_bytes); // Actual checks: The page is in new space first, but is moved to old space diff --git a/deps/v8/test/unittests/heap/shared-heap-unittest.cc b/deps/v8/test/unittests/heap/shared-heap-unittest.cc index cae779a3605957..19b5b1eb6f67d5 100644 --- a/deps/v8/test/unittests/heap/shared-heap-unittest.cc +++ b/deps/v8/test/unittests/heap/shared-heap-unittest.cc @@ -19,23 +19,33 @@ using SharedHeapTest = TestJSSharedMemoryWithIsolate; class SharedHeapNoClientsTest : public TestJSSharedMemoryWithPlatform { public: SharedHeapNoClientsTest() { - bool created; - shared_isolate_ = Isolate::GetProcessWideSharedIsolate(&created); - CHECK(created); + if (v8_flags.shared_space) { + shared_space_isolate_wrapper.emplace(kNoCounters); + shared_isolate_ = shared_space_isolate_wrapper->i_isolate(); + } else { + bool created; + shared_isolate_ = Isolate::GetProcessWideSharedIsolate(&created); + CHECK(created); + } } ~SharedHeapNoClientsTest() override { - Isolate::DeleteProcessWideSharedIsolate(); + if (!v8_flags.shared_space) { + Isolate::DeleteProcessWideSharedIsolate(); + } + + shared_isolate_ = nullptr; } - v8::Isolate* shared_isolate() { - return reinterpret_cast<v8::Isolate*>(i_shared_isolate()); + v8::Isolate* shared_heap_isolate() { + return reinterpret_cast<v8::Isolate*>(i_shared_heap_isolate()); } - Isolate* i_shared_isolate() { return shared_isolate_; } + Isolate* i_shared_heap_isolate() { return shared_isolate_; } private: Isolate* shared_isolate_; + base::Optional<IsolateWrapper> shared_space_isolate_wrapper; }; namespace { @@ -182,10 +192,12 @@ TEST_F(SharedHeapTest, ConcurrentAllocationInSharedMapSpace) { } TEST_F(SharedHeapNoClientsTest, SharedCollectionWithoutClients) { - DCHECK_NULL(i_shared_isolate()->heap()->new_space()); - 
DCHECK_NULL(i_shared_isolate()->heap()->new_lo_space()); + if (!v8_flags.shared_space) { + DCHECK_NULL(i_shared_heap_isolate()->heap()->new_space()); + DCHECK_NULL(i_shared_heap_isolate()->heap()->new_lo_space()); + } - ::v8::internal::CollectGarbage(OLD_SPACE, shared_isolate()); + ::v8::internal::CollectGarbage(OLD_SPACE, shared_heap_isolate()); } void AllocateInSharedHeap(int iterations = 100) { diff --git a/deps/v8/test/unittests/heap/slot-set-unittest.cc b/deps/v8/test/unittests/heap/slot-set-unittest.cc index effd54290f8166..dc1a862059c2cd 100644 --- a/deps/v8/test/unittests/heap/slot-set-unittest.cc +++ b/deps/v8/test/unittests/heap/slot-set-unittest.cc @@ -2,11 +2,12 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. +#include "src/heap/slot-set.h" + #include <limits> #include <map> #include "src/common/globals.h" -#include "src/heap/slot-set.h" #include "src/heap/spaces.h" #include "src/objects/slots.h" #include "testing/gtest/include/gtest/gtest.h" @@ -44,128 +45,6 @@ TEST(SlotSet, BucketsForSize) { SlotSet::BucketsForSize(Page::kPageSize * 2)); } -TEST(SlotSet, InsertAndLookup1) { - SlotSet* set = SlotSet::Allocate(SlotSet::kBucketsRegularPage); - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - EXPECT_FALSE(set->Lookup(i)); - } - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - set->Insert<AccessMode::ATOMIC>(i); - } - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - EXPECT_TRUE(set->Lookup(i)); - } - SlotSet::Delete(set, SlotSet::kBucketsRegularPage); -} - -TEST(SlotSet, InsertAndLookup2) { - SlotSet* set = SlotSet::Allocate(SlotSet::kBucketsRegularPage); - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i % 7 == 0) { - set->Insert<AccessMode::ATOMIC>(i); - } - } - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i % 7 == 0) { - EXPECT_TRUE(set->Lookup(i)); - } else { - EXPECT_FALSE(set->Lookup(i)); - } - } - SlotSet::Delete(set, SlotSet::kBucketsRegularPage); -} - -TEST(SlotSet, Iterate) { - SlotSet* set = SlotSet::Allocate(SlotSet::kBucketsRegularPage); - - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i % 7 == 0) { - set->Insert<AccessMode::ATOMIC>(i); - } - } - - set->Iterate( - kNullAddress, 0, SlotSet::kBucketsRegularPage, - [](MaybeObjectSlot slot) { - if (slot.address() % 3 == 0) { - return KEEP_SLOT; - } else { - return REMOVE_SLOT; - } - }, - SlotSet::KEEP_EMPTY_BUCKETS); - - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i % 21 == 0) { - EXPECT_TRUE(set->Lookup(i)); - } else { - EXPECT_FALSE(set->Lookup(i)); - } - } - - SlotSet::Delete(set, SlotSet::kBucketsRegularPage); -} - -TEST(SlotSet, IterateFromHalfway) { - SlotSet* set = SlotSet::Allocate(SlotSet::kBucketsRegularPage); - - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i % 7 == 0) { - set->Insert<AccessMode::ATOMIC>(i); - } - } - - set->Iterate( - kNullAddress, SlotSet::kBucketsRegularPage / 2, - SlotSet::kBucketsRegularPage, - [](MaybeObjectSlot slot) { - if (slot.address() % 3 == 0) { - return KEEP_SLOT; - } else { - return REMOVE_SLOT; - } - }, - SlotSet::KEEP_EMPTY_BUCKETS); - - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i < Page::kPageSize / 2 && i % 7 == 0) { - EXPECT_TRUE(set->Lookup(i)); - } else if (i >= Page::kPageSize / 2 && i % 21 == 0) { - EXPECT_TRUE(set->Lookup(i)); - } else { - EXPECT_FALSE(set->Lookup(i)); - } - } - - SlotSet::Delete(set, SlotSet::kBucketsRegularPage); -} - -TEST(SlotSet, Remove) { - SlotSet* 
set = SlotSet::Allocate(SlotSet::kBucketsRegularPage); - - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i % 7 == 0) { - set->Insert<AccessMode::ATOMIC>(i); - } - } - - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i % 3 != 0) { - set->Remove(i); - } - } - - for (int i = 0; i < Page::kPageSize; i += kTaggedSize) { - if (i % 21 == 0) { - EXPECT_TRUE(set->Lookup(i)); - } else { - EXPECT_FALSE(set->Lookup(i)); - } - } - - SlotSet::Delete(set, SlotSet::kBucketsRegularPage); -} - TEST(PossiblyEmptyBuckets, ContainsAndInsert) { static const int kBuckets = 100; PossiblyEmptyBuckets possibly_empty_buckets; @@ -180,57 +59,6 @@ TEST(PossiblyEmptyBuckets, ContainsAndInsert) { EXPECT_TRUE(possibly_empty_buckets.Contains(last + 1)); } -void CheckRemoveRangeOn(uint32_t start, uint32_t end) { - SlotSet* set = SlotSet::Allocate(SlotSet::kBucketsRegularPage); - uint32_t first = start == 0 ? 0 : start - kTaggedSize; - uint32_t last = end == Page::kPageSize ? end - kTaggedSize : end; - for (const auto mode : - {SlotSet::FREE_EMPTY_BUCKETS, SlotSet::KEEP_EMPTY_BUCKETS}) { - for (uint32_t i = first; i <= last; i += kTaggedSize) { - set->Insert<AccessMode::ATOMIC>(i); - } - set->RemoveRange(start, end, SlotSet::kBucketsRegularPage, mode); - if (first != start) { - EXPECT_TRUE(set->Lookup(first)); - } - if (last == end) { - EXPECT_TRUE(set->Lookup(last)); - } - for (uint32_t i = start; i < end; i += kTaggedSize) { - EXPECT_FALSE(set->Lookup(i)); - } - } - SlotSet::Delete(set, SlotSet::kBucketsRegularPage); -} - -TEST(SlotSet, RemoveRange) { - CheckRemoveRangeOn(0, Page::kPageSize); - CheckRemoveRangeOn(1 * kTaggedSize, 1023 * kTaggedSize); - for (uint32_t start = 0; start <= 32; start++) { - CheckRemoveRangeOn(start * kTaggedSize, (start + 1) * kTaggedSize); - CheckRemoveRangeOn(start * kTaggedSize, (start + 2) * kTaggedSize); - const uint32_t kEnds[] = {32, 64, 100, 128, 1024, 1500, 2048}; - for (size_t i = 0; i < sizeof(kEnds) / sizeof(uint32_t); i++) { - for (int k = -3; k <= 3; k++) { - uint32_t end = (kEnds[i] + k); - if (start < end) { - CheckRemoveRangeOn(start * kTaggedSize, end * kTaggedSize); - } - } - } - } - SlotSet* set = SlotSet::Allocate(SlotSet::kBucketsRegularPage); - for (const auto mode : - {SlotSet::FREE_EMPTY_BUCKETS, SlotSet::KEEP_EMPTY_BUCKETS}) { - set->Insert<AccessMode::ATOMIC>(Page::kPageSize / 2); - set->RemoveRange(0, Page::kPageSize, SlotSet::kBucketsRegularPage, mode); - for (uint32_t i = 0; i < Page::kPageSize; i += kTaggedSize) { - EXPECT_FALSE(set->Lookup(i)); - } - } - SlotSet::Delete(set, SlotSet::kBucketsRegularPage); -} - TEST(TypedSlotSet, Iterate) { TypedSlotSet set(0); // These two constants must be static as a workaround diff --git a/deps/v8/test/unittests/heap/unmapper-unittest.cc b/deps/v8/test/unittests/heap/unmapper-unittest.cc index 894699f68ae4db..90a32db51dfeed 100644 --- a/deps/v8/test/unittests/heap/unmapper-unittest.cc +++ b/deps/v8/test/unittests/heap/unmapper-unittest.cc @@ -312,8 +312,10 @@ class SequentialUnmapperTest : public // CHECK(tracking_page_allocator_->IsEmpty()); CHECK_EQ(old_page_allocator_, SetPlatformPageAllocatorForTesting(tracking_page_allocator_)); - old_flag_ = i::v8_flags.concurrent_sweeping; + old_sweeping_flag_ = i::v8_flags.concurrent_sweeping; i::v8_flags.concurrent_sweeping = false; + old_minor_sweeping_flag_ = i::v8_flags.concurrent_minor_mc_sweeping; + i::v8_flags.concurrent_minor_mc_sweeping = false; #ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE // Reinitialize the process-wide pointer cage so it can 
pick up the // TrackingPageAllocator. @@ -339,7 +341,8 @@ class SequentialUnmapperTest : public // #ifdef V8_ENABLE_SANDBOX GetProcessWideSandbox()->TearDown(); #endif - i::v8_flags.concurrent_sweeping = old_flag_; + i::v8_flags.concurrent_sweeping = old_sweeping_flag_; + i::v8_flags.concurrent_minor_mc_sweeping = old_minor_sweeping_flag_; CHECK(tracking_page_allocator_->IsEmpty()); // Restore the original v8::PageAllocator and delete the tracking one. @@ -360,13 +363,15 @@ class SequentialUnmapperTest : public // private: static TrackingPageAllocator* tracking_page_allocator_; static v8::PageAllocator* old_page_allocator_; - static bool old_flag_; + static bool old_sweeping_flag_; + static bool old_minor_sweeping_flag_; }; TrackingPageAllocator* SequentialUnmapperTest::tracking_page_allocator_ = nullptr; v8::PageAllocator* SequentialUnmapperTest::old_page_allocator_ = nullptr; -bool SequentialUnmapperTest::old_flag_; +bool SequentialUnmapperTest::old_sweeping_flag_; +bool SequentialUnmapperTest::old_minor_sweeping_flag_; template <typename TMixin> SequentialUnmapperTestMixin<TMixin>::SequentialUnmapperTestMixin() { diff --git a/deps/v8/test/unittests/inspector/inspector-unittest.cc b/deps/v8/test/unittests/inspector/inspector-unittest.cc index 442c32a118f955..0ca795ecc7c937 100644 --- a/deps/v8/test/unittests/inspector/inspector-unittest.cc +++ b/deps/v8/test/unittests/inspector/inspector-unittest.cc @@ -259,7 +259,7 @@ TEST_F(InspectorTest, NoConsoleAPIForUntrustedClient) { } TEST_F(InspectorTest, ApiCreatedTasksAreCleanedUp) { - i::FLAG_experimental_async_stack_tagging_api = true; + i::v8_flags.experimental_async_stack_tagging_api = true; v8::Isolate* isolate = v8_isolate(); v8::HandleScope handle_scope(isolate); diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc index 07259a782a20ac..2a0770a90c83ad 100644 --- a/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc +++ b/deps/v8/test/unittests/interpreter/bytecode-array-builder-unittest.cc @@ -484,12 +484,9 @@ TEST_F(BytecodeArrayBuilderTest, AllBytecodesGenerated) { // Insert entry for illegal bytecode as this is never willingly emitted. scorecard[Bytecodes::ToByte(Bytecode::kIllegal)] = 1; - // Bytecode for CollectTypeProfile is only emitted when - // Type Information for DevTools is turned on. - scorecard[Bytecodes::ToByte(Bytecode::kCollectTypeProfile)] = 1; - // This bytecode is too inconvenient to test manually. - scorecard[Bytecodes::ToByte(Bytecode::kFindNonDefaultConstructor)] = 1; + scorecard[Bytecodes::ToByte( + Bytecode::kFindNonDefaultConstructorOrConstruct)] = 1; // Check return occurs at the end and only once in the BytecodeArray. 
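// The enclosing test is a coverage "scorecard": emit every opcode, count
// each occurrence, and manually seed the entries for opcodes that are never
// willingly emitted, so a final pass can insist that every slot is nonzero.
// A minimal self-contained sketch of the pattern, with made-up opcode names:

#include <array>
#include <cassert>
#include <cstddef>
#include <initializer_list>

enum class ToyOp : size_t { kLoad, kAdd, kReturn, kIllegal, kCount };

int main() {
  std::array<int, static_cast<size_t>(ToyOp::kCount)> scorecard{};
  // Opcodes the generator never emits on purpose are seeded by hand.
  scorecard[static_cast<size_t>(ToyOp::kIllegal)] = 1;
  // Pretend the generator produced a stream covering everything else.
  for (ToyOp op : {ToyOp::kLoad, ToyOp::kAdd, ToyOp::kReturn})
    scorecard[static_cast<size_t>(op)]++;
  // Every opcode must have been covered at least once.
  for (int count : scorecard) assert(count > 0);
}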
CHECK_EQ(final_bytecode, Bytecode::kReturn); diff --git a/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc index 441718040a0113..873e27bb56a0bc 100644 --- a/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc +++ b/deps/v8/test/unittests/interpreter/bytecode-array-writer-unittest.cc @@ -246,7 +246,7 @@ TEST_F(BytecodeArrayWriterUnittest, ComplexExample) { } TEST_F(BytecodeArrayWriterUnittest, ElideNoneffectfulBytecodes) { - if (!i::FLAG_ignition_elide_noneffectful_bytecodes) return; + if (!i::v8_flags.ignition_elide_noneffectful_bytecodes) return; static const uint8_t expected_bytes[] = { // clang-format off diff --git a/deps/v8/test/unittests/interpreter/bytecode-expectations-printer.cc b/deps/v8/test/unittests/interpreter/bytecode-expectations-printer.cc index 013e8ff1439e22..bec63500b30c1b 100644 --- a/deps/v8/test/unittests/interpreter/bytecode-expectations-printer.cc +++ b/deps/v8/test/unittests/interpreter/bytecode-expectations-printer.cc @@ -393,7 +393,7 @@ void BytecodeExpectationsPrinter::PrintExpectation( wrap_ ? WrapCodeInFunction(test_function_name_.c_str(), snippet) : snippet; - i::FLAG_compilation_cache = false; + i::v8_flags.compilation_cache = false; i::Handle<i::BytecodeArray> bytecode_array; if (module_) { CHECK(top_level_ && !wrap_); diff --git a/deps/v8/test/unittests/interpreter/bytecode-generator-unittest.cc b/deps/v8/test/unittests/interpreter/bytecode-generator-unittest.cc index a0cf4552b625b5..eb855dd4d67a55 100644 --- a/deps/v8/test/unittests/interpreter/bytecode-generator-unittest.cc +++ b/deps/v8/test/unittests/interpreter/bytecode-generator-unittest.cc @@ -23,9 +23,9 @@ class BytecodeGeneratorTest : public TestWithContext { public: BytecodeGeneratorTest() : printer_(isolate()) {} static void SetUpTestSuite() { - i::FLAG_always_turbofan = false; - i::FLAG_allow_natives_syntax = true; - i::FLAG_enable_lazy_source_positions = false; + i::v8_flags.always_turbofan = false; + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.enable_lazy_source_positions = false; TestWithContext::SetUpTestSuite(); } diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/ArrayLiterals.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/ArrayLiterals.golden index 28eb0aaa8ba313..c93d81af3084e4 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/ArrayLiterals.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/ArrayLiterals.golden @@ -145,7 +145,7 @@ bytecodes: [ /* 52 S> */ B(CreateArrayLiteral), U8(1), U8(1), U8(37), B(Star1), B(LdaSmi), I8(1), - /* 67 S> */ B(Star2), + B(Star2), /* 67 E> */ B(GetIterator), R(0), U8(2), U8(4), B(Star4), B(GetNamedProperty), R(4), U8(2), U8(6), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/AssignmentsInBinaryExpression.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/AssignmentsInBinaryExpression.golden index 6f2b1bf616d4a0..77edb47b8b41c8 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/AssignmentsInBinaryExpression.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/AssignmentsInBinaryExpression.golden @@ -20,11 +20,11 @@ bytecodes: [ B(Star1), /* 52 S> */ B(LdaSmi), I8(2), B(Star0), - B(LdaSmi), I8(3), + /* 69 S> */ B(LdaSmi), I8(3), B(Star1), - B(LdaSmi), I8(4), + /* 76 S> */ B(LdaSmi), I8(4), B(Star0), - B(LdaSmi), I8(5), + /* 83 S> */ B(LdaSmi), I8(5), B(Star1), /* 88 S> */ B(Return), ] diff 
--git a/deps/v8/test/unittests/interpreter/bytecode_expectations/AsyncGenerators.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/AsyncGenerators.golden index 841a2a45262a6b..de4576618d9c51 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/AsyncGenerators.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/AsyncGenerators.golden @@ -128,7 +128,7 @@ bytecodes: [ B(LdaFalse), B(Star7), B(Mov), R(0), R(5), - B(InvokeIntrinsic), U8(Runtime::k_AsyncGeneratorYield), R(5), U8(3), + B(InvokeIntrinsic), U8(Runtime::k_AsyncGeneratorYieldWithAwait), R(5), U8(3), /* 22 E> */ B(SuspendGenerator), R(0), R(0), U8(5), U8(1), B(ResumeGenerator), R(0), R(0), U8(5), B(Star5), @@ -258,7 +258,7 @@ bytecodes: [ B(Star), R(16), B(Mov), R(0), R(14), B(Mov), R(3), R(15), - B(InvokeIntrinsic), U8(Runtime::k_AsyncGeneratorYield), R(14), U8(3), + B(InvokeIntrinsic), U8(Runtime::k_AsyncGeneratorYieldWithAwait), R(14), U8(3), /* 42 E> */ B(SuspendGenerator), R(0), R(0), U8(14), U8(1), B(ResumeGenerator), R(0), R(0), U8(14), B(Star14), @@ -511,7 +511,7 @@ bytecodes: [ B(LdaFalse), B(Star), R(16), B(Mov), R(0), R(14), - B(InvokeIntrinsic), U8(Runtime::k_AsyncGeneratorYield), R(14), U8(3), + B(InvokeIntrinsic), U8(Runtime::k_AsyncGeneratorResolve), R(14), U8(3), /* 49 E> */ B(SuspendGenerator), R(0), R(0), U8(14), U8(4), B(ResumeGenerator), R(0), R(0), U8(14), B(Star8), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/CallAndSpread.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/CallAndSpread.golden index 16ca804e77ed57..cfcf6aa5f30e3d 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/CallAndSpread.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/CallAndSpread.golden @@ -75,7 +75,7 @@ bytecodes: [ B(Star2), B(LdaSmi), I8(1), B(Star3), - /* 49 S> */ B(CreateArrayLiteral), U8(3), U8(5), U8(37), + /* 49 E> */ B(CreateArrayLiteral), U8(3), U8(5), U8(37), B(Star6), /* 49 E> */ B(GetIterator), R(6), U8(6), U8(8), B(Star5), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/DestructuringAssignment.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/DestructuringAssignment.golden index 8bc522dc46e3b4..bf6c4115021626 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/DestructuringAssignment.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/DestructuringAssignment.golden @@ -24,8 +24,8 @@ bytecodes: [ B(Mov), R(1), R(2), B(Star5), B(Mov), R(context), R(8), - /* 57 S> */ B(Ldar), R(5), - B(JumpIfToBooleanTrue), U8(33), + B(Ldar), R(5), + /* 57 E> */ B(JumpIfToBooleanTrue), U8(33), B(LdaTrue), B(Star5), B(CallProperty0), R(3), R(4), U8(11), @@ -122,8 +122,8 @@ bytecodes: [ B(JumpIfToBooleanTrue), U8(4), B(LdaFalse), B(Star6), - /* 61 S> */ B(Ldar), R(6), - B(JumpIfToBooleanTrue), U8(33), + B(Ldar), R(6), + /* 61 E> */ B(JumpIfToBooleanTrue), U8(33), B(LdaTrue), B(Star6), B(CallProperty0), R(4), R(5), U8(13), @@ -140,7 +140,7 @@ bytecodes: [ B(Jump), U8(3), B(LdaUndefined), B(Star0), - /* 63 S> */ B(CreateEmptyArrayLiteral), U8(15), + /* 63 E> */ B(CreateEmptyArrayLiteral), U8(15), B(Star11), B(Ldar), R(6), B(JumpIfToBooleanTrue), U8(41), @@ -231,9 +231,9 @@ bytecodes: [ B(Mov), R(2), R(3), B(Star6), B(Mov), R(context), R(9), - /* 59 S> */ B(Ldar), R(6), + B(Ldar), R(6), B(Mov), R(0), R(11), - B(JumpIfToBooleanTrue), U8(33), + /* 57 E> */ B(JumpIfToBooleanTrue), U8(33), B(LdaTrue), B(Star6), B(CallProperty0), R(4), R(5), U8(11), @@ 
-250,8 +250,8 @@ bytecodes: [ B(Jump), U8(3), B(LdaUndefined), B(SetNamedProperty), R(11), U8(4), U8(13), - /* 63 S> */ B(Ldar), R(6), - B(JumpIfToBooleanTrue), U8(33), + B(Ldar), R(6), + /* 63 E> */ B(JumpIfToBooleanTrue), U8(33), B(LdaTrue), B(Star6), B(CallProperty0), R(4), R(5), U8(15), @@ -354,7 +354,7 @@ bytecodes: [ B(Star0), /* 48 S> */ B(CreateObjectLiteral), U8(0), U8(0), U8(41), B(Star1), - /* 61 S> */ B(GetNamedProperty), R(1), U8(1), U8(1), + /* 59 S> */ B(GetNamedProperty), R(1), U8(1), U8(1), B(SetNamedProperty), R(0), U8(2), U8(3), B(LdaUndefined), /* 72 S> */ B(Return), @@ -378,14 +378,14 @@ bytecode array length: 28 bytecodes: [ /* 45 S> */ B(CreateObjectLiteral), U8(0), U8(0), U8(41), B(Star1), - /* 64 S> */ B(LdaConstant), U8(1), + /* 62 S> */ B(LdaConstant), U8(1), B(Star3), - B(GetNamedProperty), R(1), U8(1), U8(1), + /* 64 E> */ B(GetNamedProperty), R(1), U8(1), U8(1), B(Mov), R(1), R(2), B(JumpIfNotUndefined), U8(3), B(LdaZero), B(Star0), - /* 71 S> */ B(InvokeIntrinsic), U8(Runtime::k_CopyDataPropertiesWithExcludedPropertiesOnStack), R(2), U8(2), + /* 71 E> */ B(InvokeIntrinsic), U8(Runtime::k_CopyDataPropertiesWithExcludedPropertiesOnStack), R(2), U8(2), B(StaGlobal), U8(2), U8(3), B(LdaUndefined), /* 80 S> */ B(Return), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/ForOfLoop.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/ForOfLoop.golden index d09401f25db731..0f801aa51681fe 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/ForOfLoop.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/ForOfLoop.golden @@ -339,7 +339,7 @@ bytecodes: [ B(Mov), R(12), R(0), /* 31 S> */ B(GetNamedProperty), R(12), U8(3), U8(12), B(Star3), - /* 34 S> */ B(GetNamedProperty), R(12), U8(4), U8(14), + /* 34 E> */ B(GetNamedProperty), R(12), U8(4), U8(14), B(Star4), /* 56 S> */ B(Ldar), R(4), /* 58 E> */ B(Add), R(3), U8(16), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/LogicalExpressions.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/LogicalExpressions.golden index 13bed9d061b0cb..d53ffdcfd10390 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/LogicalExpressions.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/LogicalExpressions.golden @@ -96,7 +96,7 @@ bytecodes: [ /* 42 S> */ B(LdaZero), B(Star0), /* 45 S> */ B(JumpIfToBooleanTrue), U8(4), - B(LdaSmi), I8(3), + /* 64 S> */ B(LdaSmi), I8(3), /* 67 S> */ B(Return), ] constant pool: [ @@ -120,9 +120,9 @@ bytecodes: [ B(Star2), /* 59 S> */ B(Ldar), R(0), B(JumpIfToBooleanTrue), U8(7), - B(LdaSmi), I8(5), + /* 86 S> */ B(LdaSmi), I8(5), B(Star2), - B(LdaSmi), I8(3), + /* 91 S> */ B(LdaSmi), I8(3), /* 94 S> */ B(Return), ] constant pool: [ @@ -180,133 +180,133 @@ bytecodes: [ B(JumpIfToBooleanTrue), U8(196), B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 88 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 98 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 105 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 115 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 122 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 132 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 139 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 149 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 156 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 166 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 173 
S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 183 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 190 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 200 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 207 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 217 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 224 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 234 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 241 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 251 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 258 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 268 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 275 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 285 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 292 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 302 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 309 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 319 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 326 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 336 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 343 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 353 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 360 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 370 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 377 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 387 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 394 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 404 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 411 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 421 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 428 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 438 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 445 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 455 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 462 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 472 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 479 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 489 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 496 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 506 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 513 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 523 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 530 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 540 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 547 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 557 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 564 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 574 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 581 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 591 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 598 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 608 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 615 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(3), + /* 620 S> */ B(LdaSmi), I8(3), /* 623 S> */ B(Return), ] constant pool: [ @@ -364,133 +364,133 @@ bytecodes: [ B(JumpIfToBooleanFalse), U8(196), B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 88 S> */ B(LdaSmi), 
I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 98 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 105 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 115 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 122 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 132 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 139 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 149 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 156 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 166 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 173 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 183 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 190 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 200 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 207 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 217 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 224 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 234 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 241 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 251 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 258 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 268 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 275 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 285 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 292 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 302 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 309 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 319 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 326 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 336 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 343 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 353 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 360 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 370 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 377 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 387 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 394 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 404 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 411 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 421 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 428 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 438 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 445 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 455 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 462 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 472 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 479 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 489 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 496 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 506 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 513 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 523 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 530 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 540 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 547 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 557 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 564 S> */ B(LdaSmi), I8(2), B(Star2), - 
B(LdaSmi), I8(1), + /* 574 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 581 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 591 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 598 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 608 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 615 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(3), + /* 620 S> */ B(LdaSmi), I8(3), /* 623 S> */ B(Return), ] constant pool: [ @@ -549,133 +549,133 @@ bytecodes: [ B(JumpIfTrue), U8(196), B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 94 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 104 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 111 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 121 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 128 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 138 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 145 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 155 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 162 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 172 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 179 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 189 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 196 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 206 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 213 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 223 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 230 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 240 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 247 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 257 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 264 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 274 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 281 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 291 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 298 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 308 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 315 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 325 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 332 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 342 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 349 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 359 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 366 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 376 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 383 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 393 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 400 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 410 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 417 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 427 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 434 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 444 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 451 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 461 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 468 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 478 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 485 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 495 S> 
*/ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 502 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 512 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 519 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 529 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 536 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 546 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 553 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 563 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 570 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 580 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 587 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 597 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 604 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 614 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 621 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(3), + /* 626 S> */ B(LdaSmi), I8(3), /* 629 S> */ B(Return), ] constant pool: [ @@ -734,133 +734,133 @@ bytecodes: [ B(JumpIfFalse), U8(196), B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 94 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 104 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 111 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 121 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 128 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 138 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 145 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 155 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 162 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 172 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 179 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 189 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 196 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 206 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 213 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 223 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 230 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 240 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 247 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 257 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 264 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 274 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 281 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 291 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 298 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 308 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 315 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 325 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 332 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 342 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 349 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 359 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 366 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 376 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 383 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 393 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 400 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 410 S> */ B(LdaSmi), I8(1), B(Star1), 
- B(LdaSmi), I8(2), + /* 417 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 427 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 434 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 444 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 451 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 461 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 468 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 478 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 485 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 495 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 502 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 512 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 519 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 529 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 536 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 546 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 553 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 563 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 570 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 580 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 587 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 597 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 604 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(1), + /* 614 S> */ B(LdaSmi), I8(1), B(Star1), - B(LdaSmi), I8(2), + /* 621 S> */ B(LdaSmi), I8(2), B(Star2), - B(LdaSmi), I8(3), + /* 626 S> */ B(LdaSmi), I8(3), /* 629 S> */ B(Return), ] constant pool: [ @@ -914,7 +914,7 @@ bytecodes: [ B(LdaSmi), I8(3), B(JumpIfToBooleanTrue), U8(3), B(LdaZero), - B(LdaSmi), I8(1), + /* 65 S> */ B(LdaSmi), I8(1), /* 67 S> */ B(Return), ] constant pool: [ diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/NewAndSpread.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/NewAndSpread.golden index 07182e59a78909..46a4f12b03cc4b 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/NewAndSpread.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/NewAndSpread.golden @@ -106,7 +106,7 @@ bytecodes: [ B(Star2), B(LdaSmi), I8(1), B(Star3), - /* 101 S> */ B(CreateArrayLiteral), U8(4), U8(1), U8(37), + /* 101 E> */ B(CreateArrayLiteral), U8(4), U8(1), U8(37), B(Star6), /* 101 E> */ B(GetIterator), R(6), U8(2), U8(4), B(Star5), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/PrimitiveExpressions.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/PrimitiveExpressions.golden index e2d02ee52701b9..b8490f7295304e 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/PrimitiveExpressions.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/PrimitiveExpressions.golden @@ -449,7 +449,7 @@ bytecode array length: 5 bytecodes: [ /* 42 S> */ B(LdaZero), B(Star0), - /* 45 S> */ B(LdaSmi), I8(3), + /* 56 S> */ B(LdaSmi), I8(3), /* 59 S> */ B(Return), ] constant pool: [ diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/PrivateAccessorAccess.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/PrivateAccessorAccess.golden index 040e0cd41dd7d1..e2fafcdcee3986 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/PrivateAccessorAccess.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/PrivateAccessorAccess.golden @@ -83,7 +83,7 @@ bytecodes: [ 
/* 48 E> */ B(DefineKeyedOwnProperty), R(this), R(0), U8(0), /* 53 S> */ B(LdaImmutableCurrentContextSlot), U8(3), /* 58 E> */ B(GetKeyedProperty), R(this), U8(2), - B(Wide), B(LdaSmi), I16(304), + B(Wide), B(LdaSmi), I16(305), B(Star2), B(LdaConstant), U8(0), B(Star3), @@ -115,7 +115,7 @@ bytecodes: [ /* 41 E> */ B(DefineKeyedOwnProperty), R(this), R(0), U8(0), /* 46 S> */ B(LdaImmutableCurrentContextSlot), U8(3), /* 51 E> */ B(GetKeyedProperty), R(this), U8(2), - B(Wide), B(LdaSmi), I16(303), + B(Wide), B(LdaSmi), I16(304), B(Star2), B(LdaConstant), U8(0), B(Star3), @@ -149,7 +149,7 @@ bytecodes: [ B(Star2), B(LdaImmutableCurrentContextSlot), U8(3), /* 58 E> */ B(GetKeyedProperty), R(this), U8(2), - B(Wide), B(LdaSmi), I16(304), + B(Wide), B(LdaSmi), I16(305), B(Star3), B(LdaConstant), U8(0), B(Star4), @@ -181,7 +181,7 @@ bytecodes: [ /* 41 E> */ B(DefineKeyedOwnProperty), R(this), R(0), U8(0), /* 46 S> */ B(LdaImmutableCurrentContextSlot), U8(3), /* 51 E> */ B(GetKeyedProperty), R(this), U8(2), - B(Wide), B(LdaSmi), I16(303), + B(Wide), B(LdaSmi), I16(304), B(Star2), B(LdaConstant), U8(0), B(Star3), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/PrivateMethodAccess.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/PrivateMethodAccess.golden index a386bdccbf7d0e..40e7d2bd4f30d4 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/PrivateMethodAccess.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/PrivateMethodAccess.golden @@ -58,7 +58,7 @@ bytecodes: [ B(Star2), B(LdaImmutableCurrentContextSlot), U8(3), /* 54 E> */ B(GetKeyedProperty), R(this), U8(2), - B(Wide), B(LdaSmi), I16(302), + B(Wide), B(LdaSmi), I16(303), B(Star3), B(LdaConstant), U8(0), B(Star4), @@ -91,7 +91,7 @@ bytecodes: [ /* 44 E> */ B(DefineKeyedOwnProperty), R(this), R(0), U8(0), /* 49 S> */ B(LdaImmutableCurrentContextSlot), U8(3), /* 54 E> */ B(GetKeyedProperty), R(this), U8(2), - B(Wide), B(LdaSmi), I16(302), + B(Wide), B(LdaSmi), I16(303), B(Star2), B(LdaConstant), U8(0), B(Star3), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/StandardForLoop.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/StandardForLoop.golden index e89a2db8df6b30..dbfdbf9f31ba6a 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/StandardForLoop.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/StandardForLoop.golden @@ -216,9 +216,9 @@ bytecode array length: 38 bytecodes: [ /* 37 S> */ B(CreateObjectLiteral), U8(0), U8(0), U8(41), B(Star3), - /* 28 S> */ B(GetNamedProperty), R(3), U8(1), U8(1), + /* 28 E> */ B(GetNamedProperty), R(3), U8(1), U8(1), B(Star0), - /* 31 S> */ B(GetNamedProperty), R(3), U8(2), U8(3), + /* 31 E> */ B(GetNamedProperty), R(3), U8(2), U8(3), B(Star1), /* 55 S> */ B(LdaZero), /* 55 E> */ B(TestGreaterThan), R(1), U8(5), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/StaticPrivateMethodAccess.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/StaticPrivateMethodAccess.golden index 653b11e6d90b5e..57dfc42dc55127 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/StaticPrivateMethodAccess.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/StaticPrivateMethodAccess.golden @@ -24,7 +24,7 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(1), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star2), B(LdaConstant), U8(0), B(Star3), @@ -61,13 
+61,13 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(0), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star2), B(LdaConstant), U8(0), B(Star3), /* 61 E> */ B(CallRuntime), U16(Runtime::kNewTypeError), R(2), U8(2), B(Throw), - B(Wide), B(LdaSmi), I16(302), + B(Wide), B(LdaSmi), I16(303), B(Star2), B(LdaConstant), U8(1), B(Star3), @@ -99,13 +99,13 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(0), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star1), B(LdaConstant), U8(0), B(Star2), /* 61 E> */ B(CallRuntime), U16(Runtime::kNewTypeError), R(1), U8(2), B(Throw), - B(Wide), B(LdaSmi), I16(302), + B(Wide), B(LdaSmi), I16(303), B(Star1), B(LdaConstant), U8(1), B(Star2), @@ -145,7 +145,7 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(0), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star2), B(LdaConstant), U8(0), B(Star3), @@ -167,7 +167,7 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(0), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star3), B(LdaConstant), U8(0), B(Star4), @@ -182,7 +182,7 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(0), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star2), B(LdaConstant), U8(0), B(Star3), @@ -216,13 +216,13 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(0), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star1), B(LdaConstant), U8(0), B(Star2), /* 65 E> */ B(CallRuntime), U16(Runtime::kNewTypeError), R(1), U8(2), B(Throw), - B(Wide), B(LdaSmi), I16(304), + B(Wide), B(LdaSmi), I16(305), B(Star1), B(LdaConstant), U8(1), B(Star2), @@ -253,13 +253,13 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(0), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star1), B(LdaConstant), U8(0), B(Star2), /* 58 E> */ B(CallRuntime), U16(Runtime::kNewTypeError), R(1), U8(2), B(Throw), - B(Wide), B(LdaSmi), I16(303), + B(Wide), B(LdaSmi), I16(304), B(Star1), B(LdaConstant), U8(1), B(Star2), @@ -292,13 +292,13 @@ bytecodes: [ B(TestReferenceEqual), R(this), B(Mov), R(this), R(0), B(JumpIfTrue), U8(16), - B(Wide), B(LdaSmi), I16(296), + B(Wide), B(LdaSmi), I16(297), B(Star2), B(LdaConstant), U8(0), B(Star3), /* 65 E> */ B(CallRuntime), U16(Runtime::kNewTypeError), R(2), U8(2), B(Throw), - B(Wide), B(LdaSmi), I16(304), + B(Wide), B(LdaSmi), I16(305), B(Star2), B(LdaConstant), U8(1), B(Star3), @@ -327,7 +327,7 @@ bytecode array length: 19 bytecodes: [ /* 46 S> */ B(LdaImmutableCurrentContextSlot), U8(3), /* 51 E> */ B(GetKeyedProperty), R(this), U8(0), - B(Wide), B(LdaSmi), I16(303), + B(Wide), B(LdaSmi), I16(304), B(Star1), B(LdaConstant), U8(0), B(Star2), diff --git a/deps/v8/test/unittests/interpreter/bytecode_expectations/SuperCallAndSpread.golden b/deps/v8/test/unittests/interpreter/bytecode_expectations/SuperCallAndSpread.golden index 568f7770249744..3ed6406af66c7a 100644 --- a/deps/v8/test/unittests/interpreter/bytecode_expectations/SuperCallAndSpread.golden +++ b/deps/v8/test/unittests/interpreter/bytecode_expectations/SuperCallAndSpread.golden @@ -102,7 +102,7 @@ bytecodes: [ /* 140 S> */ B(CreateArrayLiteral), U8(0), U8(0), U8(37), B(Star6), B(LdaSmi), I8(1), - /* 152 S> */ B(Star7), + B(Star7), /* 152 E> */ B(GetIterator), R(3), U8(1), U8(3), 
B(Star9), B(GetNamedProperty), R(9), U8(1), U8(5), diff --git a/deps/v8/test/unittests/interpreter/generate-bytecode-expectations.cc b/deps/v8/test/unittests/interpreter/generate-bytecode-expectations.cc index 9a3749264fcc2e..ed0f0e1c6dce30 100644 --- a/deps/v8/test/unittests/interpreter/generate-bytecode-expectations.cc +++ b/deps/v8/test/unittests/interpreter/generate-bytecode-expectations.cc @@ -336,13 +336,13 @@ void ProgramOptions::PrintHeader(std::ostream* stream) const { V8InitializationScope::V8InitializationScope(const char* exec_path) : platform_(v8::platform::NewDefaultPlatform()) { - i::FLAG_always_turbofan = false; - i::FLAG_allow_natives_syntax = true; - i::FLAG_enable_lazy_source_positions = false; + i::v8_flags.always_turbofan = false; + i::v8_flags.allow_natives_syntax = true; + i::v8_flags.enable_lazy_source_positions = false; // The bytecode expectations printer changes flags; this is not security // relevant, allow this. - i::FLAG_freeze_flags_after_init = false; + i::v8_flags.freeze_flags_after_init = false; v8::V8::InitializeICUDefaultLocation(exec_path); v8::V8::InitializeExternalStartupData(exec_path); diff --git a/deps/v8/test/unittests/interpreter/interpreter-tester.cc b/deps/v8/test/unittests/interpreter/interpreter-tester.cc index 559b4b9f3b1013..7c22729aaa705e 100644 --- a/deps/v8/test/unittests/interpreter/interpreter-tester.cc +++ b/deps/v8/test/unittests/interpreter/interpreter-tester.cc @@ -25,7 +25,7 @@ InterpreterTester::InterpreterTester( source_(source), bytecode_(bytecode), feedback_metadata_(feedback_metadata) { - i::FLAG_always_turbofan = false; + i::v8_flags.always_turbofan = false; } InterpreterTester::InterpreterTester( diff --git a/deps/v8/test/unittests/interpreter/interpreter-unittest.cc b/deps/v8/test/unittests/interpreter/interpreter-unittest.cc index 0f567ec757ba72..1052bccb54bf69 100644 --- a/deps/v8/test/unittests/interpreter/interpreter-unittest.cc +++ b/deps/v8/test/unittests/interpreter/interpreter-unittest.cc @@ -377,7 +377,10 @@ TEST_F(InterpreterTest, InterpreterBinaryOpsBigInt) { if (tester.HasFeedbackMetadata()) { MaybeObject feedback = callable.vector().Get(slot); CHECK(feedback->IsSmi()); - CHECK_EQ(BinaryOperationFeedback::kBigInt, feedback->ToSmi().value()); + // TODO(panq): Create a standalone unit test for kBigInt64. + CHECK(BinaryOperationFeedback::kBigInt64 == + feedback->ToSmi().value() || + BinaryOperationFeedback::kBigInt == feedback->ToSmi().value()); } } } @@ -4739,9 +4742,9 @@ TEST_F(InterpreterTest, InterpreterGenerators) { #ifndef V8_TARGET_ARCH_ARM TEST_F(InterpreterTest, InterpreterWithNativeStack) { // "Always sparkplug" messes with this test. 
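In the bytecode expectation dumps above, "/* nn S> */" marks a statement source position at script offset nn and "/* nn E> */" marks an expression position; the golden-file churn in these hunks is those annotations being attached to the constant loads rather than dropped. Separately, the InterpreterBinaryOpsBigInt hunk relaxes an exact feedback check: once the interpreter can record the narrower kBigInt64 kind for BigInt values that fit in 64 bits, asserting kBigInt alone becomes flaky. A minimal standalone sketch of the relaxed check follows; the enum values are illustrative stand-ins, not V8's real BinaryOperationFeedback constants.

#include <cassert>

// Stand-in feedback kinds (illustrative values only).
enum BinaryOperationFeedback { kBigInt64 = 1 << 5, kBigInt = 1 << 6 };

// Accept either the narrow 64-bit BigInt feedback or the generic kind,
// mirroring the CHECK(... == kBigInt64 || ... == kBigInt) in the hunk above.
bool IsBigIntFeedback(int value) {
  return value == kBigInt64 || value == kBigInt;
}

int main() {
  assert(IsBigIntFeedback(kBigInt64));
  assert(IsBigIntFeedback(kBigInt));
  assert(!IsBigIntFeedback(0));
  return 0;
}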
- if (FLAG_always_sparkplug) return; + if (v8_flags.always_sparkplug) return; - i::FLAG_interpreted_frames_native_stack = true; + i::v8_flags.interpreted_frames_native_stack = true; const char* source_text = "function testInterpreterWithNativeStack(a,b) { return a + b };"; @@ -4793,8 +4796,8 @@ TEST_F(InterpreterTest, InterpreterGetBytecodeHandler) { } TEST_F(InterpreterTest, InterpreterCollectSourcePositions) { - FLAG_enable_lazy_source_positions = true; - FLAG_stress_lazy_source_positions = false; + v8_flags.enable_lazy_source_positions = true; + v8_flags.stress_lazy_source_positions = false; const char* source = "(function () {\n" @@ -4817,8 +4820,8 @@ TEST_F(InterpreterTest, InterpreterCollectSourcePositions) { } TEST_F(InterpreterTest, InterpreterCollectSourcePositions_StackOverflow) { - FLAG_enable_lazy_source_positions = true; - FLAG_stress_lazy_source_positions = false; + v8_flags.enable_lazy_source_positions = true; + v8_flags.stress_lazy_source_positions = false; const char* source = "(function () {\n" @@ -4852,8 +4855,8 @@ TEST_F(InterpreterTest, InterpreterCollectSourcePositions_StackOverflow) { } TEST_F(InterpreterTest, InterpreterCollectSourcePositions_ThrowFrom1stFrame) { - FLAG_enable_lazy_source_positions = true; - FLAG_stress_lazy_source_positions = false; + v8_flags.enable_lazy_source_positions = true; + v8_flags.stress_lazy_source_positions = false; const char* source = R"javascript( @@ -4886,8 +4889,8 @@ TEST_F(InterpreterTest, InterpreterCollectSourcePositions_ThrowFrom1stFrame) { } TEST_F(InterpreterTest, InterpreterCollectSourcePositions_ThrowFrom2ndFrame) { - FLAG_enable_lazy_source_positions = true; - FLAG_stress_lazy_source_positions = false; + v8_flags.enable_lazy_source_positions = true; + v8_flags.stress_lazy_source_positions = false; const char* source = R"javascript( @@ -4941,8 +4944,8 @@ void CheckStringEqual(const char* expected_ptr, Handle<Object> actual_handle) { } // namespace TEST_F(InterpreterTest, InterpreterCollectSourcePositions_GenerateStackTrace) { - FLAG_enable_lazy_source_positions = true; - FLAG_stress_lazy_source_positions = false; + v8_flags.enable_lazy_source_positions = true; + v8_flags.stress_lazy_source_positions = false; const char* source = R"javascript( diff --git a/deps/v8/test/unittests/interpreter/source-positions-unittest.cc b/deps/v8/test/unittests/interpreter/source-positions-unittest.cc index e55bf540656a8a..cdba5378441b1e 100644 --- a/deps/v8/test/unittests/interpreter/source-positions-unittest.cc +++ b/deps/v8/test/unittests/interpreter/source-positions-unittest.cc @@ -21,9 +21,10 @@ namespace interpreter { // Flags enabling optimizations that change generated bytecode array. 
// Format is <command-line flag> <flag name> <bit index> -#define OPTIMIZATION_FLAGS(V) \ - V(FLAG_ignition_reo, kUseReo, 0) \ - V(FLAG_ignition_filter_expression_positions, kUseFilterExpressionPositions, 2) +#define OPTIMIZATION_FLAGS(V) \ + V(v8_flags.ignition_reo, kUseReo, 0) \ + V(v8_flags.ignition_filter_expression_positions, \ + kUseFilterExpressionPositions, 2) #define DECLARE_BIT(_, Name, BitIndex) static const int Name = 1 << BitIndex; OPTIMIZATION_FLAGS(DECLARE_BIT) @@ -105,8 +106,8 @@ class SourcePositionTest : public TestWithContext, std::tuple<int, TestCaseData>> { public: static void SetUpTestSuite() { - FLAG_always_turbofan = false; - FLAG_enable_lazy_source_positions = false; + v8_flags.always_turbofan = false; + v8_flags.enable_lazy_source_positions = false; TestWithContext::SetUpTestSuite(); } bool SourcePositionsMatch(int optimization_bitmap, const char* function_body, @@ -196,9 +197,8 @@ TEST_P(SourcePositionTest, SourcePositionsEquivalent) { INSTANTIATE_TEST_SUITE_P( SourcePositionsEquivalentTestCases, SourcePositionTest, ::testing::Combine(::testing::Values(kUseReo, kUseFilterExpressionPositions, - kUseReo | kUseFilterExpressionPositions - - ), + kUseReo | + kUseFilterExpressionPositions), ::testing::ValuesIn(kTestCaseData))); } // namespace interpreter diff --git a/deps/v8/test/unittests/libplatform/single-threaded-default-platform-unittest.cc b/deps/v8/test/unittests/libplatform/single-threaded-default-platform-unittest.cc index 33b26af40ff578..d3d9580a21d4d2 100644 --- a/deps/v8/test/unittests/libplatform/single-threaded-default-platform-unittest.cc +++ b/deps/v8/test/unittests/libplatform/single-threaded-default-platform-unittest.cc @@ -37,7 +37,7 @@ class SingleThreadedDefaultPlatformTest ::testing::Test>>> { public: static void SetUpTestSuite() { - i::FLAG_single_threaded = true; + i::v8_flags.single_threaded = true; i::FlagList::EnforceFlagImplications(); WithIsolateScopeMixin::SetUpTestSuite(); } diff --git a/deps/v8/test/unittests/logging/counters-unittest.cc b/deps/v8/test/unittests/logging/counters-unittest.cc index 6e8a4243a3a93d..b7b92867fa7f12 100644 --- a/deps/v8/test/unittests/logging/counters-unittest.cc +++ b/deps/v8/test/unittests/logging/counters-unittest.cc @@ -75,7 +75,7 @@ class SnapshotNativeCounterTest : public TestWithNativeContextAndCounters { } // namespace TEST_F(AggregatedMemoryHistogramTest, OneSample1) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 1000); AddSample(20, 1000); EXPECT_EQ(1U, samples()->size()); @@ -83,7 +83,7 @@ TEST_F(AggregatedMemoryHistogramTest, OneSample1) { } TEST_F(AggregatedMemoryHistogramTest, OneSample2) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 500); AddSample(20, 1000); EXPECT_EQ(1U, samples()->size()); @@ -91,7 +91,7 @@ TEST_F(AggregatedMemoryHistogramTest, OneSample2) { } TEST_F(AggregatedMemoryHistogramTest, OneSample3) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 500); AddSample(15, 500); AddSample(15, 1000); @@ -101,7 +101,7 @@ TEST_F(AggregatedMemoryHistogramTest, OneSample3) { } TEST_F(AggregatedMemoryHistogramTest, OneSample4) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 500); AddSample(15, 750); AddSample(20, 1000); @@ -110,7 +110,7 @@ TEST_F(AggregatedMemoryHistogramTest, OneSample4) { } TEST_F(AggregatedMemoryHistogramTest, TwoSamples1) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 1000); AddSample(30, 
1000); EXPECT_EQ(2U, samples()->size()); @@ -119,7 +119,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples1) { } TEST_F(AggregatedMemoryHistogramTest, TwoSamples2) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 1000); AddSample(20, 1000); AddSample(30, 1000); @@ -129,7 +129,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples2) { } TEST_F(AggregatedMemoryHistogramTest, TwoSamples3) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 1000); AddSample(20, 1000); AddSample(20, 500); @@ -140,7 +140,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples3) { } TEST_F(AggregatedMemoryHistogramTest, TwoSamples4) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 1000); AddSample(30, 0); EXPECT_EQ(2U, samples()->size()); @@ -149,7 +149,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples4) { } TEST_F(AggregatedMemoryHistogramTest, TwoSamples5) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 0); AddSample(30, 1000); EXPECT_EQ(2U, samples()->size()); @@ -158,7 +158,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples5) { } TEST_F(AggregatedMemoryHistogramTest, TwoSamples6) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 0); AddSample(15, 1000); AddSample(30, 1000); @@ -168,7 +168,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples6) { } TEST_F(AggregatedMemoryHistogramTest, TwoSamples7) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 0); AddSample(15, 1000); AddSample(25, 0); @@ -179,7 +179,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples7) { } TEST_F(AggregatedMemoryHistogramTest, TwoSamples8) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; AddSample(10, 1000); AddSample(15, 0); AddSample(25, 1000); @@ -190,7 +190,7 @@ TEST_F(AggregatedMemoryHistogramTest, TwoSamples8) { } TEST_F(AggregatedMemoryHistogramTest, ManySamples1) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; const int kMaxSamples = 1000; AddSample(0, 0); AddSample(10 * kMaxSamples, 10 * kMaxSamples); @@ -201,7 +201,7 @@ TEST_F(AggregatedMemoryHistogramTest, ManySamples1) { } TEST_F(AggregatedMemoryHistogramTest, ManySamples2) { - FLAG_histogram_interval = 10; + v8_flags.histogram_interval = 10; const int kMaxSamples = 1000; AddSample(0, 0); AddSample(10 * (2 * kMaxSamples), 10 * (2 * kMaxSamples)); @@ -214,7 +214,7 @@ TEST_F(AggregatedMemoryHistogramTest, ManySamples2) { TEST_F(SnapshotNativeCounterTest, WriteBarrier) { RunJS("let o = {a: 42};"); - if (!FLAG_single_generation && SupportsNativeCounters()) { + if (!v8_flags.single_generation && SupportsNativeCounters()) { EXPECT_NE(0, write_barriers()); } else { EXPECT_EQ(0, write_barriers()); diff --git a/deps/v8/test/unittests/logging/log-unittest.cc b/deps/v8/test/unittests/logging/log-unittest.cc index ff7729edbde8f8..764064c40df85d 100644 --- a/deps/v8/test/unittests/logging/log-unittest.cc +++ b/deps/v8/test/unittests/logging/log-unittest.cc @@ -57,11 +57,11 @@ namespace { class LogTest : public TestWithIsolate { public: static void SetUpTestSuite() { - i::FLAG_log = true; - i::FLAG_prof = true; - i::FLAG_log_code = true; - i::FLAG_logfile = i::LogFile::kLogToTemporaryFile; - i::FLAG_logfile_per_isolate = false; + i::v8_flags.log = true; + i::v8_flags.prof = true; + i::v8_flags.log_code = true; + i::v8_flags.logfile = i::LogFile::kLogToTemporaryFile; + i::v8_flags.logfile_per_isolate = false; 
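The dominant pattern from here through the remaining test hunks is a mechanical rename: the old free FLAG_* globals become fields on a single v8_flags struct. One motivation is that a contiguous struct can be write-protected wholesale after startup (compare the freeze_flags_after_init assignment earlier). A compilable sketch of the shape of that API, assuming a hand-written FlagValues; V8's real struct is macro-generated from its flag definitions:

#include <cstdio>

namespace i {  // stand-in for the v8::internal namespace alias used above
// Illustrative subset of flags; the real FlagValues is generated.
struct FlagValues {
  bool log = false;
  bool prof = false;
  bool log_code = false;
  bool always_turbofan = true;
};
inline FlagValues v8_flags;  // one process-wide instance (C++17 inline variable)
}  // namespace i

int main() {
  // Old style:  i::FLAG_log = true;
  // New style, as migrated in the hunks above:
  i::v8_flags.log = true;
  i::v8_flags.prof = true;
  std::printf("log=%d prof=%d\n", i::v8_flags.log, i::v8_flags.prof);
  return 0;
}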
TestWithIsolate::SetUpTestSuite(); } }; @@ -498,11 +498,11 @@ TEST_F(LogTest, Issue539892) { class LogAllTest : public LogTest { public: static void SetUpTestSuite() { - i::FLAG_log_all = true; - i::FLAG_log_deopt = true; - i::FLAG_turbo_inlining = false; - i::FLAG_log_internal_timer_events = true; - i::FLAG_allow_natives_syntax = true; + i::v8_flags.log_all = true; + i::v8_flags.log_deopt = true; + i::v8_flags.turbo_inlining = false; + i::v8_flags.log_internal_timer_events = true; + i::v8_flags.allow_natives_syntax = true; LogTest::SetUpTestSuite(); } }; @@ -543,7 +543,7 @@ TEST_F(LogAllTest, LogAll) { CHECK(logger.ContainsLine({"code-creation,Script", ":1:1"})); CHECK(logger.ContainsLine({"code-creation,JS,", "testAddFn"})); - if (i::FLAG_turbofan && !i::FLAG_always_turbofan) { + if (i::v8_flags.turbofan && !i::v8_flags.always_turbofan) { CHECK(logger.ContainsLine({"code-deopt,", "not a Smi"})); CHECK(logger.ContainsLine({"timer-event-start", "V8.DeoptimizeCode"})); CHECK(logger.ContainsLine({"timer-event-end", "V8.DeoptimizeCode"})); @@ -554,7 +554,7 @@ TEST_F(LogAllTest, LogAll) { class LogInterpretedFramesNativeStackTest : public LogTest { public: static void SetUpTestSuite() { - i::FLAG_interpreted_frames_native_stack = true; + i::v8_flags.interpreted_frames_native_stack = true; LogTest::SetUpTestSuite(); } }; @@ -583,13 +583,13 @@ class LogInterpretedFramesNativeStackWithSerializationTest : array_buffer_allocator_( v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {} static void SetUpTestSuite() { - i::FLAG_log = true; - i::FLAG_prof = true; - i::FLAG_log_code = true; - i::FLAG_logfile = i::LogFile::kLogToTemporaryFile; - i::FLAG_logfile_per_isolate = false; - i::FLAG_interpreted_frames_native_stack = true; - i::FLAG_always_turbofan = false; + i::v8_flags.log = true; + i::v8_flags.prof = true; + i::v8_flags.log_code = true; + i::v8_flags.logfile = i::LogFile::kLogToTemporaryFile; + i::v8_flags.logfile_per_isolate = false; + i::v8_flags.interpreted_frames_native_stack = true; + i::v8_flags.always_turbofan = false; TestWithPlatform::SetUpTestSuite(); } @@ -673,8 +673,8 @@ TEST_F(LogInterpretedFramesNativeStackWithSerializationTest, class LogExternalLogEventListenerTest : public TestWithIsolate { public: static void SetUpTestSuite() { - i::FLAG_log = false; - i::FLAG_prof = false; + i::v8_flags.log = false; + i::v8_flags.prof = false; TestWithIsolate::SetUpTestSuite(); } }; @@ -728,8 +728,8 @@ class LogExternalLogEventListenerInnerFunctionTest : public TestWithPlatform { : array_buffer_allocator_( v8::ArrayBuffer::Allocator::NewDefaultAllocator()) {} static void SetUpTestSuite() { - i::FLAG_log = false; - i::FLAG_prof = false; + i::v8_flags.log = false; + i::v8_flags.prof = false; TestWithPlatform::SetUpTestSuite(); } @@ -771,11 +771,11 @@ TEST_F(LogExternalLogEventListenerInnerFunctionTest, v8::ScriptCompiler::CompileUnboundScript(isolate1, &source) .ToLocalChecked(); CHECK_EQ(code_event_handler.CountLines("Function", "f1"), - 1 + (i::FLAG_stress_background_compile ? 1 : 0) + - (i::FLAG_always_sparkplug ? 1 : 0)); + 1 + (i::v8_flags.stress_background_compile ? 1 : 0) + + (i::v8_flags.always_sparkplug ? 1 : 0)); CHECK_EQ(code_event_handler.CountLines("Function", "f2"), - 1 + (i::FLAG_stress_background_compile ? 1 : 0) + - (i::FLAG_always_sparkplug ? 1 : 0)); + 1 + (i::v8_flags.stress_background_compile ? 1 : 0) + + (i::v8_flags.always_sparkplug ? 
1 : 0)); cache = v8::ScriptCompiler::CreateCodeCache(script); } isolate1->Dispose(); @@ -811,9 +811,9 @@ TEST_F(LogExternalLogEventListenerInnerFunctionTest, class LogExternalInterpretedFramesNativeStackTest : public TestWithIsolate { public: static void SetUpTestSuite() { - i::FLAG_log = false; - i::FLAG_prof = false; - i::FLAG_interpreted_frames_native_stack = true; + i::v8_flags.log = false; + i::v8_flags.prof = false; + i::v8_flags.interpreted_frames_native_stack = true; TestWithIsolate::SetUpTestSuite(); } }; @@ -863,7 +863,7 @@ TEST_F(LogExternalInterpretedFramesNativeStackTest, class LogMapsTest : public LogTest { public: static void SetUpTestSuite() { - i::FLAG_log_maps = true; + i::v8_flags.log_maps = true; LogTest::SetUpTestSuite(); } }; @@ -949,8 +949,9 @@ void ValidateMapDetailsLogging(v8::Isolate* isolate, TEST_F(LogMapsTest, LogMapsDetailsStartup) { // Reusing map addresses might cause these tests to fail. - if (i::FLAG_gc_global || i::FLAG_stress_compaction || - i::FLAG_stress_incremental_marking || i::FLAG_enable_third_party_heap) { + if (i::v8_flags.gc_global || i::v8_flags.stress_compaction || + i::v8_flags.stress_incremental_marking || + i::v8_flags.enable_third_party_heap) { return; } // Test that all Map details from Maps in the snapshot are logged properly. @@ -964,16 +965,17 @@ TEST_F(LogMapsTest, LogMapsDetailsStartup) { class LogMapsCodeTest : public LogTest { public: static void SetUpTestSuite() { - i::FLAG_retain_maps_for_n_gc = 0xFFFFFFF; - i::FLAG_log_maps = true; + i::v8_flags.retain_maps_for_n_gc = 0xFFFFFFF; + i::v8_flags.log_maps = true; LogTest::SetUpTestSuite(); } }; TEST_F(LogMapsCodeTest, LogMapsDetailsCode) { // Reusing map addresses might cause these tests to fail. - if (i::FLAG_gc_global || i::FLAG_stress_compaction || - i::FLAG_stress_incremental_marking || i::FLAG_enable_third_party_heap) { + if (i::v8_flags.gc_global || i::v8_flags.stress_compaction || + i::v8_flags.stress_incremental_marking || + i::v8_flags.enable_third_party_heap) { return; } @@ -1061,8 +1063,9 @@ TEST_F(LogMapsCodeTest, LogMapsDetailsCode) { TEST_F(LogMapsTest, LogMapsDetailsContexts) { // Reusing map addresses might cause these tests to fail. - if (i::FLAG_gc_global || i::FLAG_stress_compaction || - i::FLAG_stress_incremental_marking || i::FLAG_enable_third_party_heap) { + if (i::v8_flags.gc_global || i::v8_flags.stress_compaction || + i::v8_flags.stress_incremental_marking || + i::v8_flags.enable_third_party_heap) { return; } // Test that all Map details from Maps in the snapshot are logged properly. @@ -1132,7 +1135,7 @@ class LogFunctionEventsTest : public LogTest { TEST_F(LogFunctionEventsTest, LogFunctionEvents) { // --always-turbofan will break the fine-grained log order. 
- if (i::FLAG_always_turbofan) return; + if (i::v8_flags.always_turbofan) return; { ScopedLoggerInitializer logger(isolate()); diff --git a/deps/v8/test/unittests/logging/runtime-call-stats-unittest.cc b/deps/v8/test/unittests/logging/runtime-call-stats-unittest.cc index f69127bf9efd3c..fdde4441a28e18 100644 --- a/deps/v8/test/unittests/logging/runtime-call-stats-unittest.cc +++ b/deps/v8/test/unittests/logging/runtime-call-stats-unittest.cc @@ -460,8 +460,8 @@ static void CustomCallback(const v8::FunctionCallbackInfo<v8::Value>& info) { } // namespace TEST_F(RuntimeCallStatsTest, CallbackFunction) { - FLAG_allow_natives_syntax = true; - FLAG_incremental_marking = false; + v8_flags.allow_natives_syntax = true; + v8_flags.incremental_marking = false; RuntimeCallCounter* callback_counter = stats()->GetCounter(RuntimeCallCounterId::kFunctionCallback); @@ -539,8 +539,8 @@ TEST_F(RuntimeCallStatsTest, CallbackFunction) { } TEST_F(RuntimeCallStatsTest, ApiGetter) { - FLAG_allow_natives_syntax = true; - FLAG_incremental_marking = false; + v8_flags.allow_natives_syntax = true; + v8_flags.incremental_marking = false; RuntimeCallCounter* callback_counter = stats()->GetCounter(RuntimeCallCounterId::kFunctionCallback); @@ -627,12 +627,12 @@ TEST_F(RuntimeCallStatsTest, ApiGetter) { } TEST_F(RuntimeCallStatsTest, GarbageCollection) { - if (FLAG_stress_incremental_marking) return; - FLAG_expose_gc = true; + if (v8_flags.stress_incremental_marking) return; + v8_flags.expose_gc = true; // Disable concurrent GC threads because otherwise they may continue // running after this test completes and race with is_runtime_stats_enabled() // updates. - FLAG_single_threaded_gc = true; + v8_flags.single_threaded_gc = true; FlagList::EnforceFlagImplications(); v8::Isolate* isolate = v8_isolate(); diff --git a/deps/v8/test/unittests/objects/concurrent-feedback-vector-unittest.cc b/deps/v8/test/unittests/objects/concurrent-feedback-vector-unittest.cc index f329e38495bc8a..9ed7e76ad36ba5 100644 --- a/deps/v8/test/unittests/objects/concurrent-feedback-vector-unittest.cc +++ b/deps/v8/test/unittests/objects/concurrent-feedback-vector-unittest.cc @@ -161,7 +161,7 @@ static void CheckedWait(base::Semaphore& semaphore) { // Verify that a LoadIC can be cycled through different states and safely // read on a background thread. TEST_F(ConcurrentFeedbackVectorTest, CheckLoadICStates) { - FLAG_lazy_feedback_allocation = false; + v8_flags.lazy_feedback_allocation = false; std::unique_ptr<PersistentHandles> ph = i_isolate()->NewPersistentHandles(); HandleScope handle_scope(i_isolate()); diff --git a/deps/v8/test/unittests/objects/concurrent-string-unittest.cc b/deps/v8/test/unittests/objects/concurrent-string-unittest.cc index 604b6f017cf4be..569bceb01c8dfa 100644 --- a/deps/v8/test/unittests/objects/concurrent-string-unittest.cc +++ b/deps/v8/test/unittests/objects/concurrent-string-unittest.cc @@ -234,9 +234,9 @@ TEST_F(ConcurrentStringTest, InspectTwoByteExternalizing) { // InspectOneByteExternalizing, but using thin strings. TEST_F(ConcurrentStringTest, InspectOneByteExternalizing_ThinString) { // We will not create a thin string if single_generation is turned on. - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; // We don't create ThinStrings immediately when using the forwarding table. 
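For the ThinString variants in this file: per the test's own comments, no ThinString is created under single_generation, and the string forwarding table defers ThinString creation until GC time, so both configurations would leave the test inspecting an object shape that never materializes; the early returns here simply skip those configurations.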
- if (FLAG_always_use_string_forwarding_table) return; + if (v8_flags.always_use_string_forwarding_table) return; std::unique_ptr<PersistentHandles> ph = i_isolate()->NewPersistentHandles(); auto factory = i_isolate()->factory(); @@ -296,9 +296,9 @@ TEST_F(ConcurrentStringTest, InspectOneByteExternalizing_ThinString) { // strings. TEST_F(ConcurrentStringTest, InspectOneIntoTwoByteExternalizing_ThinString) { // We will not create a thin string if single_generation is turned on. - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; // We don't create ThinStrings immediately when using the forwarding table. - if (FLAG_always_use_string_forwarding_table) return; + if (v8_flags.always_use_string_forwarding_table) return; std::unique_ptr<PersistentHandles> ph = i_isolate()->NewPersistentHandles(); auto factory = i_isolate()->factory(); @@ -358,9 +358,9 @@ TEST_F(ConcurrentStringTest, InspectOneIntoTwoByteExternalizing_ThinString) { // InspectTwoByteExternalizing, but using thin strings. TEST_F(ConcurrentStringTest, InspectTwoByteExternalizing_ThinString) { // We will not create a thin string if single_generation is turned on. - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; // We don't create ThinStrings immediately when using the forwarding table. - if (FLAG_always_use_string_forwarding_table) return; + if (v8_flags.always_use_string_forwarding_table) return; std::unique_ptr<PersistentHandles> ph = i_isolate()->NewPersistentHandles(); auto factory = i_isolate()->factory(); diff --git a/deps/v8/test/unittests/objects/dictionary-unittest.cc b/deps/v8/test/unittests/objects/dictionary-unittest.cc index ecdf0ec7109f5c..28bda6116f767e 100644 --- a/deps/v8/test/unittests/objects/dictionary-unittest.cc +++ b/deps/v8/test/unittests/objects/dictionary-unittest.cc @@ -204,7 +204,7 @@ class DictionaryTest : public TestWithHeapInternalsAndContext { // Even though we simulate a full heap, generating an identity hash // code in subsequent calls will not request GC. - if (!FLAG_single_generation) { + if (!v8_flags.single_generation) { SimulateFullSpace(heap()->new_space()); } SimulateFullSpace(heap()->old_space()); @@ -276,9 +276,9 @@ TEST_F(DictionaryTest, HashTableRehash) { #ifdef DEBUG TEST_F(DictionaryTest, ObjectHashTableCausesGC) { - i::FLAG_stress_compaction = false; + i::v8_flags.stress_compaction = false; // For SimulateFullSpace in TestHashMapDoesNotCauseGC. 
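The stress_concurrent_allocation override that follows is not cosmetic: SimulateFullSpace works by filling a space until allocation would fail, and with allocation stress enabled, background threads could allocate into the space being filled and quietly undo that setup, so the test pins the flag to false first.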
- i::FLAG_stress_concurrent_allocation = false; + i::v8_flags.stress_concurrent_allocation = false; TestHashMapDoesNotCauseGC(ObjectHashTable::New(isolate(), 1)); } #endif diff --git a/deps/v8/test/unittests/objects/feedback-vector-unittest.cc b/deps/v8/test/unittests/objects/feedback-vector-unittest.cc index 4730536afb392e..c9eedf28b54cfb 100644 --- a/deps/v8/test/unittests/objects/feedback-vector-unittest.cc +++ b/deps/v8/test/unittests/objects/feedback-vector-unittest.cc @@ -144,9 +144,9 @@ TEST_F(FeedbackVectorTest, VectorICMetadata) { } TEST_F(FeedbackVectorTest, VectorCallICStates) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -173,9 +173,9 @@ TEST_F(FeedbackVectorTest, VectorCallICStates) { // Test the Call IC states transfer with Function.prototype.apply TEST_F(FeedbackVectorTest, VectorCallICStateApply) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -213,9 +213,9 @@ TEST_F(FeedbackVectorTest, VectorCallICStateApply) { } TEST_F(FeedbackVectorTest, VectorCallFeedback) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -243,10 +243,10 @@ TEST_F(FeedbackVectorTest, VectorCallFeedback) { } TEST_F(FeedbackVectorTest, VectorPolymorphicCallFeedback) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; - FLAG_lazy_feedback_allocation = false; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; + v8_flags.lazy_feedback_allocation = false; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -274,9 +274,9 @@ TEST_F(FeedbackVectorTest, VectorPolymorphicCallFeedback) { } TEST_F(FeedbackVectorTest, VectorCallFeedbackForArray) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -303,9 +303,9 @@ TEST_F(FeedbackVectorTest, VectorCallFeedbackForArray) { } TEST_F(FeedbackVectorTest, VectorCallCounts) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -334,9 +334,9 @@ TEST_F(FeedbackVectorTest, VectorCallCounts) { } TEST_F(FeedbackVectorTest, VectorConstructCounts) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = 
i_isolate(); @@ -367,9 +367,9 @@ TEST_F(FeedbackVectorTest, VectorConstructCounts) { } TEST_F(FeedbackVectorTest, VectorSpeculationMode) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -401,11 +401,11 @@ TEST_F(FeedbackVectorTest, VectorSpeculationMode) { } TEST_F(FeedbackVectorTest, VectorCallSpeculationModeAndFeedbackContent) { - if (!i::FLAG_use_ic) return; - if (!i::FLAG_turbofan) return; - if (i::FLAG_always_turbofan) return; - if (i::FLAG_jitless) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (!i::v8_flags.turbofan) return; + if (i::v8_flags.always_turbofan) return; + if (i::v8_flags.jitless) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -436,9 +436,9 @@ TEST_F(FeedbackVectorTest, VectorCallSpeculationModeAndFeedbackContent) { } TEST_F(FeedbackVectorTest, VectorLoadICStates) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -489,9 +489,9 @@ TEST_F(FeedbackVectorTest, VectorLoadICStates) { } TEST_F(FeedbackVectorTest, VectorLoadGlobalICSlotSharing) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -525,9 +525,9 @@ TEST_F(FeedbackVectorTest, VectorLoadGlobalICSlotSharing) { } TEST_F(FeedbackVectorTest, VectorLoadICOnSmi) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -581,9 +581,9 @@ TEST_F(FeedbackVectorTest, VectorLoadICOnSmi) { } TEST_F(FeedbackVectorTest, ReferenceContextAllocatesNoSlots) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); @@ -725,9 +725,9 @@ TEST_F(FeedbackVectorTest, ReferenceContextAllocatesNoSlots) { } TEST_F(FeedbackVectorTest, VectorStoreICBasic) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); @@ -751,9 +751,9 @@ TEST_F(FeedbackVectorTest, VectorStoreICBasic) { } TEST_F(FeedbackVectorTest, DefineNamedOwnIC) { - if (!i::FLAG_use_ic) return; - if (i::FLAG_always_turbofan) return; - FLAG_allow_natives_syntax = true; + if (!i::v8_flags.use_ic) return; + if (i::v8_flags.always_turbofan) return; + v8_flags.allow_natives_syntax = true; v8::HandleScope scope(v8_isolate()); diff --git 
a/deps/v8/test/unittests/objects/inobject-slack-tracking-unittest.cc b/deps/v8/test/unittests/objects/inobject-slack-tracking-unittest.cc index f2e1e12bb370ab..2be8c2ec8bb271 100644 --- a/deps/v8/test/unittests/objects/inobject-slack-tracking-unittest.cc +++ b/deps/v8/test/unittests/objects/inobject-slack-tracking-unittest.cc @@ -314,7 +314,7 @@ class InObjectSlackTrackingTest : public TestWithContext { void TestSubclassChain(const std::vector<int>& hierarchy_desc) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CreateClassHierarchy(hierarchy_desc); TestClassHierarchy(hierarchy_desc, static_cast<int>(hierarchy_desc.size())); @@ -325,7 +325,7 @@ class InObjectSlackTrackingTest : public TestWithContext { TEST_F(InObjectSlackTrackingTest, JSObjectBasic) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; const char* source = "function A() {" " this.a = 42;" @@ -373,13 +373,13 @@ TEST_F(InObjectSlackTrackingTest, JSObjectBasic) { } TEST_F(InObjectSlackTrackingTest, JSObjectBasicNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestJSObjectBasic(); } TEST_F(InObjectSlackTrackingTest, JSObjectComplex) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; const char* source = "function A(n) {" " if (n > 0) this.a = 42;" @@ -449,13 +449,13 @@ TEST_F(InObjectSlackTrackingTest, JSObjectComplex) { } TEST_F(InObjectSlackTrackingTest, JSObjectComplexNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestJSObjectComplex(); } TEST_F(InObjectSlackTrackingTest, JSGeneratorObjectBasic) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; const char* source = "function* A() {" " var i = 0;" @@ -511,13 +511,13 @@ TEST_F(InObjectSlackTrackingTest, JSGeneratorObjectBasic) { } TEST_F(InObjectSlackTrackingTest, JSGeneratorObjectBasicNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestJSGeneratorObjectBasic(); } TEST_F(InObjectSlackTrackingTest, SubclassBasicNoBaseClassInstances) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; // Check that base class' and subclass' slack tracking do not interfere with // each other. @@ -600,13 +600,13 @@ TEST_F(InObjectSlackTrackingTest, SubclassBasicNoBaseClassInstances) { TEST_F(InObjectSlackTrackingTest, SubclassBasicNoBaseClassInstancesNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassBasicNoBaseClassInstances(); } TEST_F(InObjectSlackTrackingTest, SubclassBasic) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; // Check that base class' and subclass' slack tracking do not interfere with // each other. @@ -699,7 +699,7 @@ TEST_F(InObjectSlackTrackingTest, SubclassBasic) { } TEST_F(InObjectSlackTrackingTest, SubclassBasicNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassBasic(); } @@ -738,7 +738,7 @@ TEST_F(InObjectSlackTrackingTest, LongSubclassChain3) { TEST_F(InObjectSlackTrackingTest, InobjectPropetiesCountOverflowInSubclass) { // Avoid eventual completion of in-object slack tracking. 
- FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; std::vector<int> hierarchy_desc; const int kNoOverflowCount = 5; @@ -910,10 +910,10 @@ TEST_F(InObjectSlackTrackingTest, ObjectLiteralPropertyBackingStoreSize) { } TEST_F(InObjectSlackTrackingTest, SlowModeSubclass) { - if (FLAG_stress_concurrent_allocation) return; + if (v8_flags.stress_concurrent_allocation) return; // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; std::vector<int> hierarchy_desc; const int kNoOverflowCount = 5; @@ -969,46 +969,46 @@ TEST_F(InObjectSlackTrackingTest, SlowModeSubclass) { TEST_F(InObjectSlackTrackingTest, SubclassObjectBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; TestSubclassBuiltin("A1", JS_OBJECT_TYPE, "Object", "true"); TestSubclassBuiltin("A2", JS_OBJECT_TYPE, "Object", "42"); TestSubclassBuiltin("A3", JS_OBJECT_TYPE, "Object", "'some string'"); } TEST_F(InObjectSlackTrackingTest, SubclassObjectBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassObjectBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassFunctionBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; TestSubclassBuiltin("A1", JS_FUNCTION_TYPE, "Function", "'return 153;'"); TestSubclassBuiltin("A2", JS_FUNCTION_TYPE, "Function", "'this.a = 44;'"); } TEST_F(InObjectSlackTrackingTest, SubclassFunctionBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassFunctionBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassBooleanBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; TestSubclassBuiltin("A1", JS_PRIMITIVE_WRAPPER_TYPE, "Boolean", "true"); TestSubclassBuiltin("A2", JS_PRIMITIVE_WRAPPER_TYPE, "Boolean", "false"); } TEST_F(InObjectSlackTrackingTest, SubclassBooleanBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassBooleanBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassErrorBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; const int first_field = 2; TestSubclassBuiltin("A1", JS_ERROR_TYPE, "Error", "'err'", first_field); @@ -1022,38 +1022,38 @@ TEST_F(InObjectSlackTrackingTest, SubclassErrorBuiltin) { } TEST_F(InObjectSlackTrackingTest, SubclassErrorBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassErrorBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassNumberBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; TestSubclassBuiltin("A1", JS_PRIMITIVE_WRAPPER_TYPE, "Number", "42"); TestSubclassBuiltin("A2", JS_PRIMITIVE_WRAPPER_TYPE, "Number", "4.2"); } TEST_F(InObjectSlackTrackingTest, SubclassNumberBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassNumberBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassDateBuiltin) { // Avoid eventual completion of in-object slack tracking. 
- FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; TestSubclassBuiltin("A1", JS_DATE_TYPE, "Date", "123456789"); } TEST_F(InObjectSlackTrackingTest, SubclassDateBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassDateBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassStringBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; TestSubclassBuiltin("A1", JS_PRIMITIVE_WRAPPER_TYPE, "String", "'some string'"); @@ -1061,12 +1061,12 @@ TEST_F(InObjectSlackTrackingTest, SubclassStringBuiltin) { } TEST_F(InObjectSlackTrackingTest, SubclassStringBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; } TEST_F(InObjectSlackTrackingTest, SubclassRegExpBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; const int first_field = 1; TestSubclassBuiltin("A1", JS_REG_EXP_TYPE, "RegExp", "'o(..)h', 'g'", @@ -1074,25 +1074,25 @@ TEST_F(InObjectSlackTrackingTest, SubclassRegExpBuiltin) { } TEST_F(InObjectSlackTrackingTest, SubclassRegExpBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassRegExpBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassArrayBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; TestSubclassBuiltin("A1", JS_ARRAY_TYPE, "Array", "42"); } TEST_F(InObjectSlackTrackingTest, SubclassArrayBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassArrayBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassTypedArrayBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; #define TYPED_ARRAY_TEST_F(InObjectSlackTrackingTest, Type, type, TYPE, \ elementType) \ @@ -1104,13 +1104,13 @@ TEST_F(InObjectSlackTrackingTest, SubclassTypedArrayBuiltin) { } TEST_F(InObjectSlackTrackingTest, SubclassTypedArrayBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassTypedArrayBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassCollectionBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1121,13 +1121,13 @@ TEST_F(InObjectSlackTrackingTest, SubclassCollectionBuiltin) { } TEST_F(InObjectSlackTrackingTest, SubclassCollectionBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassCollectionBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassArrayBufferBuiltin) { // Avoid eventual completion of in-object slack tracking. - FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1137,13 +1137,13 @@ TEST_F(InObjectSlackTrackingTest, SubclassArrayBufferBuiltin) { } TEST_F(InObjectSlackTrackingTest, SubclassArrayBufferBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassArrayBufferBuiltin(); } TEST_F(InObjectSlackTrackingTest, SubclassPromiseBuiltin) { // Avoid eventual completion of in-object slack tracking. 
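Nearly every test in this file first clears always_turbofan so that in-object slack tracking cannot finish early: tracking watches the first several constructions that go through a map and then shrinks the instance size down to the slots actually used, and eagerly optimized code can drive it to completion before the test has taken its measurements. A toy model of the mechanism, with an illustrative threshold (V8 keeps the real counter on the Map):

#include <cassert>

// Toy model of in-object slack tracking; counts are stand-in values.
struct ToyMap {
  int constructions = 0;
  int instance_slots = 8;  // start with generous in-object slack
  int used_slots = 2;      // what the constructor actually assigns
  bool complete = false;

  void OnConstruct() {
    if (complete) return;
    if (++constructions >= 7) {     // threshold is illustrative
      instance_slots = used_slots;  // trim the unused slack
      complete = true;
    }
  }
};

int main() {
  ToyMap map;
  for (int i = 0; i < 10; ++i) map.OnConstruct();
  assert(map.complete);
  assert(map.instance_slots == 2);
  return 0;
}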
- FLAG_always_turbofan = false; + v8_flags.always_turbofan = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1152,7 +1152,7 @@ TEST_F(InObjectSlackTrackingTest, SubclassPromiseBuiltin) { } TEST_F(InObjectSlackTrackingTest, SubclassPromiseBuiltinNoInlineNew) { - FLAG_inline_new = false; + v8_flags.inline_new = false; TestSubclassPromiseBuiltin(); } @@ -1362,7 +1362,7 @@ TEST_F(InObjectSlackTrackingTest, TEST_F(InObjectSlackTrackingTest, InstanceFieldsArePropertiesDefaultConstructorEager) { - i::FLAG_lazy = false; + i::v8_flags.lazy = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); @@ -1385,7 +1385,7 @@ TEST_F(InObjectSlackTrackingTest, TEST_F(InObjectSlackTrackingTest, InstanceFieldsArePropertiesFieldsAndConstructorEager) { - i::FLAG_lazy = false; + i::v8_flags.lazy = false; CcTest::InitializeVM(); v8::HandleScope scope(CcTest::isolate()); diff --git a/deps/v8/test/unittests/objects/modules-unittest.cc b/deps/v8/test/unittests/objects/modules-unittest.cc index 677ae0da0e3edd..d3ac95a3e352dc 100644 --- a/deps/v8/test/unittests/objects/modules-unittest.cc +++ b/deps/v8/test/unittests/objects/modules-unittest.cc @@ -177,8 +177,8 @@ MaybeLocal<Module> ResolveCallbackWithImportAssertions( } TEST_F(ModuleTest, ModuleInstantiationWithImportAssertions) { - bool prev_import_assertions = i::FLAG_harmony_import_assertions; - i::FLAG_harmony_import_assertions = true; + bool prev_import_assertions = i::v8_flags.harmony_import_assertions; + i::v8_flags.harmony_import_assertions = true; HandleScope scope(isolate()); v8::TryCatch try_catch(isolate()); @@ -269,7 +269,7 @@ TEST_F(ModuleTest, ModuleInstantiationWithImportAssertions) { CHECK_EQ(42, result->Int32Value(context()).FromJust()); } CHECK(!try_catch.HasCaught()); - i::FLAG_harmony_import_assertions = prev_import_assertions; + i::v8_flags.harmony_import_assertions = prev_import_assertions; } TEST_F(ModuleTest, ModuleInstantiationFailures2) { diff --git a/deps/v8/test/unittests/objects/roots-unittest.cc b/deps/v8/test/unittests/objects/roots-unittest.cc index a12ccdfb3fc4b4..6bb3bc16ee3384 100644 --- a/deps/v8/test/unittests/objects/roots-unittest.cc +++ b/deps/v8/test/unittests/objects/roots-unittest.cc @@ -59,6 +59,7 @@ bool IsInitiallyMutable(Factory* factory, Address object_address) { V(retaining_path_targets) \ V(serialized_global_proxy_sizes) \ V(serialized_objects) \ + IF_WASM(V, js_to_wasm_wrappers) \ IF_WASM(V, wasm_canonical_rtts) \ V(weak_refs_keep_during_job) diff --git a/deps/v8/test/unittests/objects/value-serializer-unittest.cc b/deps/v8/test/unittests/objects/value-serializer-unittest.cc index 0e1aa59a39656b..f433fd10f51394 100644 --- a/deps/v8/test/unittests/objects/value-serializer-unittest.cc +++ b/deps/v8/test/unittests/objects/value-serializer-unittest.cc @@ -1686,13 +1686,13 @@ TEST_F(ValueSerializerTest, DecodeRegExpDotAll) { } TEST_F(ValueSerializerTest, DecodeLinearRegExp) { - bool flag_was_enabled = i::FLAG_enable_experimental_regexp_engine; + bool flag_was_enabled = i::v8_flags.enable_experimental_regexp_engine; // The last byte encodes the regexp flags. std::vector<uint8_t> regexp_encoding = {0xFF, 0x09, 0x3F, 0x00, 0x52, 0x03, 0x66, 0x6F, 0x6F, 0x6D}; - i::FLAG_enable_experimental_regexp_engine = true; + i::v8_flags.enable_experimental_regexp_engine = true; // DecodeTestUpToVersion will overwrite the version number in the data but // it's fine. 
DecodeTestUpToVersion( @@ -1702,10 +1702,10 @@ TEST_F(ValueSerializerTest, DecodeLinearRegExp) { ExpectScriptTrue("result.toString() === '/foo/glmsy'"); }); - i::FLAG_enable_experimental_regexp_engine = false; + i::v8_flags.enable_experimental_regexp_engine = false; InvalidDecodeTest(regexp_encoding); - i::FLAG_enable_experimental_regexp_engine = flag_was_enabled; + i::v8_flags.enable_experimental_regexp_engine = flag_was_enabled; } TEST_F(ValueSerializerTest, DecodeHasIndicesRegExp) { @@ -2503,14 +2503,14 @@ class ValueSerializerTestWithSharedArrayBufferClone } static void SetUpTestSuite() { - flag_was_enabled_ = i::FLAG_harmony_sharedarraybuffer; - i::FLAG_harmony_sharedarraybuffer = true; + flag_was_enabled_ = i::v8_flags.harmony_sharedarraybuffer; + i::v8_flags.harmony_sharedarraybuffer = true; ValueSerializerTest::SetUpTestSuite(); } static void TearDownTestSuite() { ValueSerializerTest::TearDownTestSuite(); - i::FLAG_harmony_sharedarraybuffer = flag_was_enabled_; + i::v8_flags.harmony_sharedarraybuffer = flag_was_enabled_; flag_was_enabled_ = false; } @@ -2608,8 +2608,8 @@ TEST_F(ValueSerializerTestWithSharedArrayBufferClone, #if V8_ENABLE_WEBASSEMBLY TEST_F(ValueSerializerTestWithSharedArrayBufferClone, RoundTripWebAssemblyMemory) { - bool flag_was_enabled = i::FLAG_experimental_wasm_threads; - i::FLAG_experimental_wasm_threads = true; + bool flag_was_enabled = i::v8_flags.experimental_wasm_threads; + i::v8_flags.experimental_wasm_threads = true; std::vector<uint8_t> data = {0x00, 0x01, 0x80, 0xFF}; data.resize(65536); @@ -2636,7 +2636,7 @@ TEST_F(ValueSerializerTestWithSharedArrayBufferClone, ExpectScriptTrue( "new Uint8Array(result.buffer, 0, 4).toString() === '0,1,128,255'"); - i::FLAG_experimental_wasm_threads = flag_was_enabled; + i::v8_flags.experimental_wasm_threads = flag_was_enabled; } #endif // V8_ENABLE_WEBASSEMBLY @@ -2960,14 +2960,14 @@ class ValueSerializerTestWithWasm : public ValueSerializerTest { protected: static void SetUpTestSuite() { - g_saved_flag = i::FLAG_expose_wasm; - i::FLAG_expose_wasm = true; + g_saved_flag = i::v8_flags.expose_wasm; + i::v8_flags.expose_wasm = true; ValueSerializerTest::SetUpTestSuite(); } static void TearDownTestSuite() { ValueSerializerTest::TearDownTestSuite(); - i::FLAG_expose_wasm = g_saved_flag; + i::v8_flags.expose_wasm = g_saved_flag; g_saved_flag = false; } diff --git a/deps/v8/test/unittests/objects/weakmaps-unittest.cc b/deps/v8/test/unittests/objects/weakmaps-unittest.cc index cd2aad1269a469..9e839463dd9e0c 100644 --- a/deps/v8/test/unittests/objects/weakmaps-unittest.cc +++ b/deps/v8/test/unittests/objects/weakmaps-unittest.cc @@ -55,7 +55,7 @@ static void WeakPointerCallback(const v8::WeakCallbackInfo<void>& data) { } TEST_F(WeakMapsTest, Weakness) { - FLAG_incremental_marking = false; + v8_flags.incremental_marking = false; Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); HandleScope scope(isolate); @@ -83,14 +83,22 @@ TEST_F(WeakMapsTest, Weakness) { int32_t object_hash = object->GetOrCreateHash(isolate).value(); JSWeakCollection::Set(weakmap, object, smi, object_hash); } - CHECK_EQ(2, EphemeronHashTable::cast(weakmap->table()).NumberOfElements()); + // Put a symbol key into weak map. + { + HandleScope inner_scope(isolate); + Handle<Symbol> symbol = factory->NewSymbol(); + Handle<Smi> smi(Smi::FromInt(23), isolate); + JSWeakCollection::Set(weakmap, symbol, smi, symbol->hash()); + } + CHECK_EQ(3, EphemeronHashTable::cast(weakmap->table()).NumberOfElements()); // Force a full GC. 
PreciseCollectAllGarbage(); CHECK_EQ(0, NumberOfWeakCalls); + // Symbol key should be deleted. CHECK_EQ(2, EphemeronHashTable::cast(weakmap->table()).NumberOfElements()); CHECK_EQ( - 0, EphemeronHashTable::cast(weakmap->table()).NumberOfDeletedElements()); + 1, EphemeronHashTable::cast(weakmap->table()).NumberOfDeletedElements()); // Make the global reference to the key weak. std::pair<Handle<Object>*, int> handle_and_id(&key, 1234); @@ -103,7 +111,7 @@ TEST_F(WeakMapsTest, Weakness) { CHECK_EQ(1, NumberOfWeakCalls); CHECK_EQ(0, EphemeronHashTable::cast(weakmap->table()).NumberOfElements()); CHECK_EQ( - 2, EphemeronHashTable::cast(weakmap->table()).NumberOfDeletedElements()); + 3, EphemeronHashTable::cast(weakmap->table()).NumberOfDeletedElements()); } TEST_F(WeakMapsTest, Shrinking) { @@ -185,7 +193,8 @@ TEST_F(WeakMapsTest, WeakMapPromotionMarkCompact) { } TEST_F(WeakMapsTest, WeakMapScavenge) { - if (i::FLAG_single_generation || i::FLAG_stress_incremental_marking) return; + if (i::v8_flags.single_generation) return; + if (i::v8_flags.stress_incremental_marking) return; Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); HandleScope scope(isolate); @@ -203,7 +212,7 @@ TEST_F(WeakMapsTest, WeakMapScavenge) { CHECK(EphemeronHashTableContainsKey( EphemeronHashTable::cast(weakmap->table()), *object)); - if (!FLAG_minor_mc) { + if (!v8_flags.minor_mc) { GcAndSweep(NEW_SPACE); CHECK(ObjectInYoungGeneration(*object)); CHECK(!ObjectInYoungGeneration(weakmap->table())); @@ -221,10 +230,10 @@ TEST_F(WeakMapsTest, WeakMapScavenge) { // Test that weak map values on an evacuation candidate which are not reachable // by other paths are correctly recorded in the slots buffer. TEST_F(WeakMapsTest, Regress2060a) { - if (!i::FLAG_compact) return; - if (i::FLAG_enable_third_party_heap) return; - FLAG_compact_on_every_full_gc = true; - FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace. + if (!i::v8_flags.compact) return; + if (i::v8_flags.enable_third_party_heap) return; + v8_flags.compact_on_every_full_gc = true; + v8_flags.stress_concurrent_allocation = false; // For SimulateFullSpace. Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); Heap* heap = isolate->heap(); @@ -245,7 +254,7 @@ TEST_F(WeakMapsTest, Regress2060a) { Handle<JSObject> object = factory->NewJSObject(function, AllocationType::kOld); CHECK(!Heap::InYoungGeneration(*object)); - CHECK_IMPLIES(!FLAG_enable_third_party_heap, + CHECK_IMPLIES(!v8_flags.enable_third_party_heap, !first_page->Contains(object->address())); int32_t hash = key->GetOrCreateHash(isolate).value(); JSWeakCollection::Set(weakmap, key, object, hash); @@ -253,19 +262,19 @@ TEST_F(WeakMapsTest, Regress2060a) { } // Force compacting garbage collection. - CHECK(FLAG_compact_on_every_full_gc); + CHECK(v8_flags.compact_on_every_full_gc); CollectAllGarbage(); } // Test that weak map keys on an evacuation candidate which are reachable by // other strong paths are correctly recorded in the slots buffer. TEST_F(WeakMapsTest, Regress2060b) { - if (!i::FLAG_compact) return; - FLAG_compact_on_every_full_gc = true; + if (!i::v8_flags.compact) return; + v8_flags.compact_on_every_full_gc = true; #ifdef VERIFY_HEAP - FLAG_verify_heap = true; + v8_flags.verify_heap = true; #endif - FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace. + v8_flags.stress_concurrent_allocation = false; // For SimulateFullSpace. 
Isolate* isolate = i_isolate(); Factory* factory = isolate->factory(); @@ -283,7 +292,7 @@ TEST_F(WeakMapsTest, Regress2060b) { for (int i = 0; i < 32; i++) { keys[i] = factory->NewJSObject(function, AllocationType::kOld); CHECK(!Heap::InYoungGeneration(*keys[i])); - CHECK_IMPLIES(!FLAG_enable_third_party_heap, + CHECK_IMPLIES(!v8_flags.enable_third_party_heap, !first_page->Contains(keys[i]->address())); } Handle<JSWeakMap> weakmap = isolate->factory()->NewJSWeakMap(); @@ -295,14 +304,14 @@ TEST_F(WeakMapsTest, Regress2060b) { // Force compacting garbage collection. The subsequent collections are used // to verify that key references were actually updated. - CHECK(FLAG_compact_on_every_full_gc); + CHECK(v8_flags.compact_on_every_full_gc); CollectAllGarbage(); CollectAllGarbage(); CollectAllGarbage(); } TEST_F(WeakMapsTest, Regress399527) { - if (!FLAG_incremental_marking) return; + if (!v8_flags.incremental_marking) return; v8::HandleScope scope(v8_isolate()); Isolate* isolate = i_isolate(); Heap* heap = isolate->heap(); diff --git a/deps/v8/test/unittests/objects/weaksets-unittest.cc b/deps/v8/test/unittests/objects/weaksets-unittest.cc index e8cc95779561bc..b2a9df3a0cc380 100644 --- a/deps/v8/test/unittests/objects/weaksets-unittest.cc +++ b/deps/v8/test/unittests/objects/weaksets-unittest.cc @@ -71,7 +71,7 @@ static void WeakPointerCallback(const v8::WeakCallbackInfo<void>& data) { } TEST_F(WeakSetsTest, WeakSet_Weakness) { - FLAG_incremental_marking = false; + v8_flags.incremental_marking = false; Factory* factory = i_isolate()->factory(); HandleScope scope(i_isolate()); Handle<JSWeakSet> weakset = AllocateJSWeakSet(); @@ -156,10 +156,10 @@ TEST_F(WeakSetsTest, WeakSet_Shrinking) { // Test that weak set values on an evacuation candidate which are not reachable // by other paths are correctly recorded in the slots buffer. TEST_F(WeakSetsTest, WeakSet_Regress2060a) { - if (!i::FLAG_compact) return; - if (i::FLAG_enable_third_party_heap) return; - FLAG_compact_on_every_full_gc = true; - FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace. + if (!i::v8_flags.compact) return; + if (i::v8_flags.enable_third_party_heap) return; + v8_flags.compact_on_every_full_gc = true; + v8_flags.stress_concurrent_allocation = false; // For SimulateFullSpace. Factory* factory = i_isolate()->factory(); Heap* heap = i_isolate()->heap(); HandleScope scope(i_isolate()); @@ -179,7 +179,7 @@ TEST_F(WeakSetsTest, WeakSet_Regress2060a) { Handle<JSObject> object = factory->NewJSObject(function, AllocationType::kOld); CHECK(!Heap::InYoungGeneration(*object)); - CHECK_IMPLIES(!FLAG_enable_third_party_heap, + CHECK_IMPLIES(!v8_flags.enable_third_party_heap, !first_page->Contains(object->address())); int32_t hash = key->GetOrCreateHash(i_isolate()).value(); JSWeakCollection::Set(weakset, key, object, hash); @@ -187,20 +187,20 @@ TEST_F(WeakSetsTest, WeakSet_Regress2060a) { } // Force compacting garbage collection. - CHECK(FLAG_compact_on_every_full_gc); + CHECK(v8_flags.compact_on_every_full_gc); CollectAllGarbage(); } // Test that weak set keys on an evacuation candidate which are reachable by // other strong paths are correctly recorded in the slots buffer. 
TEST_F(WeakSetsTest, WeakSet_Regress2060b) { - if (!i::FLAG_compact) return; - if (i::FLAG_enable_third_party_heap) return; - FLAG_compact_on_every_full_gc = true; + if (!i::v8_flags.compact) return; + if (i::v8_flags.enable_third_party_heap) return; + v8_flags.compact_on_every_full_gc = true; #ifdef VERIFY_HEAP - FLAG_verify_heap = true; + v8_flags.verify_heap = true; #endif - FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace. + v8_flags.stress_concurrent_allocation = false; // For SimulateFullSpace. Factory* factory = i_isolate()->factory(); Heap* heap = i_isolate()->heap(); @@ -217,7 +217,7 @@ TEST_F(WeakSetsTest, WeakSet_Regress2060b) { for (int i = 0; i < 32; i++) { keys[i] = factory->NewJSObject(function, AllocationType::kOld); CHECK(!Heap::InYoungGeneration(*keys[i])); - CHECK_IMPLIES(!FLAG_enable_third_party_heap, + CHECK_IMPLIES(!v8_flags.enable_third_party_heap, !first_page->Contains(keys[i]->address())); } Handle<JSWeakSet> weakset = AllocateJSWeakSet(); @@ -229,7 +229,7 @@ TEST_F(WeakSetsTest, WeakSet_Regress2060b) { // Force compacting garbage collection. The subsequent collections are used // to verify that key references were actually updated. - CHECK(FLAG_compact_on_every_full_gc); + CHECK(v8_flags.compact_on_every_full_gc); CollectAllGarbage(); CollectAllGarbage(); CollectAllGarbage(); diff --git a/deps/v8/test/unittests/parser/decls-unittest.cc b/deps/v8/test/unittests/parser/decls-unittest.cc index ed35baf8ab1a44..8b12db12e3e554 100644 --- a/deps/v8/test/unittests/parser/decls-unittest.cc +++ b/deps/v8/test/unittests/parser/decls-unittest.cc @@ -466,7 +466,7 @@ TEST_F(DeclsTest, CrossScriptReferences) { } TEST_F(DeclsTest, CrossScriptReferences_Simple) { - i::FLAG_use_strict = true; + i::v8_flags.use_strict = true; HandleScope scope(isolate()); @@ -478,7 +478,7 @@ TEST_F(DeclsTest, CrossScriptReferences_Simple) { } TEST_F(DeclsTest, CrossScriptReferences_Simple2) { - i::FLAG_use_strict = true; + i::v8_flags.use_strict = true; HandleScope scope(isolate()); @@ -657,7 +657,7 @@ TEST_F(DeclsTest, CrossScriptReferencesHarmonyRegress) { } TEST_F(DeclsTest, GlobalLexicalOSR) { - i::FLAG_use_strict = true; + i::v8_flags.use_strict = true; HandleScope scope(isolate()); SimpleContext context; @@ -680,7 +680,7 @@ TEST_F(DeclsTest, GlobalLexicalOSR) { } TEST_F(DeclsTest, CrossScriptConflicts) { - i::FLAG_use_strict = true; + i::v8_flags.use_strict = true; HandleScope scope(isolate()); @@ -805,7 +805,7 @@ TEST_F(DeclsTest, CrossScriptStaticLookupUndeclared) { } TEST_F(DeclsTest, CrossScriptLoadICs) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; HandleScope handle_scope(isolate()); @@ -863,7 +863,7 @@ TEST_F(DeclsTest, CrossScriptLoadICs) { } TEST_F(DeclsTest, CrossScriptStoreICs) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; HandleScope handle_scope(isolate()); @@ -932,7 +932,7 @@ TEST_F(DeclsTest, CrossScriptStoreICs) { } TEST_F(DeclsTest, CrossScriptAssignmentToConst) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; HandleScope handle_scope(isolate()); @@ -953,7 +953,7 @@ TEST_F(DeclsTest, CrossScriptAssignmentToConst) { } TEST_F(DeclsTest, Regress425510) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; HandleScope handle_scope(isolate()); @@ -969,7 +969,7 @@ TEST_F(DeclsTest, Regress425510) { } TEST_F(DeclsTest, Regress3941) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; HandleScope 
handle_scope(isolate()); @@ -1009,7 +1009,7 @@ TEST_F(DeclsTest, Regress3941) { } TEST_F(DeclsTest, Regress3941_Reads) { - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; HandleScope handle_scope(isolate()); diff --git a/deps/v8/test/unittests/parser/parse-decision-unittest.cc b/deps/v8/test/unittests/parser/parse-decision-unittest.cc index 67c217d24fdf82..8ac2bf23c03042 100644 --- a/deps/v8/test/unittests/parser/parse-decision-unittest.cc +++ b/deps/v8/test/unittests/parser/parse-decision-unittest.cc @@ -55,7 +55,7 @@ void GetTopLevelFunctionInfo( } // anonymous namespace TEST_F(ParseDecisionTest, GetTopLevelFunctionInfo) { - if (!FLAG_lazy) return; + if (!v8_flags.lazy) return; HandleScope scope(i_isolate()); @@ -69,7 +69,7 @@ TEST_F(ParseDecisionTest, GetTopLevelFunctionInfo) { } TEST_F(ParseDecisionTest, EagerlyCompileImmediateUseFunctions) { - if (!FLAG_lazy) return; + if (!v8_flags.lazy) return; HandleScope scope(i_isolate()); @@ -100,7 +100,7 @@ TEST_F(ParseDecisionTest, EagerlyCompileImmediateUseFunctions) { } TEST_F(ParseDecisionTest, CommaFunctionSequence) { - if (!FLAG_lazy) return; + if (!v8_flags.lazy) return; HandleScope scope(i_isolate()); diff --git a/deps/v8/test/unittests/parser/parsing-unittest.cc b/deps/v8/test/unittests/parser/parsing-unittest.cc index 965a628571ae14..ac53e538fec9c1 100644 --- a/deps/v8/test/unittests/parser/parsing-unittest.cc +++ b/deps/v8/test/unittests/parser/parsing-unittest.cc @@ -51,7 +51,7 @@ enum ParserFlag { enum ParserSyncTestResult { kSuccessOrError, kSuccess, kError }; void SetGlobalFlags(base::EnumSet<ParserFlag> flags) { - i::FLAG_allow_natives_syntax = flags.contains(kAllowNatives); + i::v8_flags.allow_natives_syntax = flags.contains(kAllowNatives); } void SetParserFlags(i::UnoptimizedCompileFlags* compile_flags, @@ -226,7 +226,7 @@ class ParsingTest : public TestWithContextAndZone { i::UnoptimizedCompileFlags compile_flags = i::UnoptimizedCompileFlags::ForToplevelCompile( isolate, true, LanguageMode::kSloppy, REPLMode::kNo, - ScriptType::kClassic, FLAG_lazy); + ScriptType::kClassic, v8_flags.lazy); SetParserFlags(&compile_flags, flags); compile_flags.set_is_module(is_module); @@ -3122,7 +3122,7 @@ TEST_F(ParsingTest, InvalidLeftHandSide) { TEST_F(ParsingTest, FuncNameInferrerBasic) { // Tests that function names are inferred properly. - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; RunJS( "var foo1 = function() {}; " @@ -3160,7 +3160,7 @@ TEST_F(ParsingTest, FuncNameInferrerBasic) { TEST_F(ParsingTest, FuncNameInferrerTwoByte) { // Tests function name inferring in cases where some parts of the inferred // function name are two-byte strings. - i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate* isolate = v8_isolate(); uint16_t* two_byte_source = AsciiToTwoByteString( @@ -3183,7 +3183,7 @@ TEST_F(ParsingTest, FuncNameInferrerTwoByte) { TEST_F(ParsingTest, FuncNameInferrerEscaped) { // The same as FuncNameInferrerTwoByte, except that we express the two-byte // character as a Unicode escape. 
- i::FLAG_allow_natives_syntax = true; + i::v8_flags.allow_natives_syntax = true; v8::Isolate* isolate = v8_isolate(); uint16_t* two_byte_source = AsciiToTwoByteString( @@ -4164,7 +4164,7 @@ i::Scope* DeserializeFunctionScope(i::Isolate* isolate, i::Zone* zone, } // namespace TEST_F(ParsingTest, AsmModuleFlag) { - i::FLAG_validate_asm = false; + i::v8_flags.validate_asm = false; i::Isolate* isolate = i_isolate(); const char* src = @@ -4211,8 +4211,8 @@ TEST_F(ParsingTest, SloppyModeUseCount) { int use_counts[v8::Isolate::kUseCounterFeatureCount] = {}; global_use_counts = use_counts; // Force eager parsing (preparser doesn't update use counts). - i::FLAG_lazy = false; - i::FLAG_lazy_streaming = false; + i::v8_flags.lazy = false; + i::v8_flags.lazy_streaming = false; v8_isolate()->SetUseCounterCallback(MockUseCounterCallback); RunJS("function bar() { var baz = 1; }"); CHECK_LT(0, use_counts[v8::Isolate::kSloppyMode]); @@ -4222,8 +4222,8 @@ TEST_F(ParsingTest, SloppyModeUseCount) { TEST_F(ParsingTest, BothModesUseCount) { int use_counts[v8::Isolate::kUseCounterFeatureCount] = {}; global_use_counts = use_counts; - i::FLAG_lazy = false; - i::FLAG_lazy_streaming = false; + i::v8_flags.lazy = false; + i::v8_flags.lazy_streaming = false; v8_isolate()->SetUseCounterCallback(MockUseCounterCallback); RunJS("function bar() { 'use strict'; var baz = 1; }"); CHECK_LT(0, use_counts[v8::Isolate::kSloppyMode]); @@ -4647,7 +4647,7 @@ TEST_F(ParsingTest, ImportExpressionSuccess) { } TEST_F(ParsingTest, ImportExpressionWithImportAssertionSuccess) { - i::FLAG_harmony_import_assertions = true; + i::v8_flags.harmony_import_assertions = true; // clang-format off const char* context_data[][2] = { @@ -4701,9 +4701,8 @@ TEST_F(ParsingTest, ImportExpressionErrors) { "import{", "import{x", "import{x}", - "import(x, y)", + "import(x, y, z)", "import(...y)", - "import(x,)", "import(,)", "import(,y)", "import(;)", @@ -4778,7 +4777,7 @@ TEST_F(ParsingTest, ImportExpressionErrors) { TEST_F(ParsingTest, ImportExpressionWithImportAssertionErrors) { { - i::FLAG_harmony_import_assertions = true; + i::v8_flags.harmony_import_assertions = true; // clang-format off const char* context_data[][2] = { @@ -4880,7 +4879,7 @@ TEST_F(ParsingTest, BasicImportAssertionParsing) { }; // clang-format on - i::FLAG_harmony_import_assertions = true; + i::v8_flags.harmony_import_assertions = true; i::Isolate* isolate = i_isolate(); i::Factory* factory = isolate->factory(); @@ -4947,7 +4946,7 @@ TEST_F(ParsingTest, ImportAssertionParsingErrors) { }; // clang-format on - i::FLAG_harmony_import_assertions = true; + i::v8_flags.harmony_import_assertions = true; i::Isolate* isolate = i_isolate(); i::Factory* factory = isolate->factory(); @@ -7956,7 +7955,7 @@ TEST_F(ParsingTest, ModuleParsingInternals) { } TEST_F(ParsingTest, ModuleParsingInternalsWithImportAssertions) { - i::FLAG_harmony_import_assertions = true; + i::v8_flags.harmony_import_assertions = true; i::Isolate* isolate = i_isolate(); i::Factory* factory = isolate->factory(); isolate->stack_guard()->SetStackLimit(base::Stack::GetCurrentStackPosition() - @@ -8049,7 +8048,7 @@ TEST_F(ParsingTest, ModuleParsingInternalsWithImportAssertions) { } TEST_F(ParsingTest, ModuleParsingModuleRequestOrdering) { - i::FLAG_harmony_import_assertions = true; + i::v8_flags.harmony_import_assertions = true; i::Isolate* isolate = i_isolate(); i::Factory* factory = isolate->factory(); isolate->stack_guard()->SetStackLimit(base::Stack::GetCurrentStackPosition() - @@ -8317,7 +8316,7 @@ TEST_F(ParsingTest, 
ModuleParsingModuleRequestOrdering) { } TEST_F(ParsingTest, ModuleParsingImportAssertionKeySorting) { - i::FLAG_harmony_import_assertions = true; + i::v8_flags.harmony_import_assertions = true; i::Isolate* isolate = i_isolate(); i::Factory* factory = isolate->factory(); isolate->stack_guard()->SetStackLimit(base::Stack::GetCurrentStackPosition() - diff --git a/deps/v8/test/unittests/parser/scanner-streams-unittest.cc b/deps/v8/test/unittests/parser/scanner-streams-unittest.cc index c8f697c87cbba2..41355df10ff9d5 100644 --- a/deps/v8/test/unittests/parser/scanner-streams-unittest.cc +++ b/deps/v8/test/unittests/parser/scanner-streams-unittest.cc @@ -760,8 +760,8 @@ TEST_F(ScannerStreamsTest, TestOverlongAndInvalidSequences) { TEST_F(ScannerStreamsTest, RelocatingCharacterStream) { // This test relies on the invariant that the scavenger will move objects - if (i::FLAG_single_generation) return; - i::FLAG_manual_evacuation_candidates_selection = true; + if (i::v8_flags.single_generation) return; + i::v8_flags.manual_evacuation_candidates_selection = true; v8::internal::ManualGCScope manual_gc_scope(i_isolate()); v8::HandleScope scope(isolate()); @@ -798,8 +798,8 @@ TEST_F(ScannerStreamsTest, RelocatingCharacterStream) { TEST_F(ScannerStreamsTest, RelocatingUnbufferedCharacterStream) { // This test relies on the invariant that the scavenger will move objects - if (i::FLAG_single_generation) return; - i::FLAG_manual_evacuation_candidates_selection = true; + if (i::v8_flags.single_generation) return; + i::v8_flags.manual_evacuation_candidates_selection = true; v8::internal::ManualGCScope manual_gc_scope(i_isolate()); v8::HandleScope scope(isolate()); diff --git a/deps/v8/test/unittests/regexp/regexp-unittest.cc b/deps/v8/test/unittests/regexp/regexp-unittest.cc index f4e0f9577633f5..065eea336f4eb2 100644 --- a/deps/v8/test/unittests/regexp/regexp-unittest.cc +++ b/deps/v8/test/unittests/regexp/regexp-unittest.cc @@ -576,10 +576,10 @@ static void Execute(const char* input, bool multiline, bool unicode, #ifdef DEBUG TEST_F(RegExpTest, ParsePossessiveRepetition) { - bool old_flag_value = FLAG_regexp_possessive_quantifier; + bool old_flag_value = v8_flags.regexp_possessive_quantifier; // Enable possessive quantifier syntax. - FLAG_regexp_possessive_quantifier = true; + v8_flags.regexp_possessive_quantifier = true; CheckParseEq("a*+", "(# 0 - p 'a')"); CheckParseEq("a++", "(# 1 - p 'a')"); @@ -588,7 +588,7 @@ TEST_F(RegExpTest, ParsePossessiveRepetition) { CheckParseEq("za{10,20}+b", "(: 'z' (# 10 20 p 'a') 'b')"); // Disable possessive quantifier syntax. 
- FLAG_regexp_possessive_quantifier = false; + v8_flags.regexp_possessive_quantifier = false; CHECK_PARSE_ERROR("a*+"); CHECK_PARSE_ERROR("a++"); @@ -596,7 +596,7 @@ TEST_F(RegExpTest, ParsePossessiveRepetition) { CHECK_PARSE_ERROR("a{10,20}+"); CHECK_PARSE_ERROR("a{10,20}+b"); - FLAG_regexp_possessive_quantifier = old_flag_value; + v8_flags.regexp_possessive_quantifier = old_flag_value; } #endif @@ -1783,12 +1783,12 @@ TEST_F(RegExpTest, PeepholeNoChange) { Handle<String> source = factory->NewStringFromStaticChars("^foo"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> array = Handle<ByteArray>::cast(orig.GetCode(source)); int length = array->length(); byte* byte_array = array->GetDataStartAddress(); - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); byte* byte_array_optimized = array_optimized->GetDataStartAddress(); @@ -1818,11 +1818,11 @@ TEST_F(RegExpTest, PeepholeSkipUntilChar) { Handle<String> source = factory->NewStringFromStaticChars("dummy"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> array = Handle<ByteArray>::cast(orig.GetCode(source)); int length = array->length(); - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); int length_optimized = array_optimized->length(); @@ -1871,11 +1871,11 @@ TEST_F(RegExpTest, PeepholeSkipUntilBitInTable) { Handle<String> source = factory->NewStringFromStaticChars("dummy"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> array = Handle<ByteArray>::cast(orig.GetCode(source)); int length = array->length(); - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); int length_optimized = array_optimized->length(); @@ -1918,11 +1918,11 @@ TEST_F(RegExpTest, PeepholeSkipUntilCharPosChecked) { Handle<String> source = factory->NewStringFromStaticChars("dummy"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> array = Handle<ByteArray>::cast(orig.GetCode(source)); int length = array->length(); - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); int length_optimized = array_optimized->length(); @@ -1966,11 +1966,11 @@ TEST_F(RegExpTest, PeepholeSkipUntilCharAnd) { Handle<String> source = factory->NewStringFromStaticChars("dummy"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> array = Handle<ByteArray>::cast(orig.GetCode(source)); int length = array->length(); - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); int length_optimized = array_optimized->length(); @@ -2014,11 +2014,11 @@ TEST_F(RegExpTest, PeepholeSkipUntilCharOrChar) { Handle<String> source = factory->NewStringFromStaticChars("dummy"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> 
array = Handle<ByteArray>::cast(orig.GetCode(source)); int length = array->length(); - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); int length_optimized = array_optimized->length(); @@ -2073,11 +2073,11 @@ TEST_F(RegExpTest, PeepholeSkipUntilGtOrNotBitInTable) { Handle<String> source = factory->NewStringFromStaticChars("dummy"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> array = Handle<ByteArray>::cast(orig.GetCode(source)); int length = array->length(); - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); int length_optimized = array_optimized->length(); @@ -2152,7 +2152,7 @@ TEST_F(RegExpTest, PeepholeLabelFixupsInside) { Handle<String> source = factory->NewStringFromStaticChars("dummy"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> array = Handle<ByteArray>::cast(orig.GetCode(source)); for (int label_idx = 0; label_idx < 3; label_idx++) { @@ -2162,7 +2162,7 @@ TEST_F(RegExpTest, PeepholeLabelFixupsInside) { } } - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); @@ -2258,7 +2258,7 @@ TEST_F(RegExpTest, PeepholeLabelFixupsComplex) { Handle<String> source = factory->NewStringFromStaticChars("dummy"); - i::FLAG_regexp_peephole_optimization = false; + v8_flags.regexp_peephole_optimization = false; Handle<ByteArray> array = Handle<ByteArray>::cast(orig.GetCode(source)); for (int label_idx = 0; label_idx < 4; label_idx++) { @@ -2268,7 +2268,7 @@ TEST_F(RegExpTest, PeepholeLabelFixupsComplex) { } } - i::FLAG_regexp_peephole_optimization = true; + v8_flags.regexp_peephole_optimization = true; Handle<ByteArray> array_optimized = Handle<ByteArray>::cast(opt.GetCode(source)); @@ -2300,7 +2300,7 @@ TEST_F(RegExpTest, PeepholeLabelFixupsComplex) { } TEST_F(RegExpTestWithContext, UnicodePropertyEscapeCodeSize) { - i::FlagScope<bool> f(&v8::internal::FLAG_regexp_tier_up, false); + FlagScope<bool> f(&v8_flags.regexp_tier_up, false); v8::HandleScope scope(isolate()); i::Handle<i::JSRegExp> re = Utils::OpenHandle( @@ -2347,8 +2347,8 @@ void ReenterRegExp(v8::Isolate* isolate, void* data) { // Tests reentrant irregexp calls. TEST_F(RegExpTestWithContext, RegExpInterruptReentrantExecution) { - CHECK(!i::FLAG_jitless); - i::FLAG_regexp_tier_up = false; // Enter irregexp, not the interpreter. + CHECK(!v8_flags.jitless); + v8_flags.regexp_tier_up = false; // Enter irregexp, not the interpreter. 
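// Why the hunk above turns regexp_tier_up off, sketched with hypothetical
// names (not V8's actual regexp pipeline types): with tier-up enabled a
// regexp first runs in the bytecode interpreter and is only recompiled to
// native irregexp code after it has executed enough times, so the test
// would not start out in irregexp as it requires.
//
//   void Execute(RegExpData& re) {
//     if (v8_flags.regexp_tier_up && ++re.ticks < kTierUpThreshold) {
//       RunBytecode(re);       // interpreter tier
//     } else {
//       EnsureNativeCode(re);  // native irregexp tier
//       RunNative(re);
//     }
//   }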
v8::HandleScope scope(isolate()); diff --git a/deps/v8/test/unittests/runtime/runtime-debug-unittest.cc b/deps/v8/test/unittests/runtime/runtime-debug-unittest.cc index a73795bb401d7e..6e99365335d7e4 100644 --- a/deps/v8/test/unittests/runtime/runtime-debug-unittest.cc +++ b/deps/v8/test/unittests/runtime/runtime-debug-unittest.cc @@ -7,6 +7,7 @@ #include "include/v8-object.h" #include "include/v8-template.h" #include "src/api/api.h" +#include "src/objects/js-array-inl.h" #include "src/objects/objects-inl.h" #include "src/runtime/runtime.h" #include "test/unittests/test-utils.h" @@ -56,6 +57,27 @@ TEST_F(RuntimeTest, DoesNotReturnPrototypeWhenInacessible) { EXPECT_EQ(0u, result->Length()); } +#if V8_ENABLE_WEBASSEMBLY +TEST_F(RuntimeTest, WasmTableWithoutInstance) { + uint32_t initial = 1u; + bool has_maximum = false; + uint32_t maximum = std::numeric_limits<uint32_t>::max(); + Handle<FixedArray> elements; + Handle<WasmTableObject> table = WasmTableObject::New( + i_isolate(), Handle<WasmInstanceObject>(), wasm::kWasmAnyRef, initial, + has_maximum, maximum, &elements, i_isolate()->factory()->null_value()); + MaybeHandle<JSArray> result = + Runtime::GetInternalProperties(i_isolate(), table); + ASSERT_FALSE(result.is_null()); + // ["[[Prototype]]", <map>, "[[Entries]]", <entries>] + ASSERT_EQ(4, result.ToHandleChecked()->elements().length()); + Handle<Object> entries = + FixedArrayBase::GetElement(i_isolate(), result.ToHandleChecked(), 3) + .ToHandleChecked(); + EXPECT_EQ(1, JSArray::cast(*entries).elements().length()); +} +#endif + } // namespace } // namespace internal } // namespace v8 diff --git a/deps/v8/test/unittests/strings/unicode-unittest.cc b/deps/v8/test/unittests/strings/unicode-unittest.cc index d32ed8871f5bb1..5d647279732aaa 100644 --- a/deps/v8/test/unittests/strings/unicode-unittest.cc +++ b/deps/v8/test/unittests/strings/unicode-unittest.cc @@ -499,7 +499,8 @@ class UnicodeWithGCTest : public TestWithHeapInternals {}; #define GC_INSIDE_NEW_STRING_FROM_UTF8_SUB_STRING(NAME, STRING) \ TEST_F(UnicodeWithGCTest, GCInsideNewStringFromUtf8SubStringWith##NAME) { \ - FLAG_stress_concurrent_allocation = false; /* For SimulateFullSpace. */ \ + v8_flags.stress_concurrent_allocation = \ + false; /* For SimulateFullSpace. */ \ v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate())); \ Factory* factory = isolate()->factory(); \ /* Length must be bigger than the buffer size of the Utf8Decoder. 
*/ \ @@ -510,7 +511,7 @@ class UnicodeWithGCTest : public TestWithHeapInternals {}; ->NewStringFromOneByte(v8::base::Vector<const uint8_t>( \ reinterpret_cast<const uint8_t*>(buf), len)) \ .ToHandleChecked(); \ - if (FLAG_single_generation) { \ + if (v8_flags.single_generation) { \ CHECK(!Heap::InYoungGeneration(*main_string)); \ SimulateFullSpace(heap()->old_space()); \ } else { \ diff --git a/deps/v8/test/unittests/tasks/background-compile-task-unittest.cc b/deps/v8/test/unittests/tasks/background-compile-task-unittest.cc index 5ad5279a72fee9..1dba91de98fc69 100644 --- a/deps/v8/test/unittests/tasks/background-compile-task-unittest.cc +++ b/deps/v8/test/unittests/tasks/background-compile-task-unittest.cc @@ -50,11 +50,12 @@ class BackgroundCompileTaskTest : public TestWithNativeContext { BackgroundCompileTask* NewBackgroundCompileTask( Isolate* isolate, Handle<SharedFunctionInfo> shared, - size_t stack_size = FLAG_stack_size) { + size_t stack_size = v8_flags.stack_size) { return new BackgroundCompileTask( isolate, shared, test::SourceCharacterStreamForShared(isolate, shared), isolate->counters()->worker_thread_runtime_call_stats(), - isolate->counters()->compile_function_on_background(), FLAG_stack_size); + isolate->counters()->compile_function_on_background(), + v8_flags.stack_size); } private: diff --git a/deps/v8/test/unittests/temporal/temporal-parser-unittest.cc b/deps/v8/test/unittests/temporal/temporal-parser-unittest.cc index 6277c7a3f1da0e..319bfd012ef8ca 100644 --- a/deps/v8/test/unittests/temporal/temporal-parser-unittest.cc +++ b/deps/v8/test/unittests/temporal/temporal-parser-unittest.cc @@ -158,15 +158,30 @@ class TemporalParserTest : public TestWithIsolate { } } - void VerifyParseTemporalCalendarStringSuccess( - const char* str, const std::string& calendar_name) { + void VerifyParseCalendarNameSuccess(const char* str) { Handle<String> input = MakeString(str); base::Optional<ParsedISO8601Result> result = - TemporalParser::ParseTemporalCalendarString(i_isolate(), input); + TemporalParser::ParseCalendarName(i_isolate(), input); CHECK(result.has_value()); ParsedISO8601Result actual = *result; - CheckCalendar(i_isolate(), input, actual.calendar_name_start, - actual.calendar_name_length, calendar_name); + // For ParseCalendarName, we just validate that the input fully matches + // CalendarName; therefore, the test passes if the start is 0 and + // the calendar_name_length is the same as the length of the input. + CHECK_EQ(actual.calendar_name_start, 0); + CHECK_EQ(actual.calendar_name_length, input->length()); + } + + void VerifyParseTimeZoneIdentifierSuccess(const char* str) { + Handle<String> input = MakeString(str); + base::Optional<ParsedISO8601Result> result = + TemporalParser::ParseTimeZoneIdentifier(i_isolate(), input); + CHECK(result.has_value()); + ParsedISO8601Result actual = *result; + // For ParseTimeZoneIdentifier, we just validate that the input fully + // matches TimeZoneIdentifier; therefore, the test passes if the start + // is 0 and the tzi_name_length is the same as the length of the input. 
+ CHECK_EQ(actual.tzi_name_start, 0); + CHECK_EQ(actual.tzi_name_length, input->length()); } void VerifyParseTemporalTimeStringSuccess(const char* str, int32_t time_hour, @@ -1601,47 +1616,6 @@ TEST_F(TemporalParserTest, TemporalZonedDateTimeStringIllegal) { VERIFY_PARSE_FAIL_ON_ZONED_DATE_TIME(TemporalZonedDateTimeString); } -TEST_F(TemporalParserTest, TemporalCalendarStringSuccess) { - // CalendarName - VerifyParseTemporalCalendarStringSuccess("chinese", "chinese"); - VerifyParseTemporalCalendarStringSuccess("roc", "roc"); - VerifyParseTemporalCalendarStringSuccess("indian", "indian"); - VerifyParseTemporalCalendarStringSuccess("persian", "persian"); - VerifyParseTemporalCalendarStringSuccess("abcd-efghi", "abcd-efghi"); - VerifyParseTemporalCalendarStringSuccess("abcd-efghi", "abcd-efghi"); - VerifyParseTemporalCalendarStringSuccess( - "a2345678-b2345678-c2345678-d7654321", - "a2345678-b2345678-c2345678-d7654321"); - // TemporalInstantString - VerifyParseTemporalCalendarStringSuccess("2021-11-08z[ABCD]", ""); - // CalendarDateTime - VerifyParseTemporalCalendarStringSuccess("2021-11-08[u-ca=chinese]", - "chinese"); - VerifyParseTemporalCalendarStringSuccess("2021-11-08[ABCDEFG][u-ca=chinese]", - "chinese"); - VerifyParseTemporalCalendarStringSuccess( - "2021-11-08[ABCDEFG/hijklmn][u-ca=roc]", "roc"); - // Time - VerifyParseTemporalCalendarStringSuccess("23:45:59", ""); - // DateSpecYearMonth - VerifyParseTemporalCalendarStringSuccess("2021-12", ""); - // DateSpecMonthDay - VerifyParseTemporalCalendarStringSuccess("--12-31", ""); - VerifyParseTemporalCalendarStringSuccess("12-31", ""); - VerifyParseTemporalCalendarStringSuccess("--1231", ""); -} - -TEST_F(TemporalParserTest, TemporalCalendarStringIllegal) { - VERIFY_PARSE_FAIL(TemporalCalendarString, "20210304[u-ca=]"); - VERIFY_PARSE_FAIL(TemporalCalendarString, "20210304[u-ca=a]"); - VERIFY_PARSE_FAIL(TemporalCalendarString, "20210304[u-ca=ab]"); - VERIFY_PARSE_FAIL(TemporalCalendarString, "20210304[u-ca=abcdef-ab]"); - VERIFY_PARSE_FAIL(TemporalCalendarString, "20210304[u-ca=abcdefghijkl]"); - // It is a Syntax Error if DateExtendedYear is "-000000" - VERIFY_PARSE_FAIL(TemporalCalendarString, "-0000000304[u-ca=abcdef-ab]"); - VERIFY_PARSE_FAIL(TemporalCalendarString, "\u22120000000304[u-ca=abcdef-ab]"); -} - constexpr int64_t empty = ParsedISO8601Duration::kEmpty; // Test basic cases. 
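The two Verify* helpers added above assert a full-match invariant rather than comparing an extracted substring. A minimal self-contained sketch of that invariant (the Span type and FullyMatches helper are hypothetical; the real tests read the *_start/*_length fields of ParsedISO8601Result):

  #include <cstdint>
  #include <optional>
  #include <string>

  // Hypothetical stand-in for the parser result's span fields.
  struct Span {
    int32_t start;
    int32_t length;
  };

  // A production "fully matches" when the recognized span starts at 0 and
  // covers the whole input; this is what the CHECK_EQ pairs above assert.
  bool FullyMatches(const std::optional<Span>& parsed,
                    const std::string& input) {
    return parsed.has_value() && parsed->start == 0 &&
           parsed->length == static_cast<int32_t>(input.size());
  }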
@@ -2060,5 +2034,241 @@ TEST_F(TemporalParserTest, TimeZoneNumericUTCOffsetIllegal) { VERIFY_PARSE_FAIL(TimeZoneNumericUTCOffset, "+073401,9876543219"); } +TEST_F(TemporalParserTest, TimeZoneIdentifierSucccess) { + // TimeZoneIANAName: + // Etc/GMT ASCIISign UnpaddedHour: + VerifyParseTimeZoneIdentifierSuccess("Etc/GMT+0"); + VerifyParseTimeZoneIdentifierSuccess("Etc/GMT+1"); + VerifyParseTimeZoneIdentifierSuccess("Etc/GMT+11"); + VerifyParseTimeZoneIdentifierSuccess("Etc/GMT+23"); + // TimeZoneIANANameTail + VerifyParseTimeZoneIdentifierSuccess("_"); + VerifyParseTimeZoneIdentifierSuccess("_/_"); + VerifyParseTimeZoneIdentifierSuccess("a."); + VerifyParseTimeZoneIdentifierSuccess("a.."); + VerifyParseTimeZoneIdentifierSuccess("a_"); + VerifyParseTimeZoneIdentifierSuccess("a-"); + VerifyParseTimeZoneIdentifierSuccess("a-b"); + VerifyParseTimeZoneIdentifierSuccess("a-b/c"); + VerifyParseTimeZoneIdentifierSuccess("abcdefghijklmn"); + VerifyParseTimeZoneIdentifierSuccess("abcdefghijklmn/ABCDEFGHIJKLMN"); + + // TimeZoneIANALegacyName + VerifyParseTimeZoneIdentifierSuccess("Etc/GMT0"); + VerifyParseTimeZoneIdentifierSuccess("GMT0"); + VerifyParseTimeZoneIdentifierSuccess("GMT-0"); + VerifyParseTimeZoneIdentifierSuccess("GMT+0"); + VerifyParseTimeZoneIdentifierSuccess("EST5EDT"); + VerifyParseTimeZoneIdentifierSuccess("CST6CDT"); + VerifyParseTimeZoneIdentifierSuccess("MST7MDT"); + VerifyParseTimeZoneIdentifierSuccess("PST8PDT"); + + // TimeZoneUTCOffsetName + // Sign Hour + VerifyParseTimeZoneIdentifierSuccess("+00"); + VerifyParseTimeZoneIdentifierSuccess("+23"); + VerifyParseTimeZoneIdentifierSuccess("-00"); + VerifyParseTimeZoneIdentifierSuccess("-23"); + VerifyParseTimeZoneIdentifierSuccess("\u221200"); + VerifyParseTimeZoneIdentifierSuccess("\u221223"); + // Sign Hour : MinuteSecond + VerifyParseTimeZoneIdentifierSuccess("+00:00"); + VerifyParseTimeZoneIdentifierSuccess("+23:59"); + VerifyParseTimeZoneIdentifierSuccess("-00:00"); + VerifyParseTimeZoneIdentifierSuccess("-23:59"); + VerifyParseTimeZoneIdentifierSuccess("\u221200:00"); + VerifyParseTimeZoneIdentifierSuccess("\u221223:59"); + // Sign Hour MinuteSecond + VerifyParseTimeZoneIdentifierSuccess("+0000"); + VerifyParseTimeZoneIdentifierSuccess("+2359"); + VerifyParseTimeZoneIdentifierSuccess("-0000"); + VerifyParseTimeZoneIdentifierSuccess("-2359"); + VerifyParseTimeZoneIdentifierSuccess("\u22120000"); + VerifyParseTimeZoneIdentifierSuccess("\u22122359"); + + // Sign Hour : MinuteSecond : MinuteSecond Fractionopt + VerifyParseTimeZoneIdentifierSuccess("+00:00:00"); + VerifyParseTimeZoneIdentifierSuccess("+23:59:59"); + VerifyParseTimeZoneIdentifierSuccess("-00:00:00"); + VerifyParseTimeZoneIdentifierSuccess("-23:59:59"); + VerifyParseTimeZoneIdentifierSuccess("\u221200:00:00"); + VerifyParseTimeZoneIdentifierSuccess("\u221223:59:59"); + + VerifyParseTimeZoneIdentifierSuccess("+00:00:00.0"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,0"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00.10"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,01"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00.012"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,010"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00.0123"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,0120"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00.01234"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,01230"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00.012345"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,012340"); + 
VerifyParseTimeZoneIdentifierSuccess("+00:00:00.0123450"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,0123456"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,01234567"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00.01234560"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00,012345678"); + VerifyParseTimeZoneIdentifierSuccess("+00:00:00.012345680"); + + // Sign Hour MinuteSecond MinuteSecond Fractionopt + VerifyParseTimeZoneIdentifierSuccess("+000000"); + VerifyParseTimeZoneIdentifierSuccess("+235959"); + VerifyParseTimeZoneIdentifierSuccess("-000000"); + VerifyParseTimeZoneIdentifierSuccess("-235959"); + VerifyParseTimeZoneIdentifierSuccess("\u2212000000"); + VerifyParseTimeZoneIdentifierSuccess("\u2212235959"); + + VerifyParseTimeZoneIdentifierSuccess("-000000.0"); + VerifyParseTimeZoneIdentifierSuccess("-000000,0"); + VerifyParseTimeZoneIdentifierSuccess("-000000.10"); + VerifyParseTimeZoneIdentifierSuccess("-000000,01"); + VerifyParseTimeZoneIdentifierSuccess("-000000.012"); + VerifyParseTimeZoneIdentifierSuccess("-000000,010"); + VerifyParseTimeZoneIdentifierSuccess("-000000.0123"); + VerifyParseTimeZoneIdentifierSuccess("-000000,0120"); + VerifyParseTimeZoneIdentifierSuccess("-000000.01234"); + VerifyParseTimeZoneIdentifierSuccess("-000000,01230"); + VerifyParseTimeZoneIdentifierSuccess("-000000.012345"); + VerifyParseTimeZoneIdentifierSuccess("-000000,012340"); + VerifyParseTimeZoneIdentifierSuccess("-000000.0123450"); + VerifyParseTimeZoneIdentifierSuccess("-000000,0123456"); + VerifyParseTimeZoneIdentifierSuccess("-000000,01234567"); + VerifyParseTimeZoneIdentifierSuccess("-000000.01234560"); + VerifyParseTimeZoneIdentifierSuccess("-000000,012345678"); + VerifyParseTimeZoneIdentifierSuccess("-000000.012345680"); +} +TEST_F(TemporalParserTest, TimeZoneIdentifierIllegal) { + // Etc/GMT ASCIISign Hour: + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "[Etc/GMT+1]"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "Etc/GMT+01"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "Etc/GMT+24"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "."); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, ".."); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "A/.."); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "A/."); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-ab"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "abcdefghijklmno"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "abcdefghijklmno/ABCDEFGHIJKLMN"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "abcdefghijklmn/ABCDEFGHIJKLMNO"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "1"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "a1"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "Etc/GMT1"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "GMT1"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "GMT+1"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "GMT-1"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "EDT5EST"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "CDT6CST"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "MDT7MST"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "PDT8PST"); + + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+2"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+24"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-24"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u221224"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+0:60"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+00:5"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+00:60"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-00:60"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u221200:60"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+24:59"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-24:59"); + 
VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u221224:59"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+0060"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+00590"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-0060"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u22120060"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+2459"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-2459"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u22122459"); + + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+00:0000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+0000:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+23:0000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+2300:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+00:5900"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+0059:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+00:0059"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+0000:59"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-00:0000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-0000:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-23:0000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-2300:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-00:5900"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-0059:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-00:0059"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-0000:59"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u221200:0000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u22120000:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u221223:0000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u22122300:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u221200:5900"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u22120059:00"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u221200:0059"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u22120000:59"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-00059"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-0:0059"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-00:059"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-000:59"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-0005:9"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-0000000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-00000000"); + + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+240000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+006000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+000060"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-240000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-006000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-000060"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u2212240000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u2212006000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u2212000060"); + + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+00:00:00.0000000000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-00:00:00.0000000000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u221200:00:00.0000000000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "+000000.0000000000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "-000000.0000000000"); + VERIFY_PARSE_FAIL(TimeZoneIdentifier, "\u2212000000.0000000000"); +} + +TEST_F(TemporalParserTest, CalendarNameSuccess) { + // CalendarName + VerifyParseCalendarNameSuccess("chinese"); + VerifyParseCalendarNameSuccess("roc"); + VerifyParseCalendarNameSuccess("indian"); + VerifyParseCalendarNameSuccess("persian"); + VerifyParseCalendarNameSuccess("abcd-efghi"); + VerifyParseCalendarNameSuccess("abcd-efghi"); + VerifyParseCalendarNameSuccess("a2345678-b2345678-c2345678-d7654321"); +} + +TEST_F(TemporalParserTest, CalendarNameIllegal) { + VERIFY_PARSE_FAIL(CalendarName, "20210304[u-ca=]"); + VERIFY_PARSE_FAIL(CalendarName, 
"20210304[u-ca=a]"); + VERIFY_PARSE_FAIL(CalendarName, "20210304[u-ca=ab]"); + VERIFY_PARSE_FAIL(CalendarName, "20210304[u-ca=abcdef-ab]"); + VERIFY_PARSE_FAIL(CalendarName, "20210304[u-ca=abcdefghijkl]"); + // It is a Syntax Error if DateExtendedYear is "-000000" + VERIFY_PARSE_FAIL(CalendarName, "-0000000304[u-ca=abcdef-ab]"); + VERIFY_PARSE_FAIL(CalendarName, "\u22120000000304[u-ca=abcdef-ab]"); + // TemporalInstantString + VERIFY_PARSE_FAIL(CalendarName, "2021-11-08z[ABCD]"); + // CalendarDateTime + VERIFY_PARSE_FAIL(CalendarName, "2021-11-08[u-ca=chinese]"); + VERIFY_PARSE_FAIL(CalendarName, "2021-11-08[ABCDEFG][u-ca=chinese]"); + VERIFY_PARSE_FAIL(CalendarName, "2021-11-08[ABCDEFG/hijklmn][u-ca=roc]"); + // Time + VERIFY_PARSE_FAIL(CalendarName, "23:45:59"); + // DateSpecYearMonth + VERIFY_PARSE_FAIL(CalendarName, "2021-12"); + // DateSpecMonthDay + VERIFY_PARSE_FAIL(CalendarName, "--12-31"); + VERIFY_PARSE_FAIL(CalendarName, "12-31"); + VERIFY_PARSE_FAIL(CalendarName, "--1231"); +} + } // namespace internal } // namespace v8 diff --git a/deps/v8/test/unittests/test-utils.cc b/deps/v8/test/unittests/test-utils.cc index f6f9ac78b1894f..1e2ab73e7ba08c 100644 --- a/deps/v8/test/unittests/test-utils.cc +++ b/deps/v8/test/unittests/test-utils.cc @@ -68,7 +68,7 @@ namespace internal { SaveFlags::SaveFlags() { // For each flag, save the current flag value. #define FLAG_MODE_APPLY(ftype, ctype, nam, def, cmt) \ - SAVED_##nam = FLAG_##nam.value(); + SAVED_##nam = v8_flags.nam.value(); #include "src/flags/flag-definitions.h" #undef FLAG_MODE_APPLY } @@ -77,8 +77,8 @@ SaveFlags::~SaveFlags() { // For each flag, set back the old flag value if it changed (don't write the // flag if it didn't change, to keep TSAN happy). #define FLAG_MODE_APPLY(ftype, ctype, nam, def, cmt) \ - if (SAVED_##nam != FLAG_##nam.value()) { \ - FLAG_##nam = SAVED_##nam; \ + if (SAVED_##nam != v8_flags.nam.value()) { \ + v8_flags.nam = SAVED_##nam; \ } #include "src/flags/flag-definitions.h" // NOLINT #undef FLAG_MODE_APPLY @@ -95,13 +95,15 @@ ManualGCScope::ManualGCScope(i::Isolate* isolate) { isolate->heap()->CompleteSweepingFull(); } - i::FLAG_concurrent_marking = false; - i::FLAG_concurrent_sweeping = false; - i::FLAG_stress_incremental_marking = false; - i::FLAG_stress_concurrent_allocation = false; + i::v8_flags.concurrent_marking = false; + i::v8_flags.concurrent_sweeping = false; + i::v8_flags.concurrent_minor_mc_marking = false; + i::v8_flags.concurrent_minor_mc_sweeping = false; + i::v8_flags.stress_incremental_marking = false; + i::v8_flags.stress_concurrent_allocation = false; // Parallel marking has a dependency on concurrent marking. - i::FLAG_parallel_marking = false; - i::FLAG_detect_ineffective_gcs_near_heap_limit = false; + i::v8_flags.parallel_marking = false; + i::v8_flags.detect_ineffective_gcs_near_heap_limit = false; } } // namespace internal diff --git a/deps/v8/test/unittests/test-utils.h b/deps/v8/test/unittests/test-utils.h index 967b70bfd04959..56e4f4538b6f18 100644 --- a/deps/v8/test/unittests/test-utils.h +++ b/deps/v8/test/unittests/test-utils.h @@ -13,6 +13,7 @@ #include "include/v8-context.h" #include "include/v8-extension.h" #include "include/v8-local-handle.h" +#include "include/v8-object.h" #include "include/v8-primitive.h" #include "include/v8-template.h" #include "src/api/api-inl.h" @@ -41,7 +42,7 @@ class WithDefaultPlatformMixin : public TMixin { // Allow changing flags in unit tests. // TODO(12887): Fix tests to avoid changing flag values after // initialization. 
- i::FLAG_freeze_flags_after_init = false; + i::v8_flags.freeze_flags_after_init = false; v8::V8::Initialize(); } @@ -60,7 +61,7 @@ class WithDefaultPlatformMixin : public TMixin { template <typename TMixin> class WithJSSharedMemoryFeatureFlagsMixin : public TMixin { public: - WithJSSharedMemoryFeatureFlagsMixin() { i::FLAG_harmony_struct = true; } + WithJSSharedMemoryFeatureFlagsMixin() { i::v8_flags.harmony_struct = true; } }; using CounterMap = std::map<std::string, int>; @@ -80,6 +81,9 @@ class IsolateWrapper final { IsolateWrapper& operator=(const IsolateWrapper&) = delete; v8::Isolate* isolate() const { return isolate_; } + i::Isolate* i_isolate() const { + return reinterpret_cast<i::Isolate*>(isolate_); + } private: std::unique_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_; @@ -97,6 +101,18 @@ class WithIsolateMixin : public TMixin { v8::Isolate* v8_isolate() const { return isolate_wrapper_.isolate(); } + Local<Value> RunJS(const char* source, Local<Context> context) { + return RunJS( + v8::String::NewFromUtf8(this->v8_isolate(), source).ToLocalChecked(), + context); + } + + Local<Value> RunJS(Local<String> source, Local<Context> context) { + Local<Script> script = + v8::Script::Compile(context, source).ToLocalChecked(); + return script->Run(context).ToLocalChecked(); + } + private: v8::IsolateWrapper isolate_wrapper_; }; @@ -131,6 +147,12 @@ class WithIsolateScopeMixin : public TMixin { v8::String::NewFromUtf8(this->v8_isolate(), source).ToLocalChecked()); } + Local<Value> RunJS(Local<Context> context, const char* source) { + return RunJS( + context, + v8::String::NewFromUtf8(this->v8_isolate(), source).ToLocalChecked()); + } + MaybeLocal<Value> TryRunJS(const char* source) { return TryRunJS( v8::String::NewFromUtf8(this->v8_isolate(), source).ToLocalChecked()); @@ -138,6 +160,11 @@ class WithIsolateScopeMixin : public TMixin { static MaybeLocal<Value> TryRunJS(Isolate* isolate, Local<String> source) { auto context = isolate->GetCurrentContext(); + return TryRunJS(context, source); + } + + static MaybeLocal<Value> TryRunJS(Local<Context> context, + Local<String> source) { v8::Local<v8::Value> result; Local<Script> script = v8::Script::Compile(context, source).ToLocalChecked(); @@ -207,6 +234,10 @@ class WithIsolateScopeMixin : public TMixin { return TryRunJS(source).ToLocalChecked(); } + Local<Value> RunJS(Local<Context> context, Local<String> source) { + return TryRunJS(context, source).ToLocalChecked(); + } + MaybeLocal<Value> TryRunJS(Local<String> source) { return TryRunJS(this->v8_isolate(), source); } diff --git a/deps/v8/test/unittests/unittests.status b/deps/v8/test/unittests/unittests.status index 3242ed72ceb35d..a195afc30c7770 100644 --- a/deps/v8/test/unittests/unittests.status +++ b/deps/v8/test/unittests/unittests.status @@ -286,6 +286,7 @@ 'RegExpTest.Graph': [SKIP], 'SloppyEqualityTest.*' : [SKIP], 'DisasmX64Test.*': [SKIP], + 'RunBytecodeGraphBuilderTest.*': [SKIP], 'RunJSBranchesTest.*': [SKIP], 'RunJSCallsTest.*': [SKIP], 'RunJSExceptionsTest.*': [SKIP], diff --git a/deps/v8/test/unittests/utils/identity-map-unittest.cc b/deps/v8/test/unittests/utils/identity-map-unittest.cc index 337ef8e5ff0773..f61559f7503792 100644 --- a/deps/v8/test/unittests/utils/identity-map-unittest.cc +++ b/deps/v8/test/unittests/utils/identity-map-unittest.cc @@ -785,9 +785,9 @@ TEST_F(IdentityMapTest, CanonicalHandleScope) { } TEST_F(IdentityMapTest, GCShortCutting) { - if (FLAG_single_generation) return; + if (v8_flags.single_generation) return; // We don't create ThinStrings 
immediately when using the forwarding table. - if (FLAG_always_use_string_forwarding_table) return; + if (v8_flags.always_use_string_forwarding_table) return; ManualGCScope manual_gc_scope(isolate()); IdentityMapTester t(isolate()->heap(), zone()); Factory* factory = isolate()->factory(); @@ -816,7 +816,7 @@ TEST_F(IdentityMapTest, GCShortCutting) { // Do an explicit, real GC, this should short-cut the thin string to point // to the internalized string (this is not implemented for MinorMC). CollectGarbage(i::NEW_SPACE); - DCHECK_IMPLIES(!FLAG_minor_mc && !FLAG_optimize_for_size, + DCHECK_IMPLIES(!v8_flags.minor_mc && !v8_flags.optimize_for_size, *thin_string == *internalized_string); // Check that getting the object points to one of the handles. diff --git a/deps/v8/test/unittests/wasm/function-body-decoder-unittest.cc b/deps/v8/test/unittests/wasm/function-body-decoder-unittest.cc index 52f0e4696f969f..1b031200b50ccc 100644 --- a/deps/v8/test/unittests/wasm/function-body-decoder-unittest.cc +++ b/deps/v8/test/unittests/wasm/function-body-decoder-unittest.cc @@ -9,10 +9,10 @@ #include "src/wasm/function-body-decoder-impl.h" #include "src/wasm/leb-helper.h" #include "src/wasm/local-decl-encoder.h" -#include "src/wasm/signature-map.h" #include "src/wasm/wasm-limits.h" #include "src/wasm/wasm-module.h" #include "src/wasm/wasm-opcodes-inl.h" +#include "src/wasm/wasm-subtyping.h" #include "src/zone/zone.h" #include "test/common/wasm/flag-utils.h" #include "test/common/wasm/test-signatures.h" @@ -184,7 +184,6 @@ class TestModuleBuilder { static_cast<uint32_t>(mod.functions.size()), // func_index sig_index, // sig_index {0, 0}, // code - 0, // feedback slots false, // import false, // export declared}); // declared @@ -262,8 +261,8 @@ class FunctionBodyDecoderTestBase : public WithZoneMixin<BaseTest> { FunctionBody body(sig, 0, code.begin(), code.end()); WasmFeatures unused_detected_features = WasmFeatures::None(); DecodeResult result = - VerifyWasmCode(this->zone()->allocator(), enabled_features_, module, - &unused_detected_features, body); + ValidateFunctionBody(this->zone()->allocator(), enabled_features_, + module, &unused_detected_features, body); std::ostringstream str; if (result.failed()) { @@ -1153,8 +1152,13 @@ TEST_F(FunctionBodyDecoderTest, UnreachableRefTypes) { {WASM_UNREACHABLE, WASM_GC_OP(kExprArrayNewDefault), array_index, kExprDrop}); + ExpectValidates( + sigs.i_v(), + {WASM_UNREACHABLE, WASM_GC_OP(kExprRefTestDeprecated), struct_index}); ExpectValidates(sigs.i_v(), {WASM_UNREACHABLE, WASM_GC_OP(kExprRefTest), struct_index}); + ExpectValidates(sigs.i_v(), + {WASM_UNREACHABLE, WASM_GC_OP(kExprRefTest), kEqRefCode}); ExpectValidates(sigs.v_v(), {WASM_UNREACHABLE, WASM_GC_OP(kExprRefCast), struct_index, kExprDrop}); @@ -3287,8 +3291,8 @@ TEST_F(FunctionBodyDecoderTest, Regression709741) { FunctionBody body(sigs.v_v(), 0, code, code + i); WasmFeatures unused_detected_features; DecodeResult result = - VerifyWasmCode(this->zone()->allocator(), WasmFeatures::All(), nullptr, - &unused_detected_features, body); + ValidateFunctionBody(this->zone()->allocator(), WasmFeatures::All(), + nullptr, &unused_detected_features, body); if (result.ok()) { std::ostringstream str; str << "Expected verification to fail"; @@ -4305,24 +4309,31 @@ TEST_F(FunctionBodyDecoderTest, RefTestCast) { HeapType::Representation func_heap_2 = static_cast<HeapType::Representation>(builder.AddSignature(sigs.i_v())); - std::tuple<HeapType::Representation, HeapType::Representation, bool> tests[] = - 
@@ -4305,24 +4309,31 @@ TEST_F(FunctionBodyDecoderTest, RefTestCast) {
   HeapType::Representation func_heap_2 =
       static_cast<HeapType::Representation>(builder.AddSignature(sigs.i_v()));
 
-  std::tuple<HeapType::Representation, HeapType::Representation, bool> tests[] =
-      {std::make_tuple(HeapType::kData, array_heap, true),
-       std::make_tuple(HeapType::kData, super_struct_heap, true),
-       std::make_tuple(HeapType::kFunc, func_heap_1, true),
-       std::make_tuple(func_heap_1, func_heap_1, true),
-       std::make_tuple(func_heap_1, func_heap_2, true),
-       std::make_tuple(super_struct_heap, sub_struct_heap, true),
-       std::make_tuple(array_heap, sub_struct_heap, true),
-       std::make_tuple(super_struct_heap, func_heap_1, true),
-       std::make_tuple(HeapType::kEq, super_struct_heap, false),
-       std::make_tuple(HeapType::kExtern, func_heap_1, false),
-       std::make_tuple(HeapType::kAny, array_heap, false),
-       std::make_tuple(HeapType::kI31, array_heap, false)};
+  std::tuple<HeapType::Representation, HeapType::Representation, bool, bool>
+      tests[] = {
+          std::make_tuple(HeapType::kData, array_heap, true, true),
+          std::make_tuple(HeapType::kData, super_struct_heap, true, true),
+          std::make_tuple(HeapType::kFunc, func_heap_1, true, true),
+          std::make_tuple(func_heap_1, func_heap_1, true, true),
+          std::make_tuple(func_heap_1, func_heap_2, true, true),
+          std::make_tuple(super_struct_heap, sub_struct_heap, true, true),
+          std::make_tuple(array_heap, sub_struct_heap, true, true),
+          std::make_tuple(super_struct_heap, func_heap_1, true, false),
+          std::make_tuple(HeapType::kEq, super_struct_heap, false, true),
+          std::make_tuple(HeapType::kExtern, func_heap_1, false, false),
+          std::make_tuple(HeapType::kAny, array_heap, false, true),
+          std::make_tuple(HeapType::kI31, array_heap, false, true),
+          std::make_tuple(HeapType::kNone, array_heap, true, true),
+          std::make_tuple(HeapType::kNone, func_heap_1, true, false),
+      };
 
   for (auto test : tests) {
     HeapType from_heap = HeapType(std::get<0>(test));
     HeapType to_heap = HeapType(std::get<1>(test));
     bool should_pass = std::get<2>(test);
+    bool should_pass_ref_test = std::get<3>(test);
+    SCOPED_TRACE("from_heap = " + from_heap.name() +
+                 ", to_heap = " + to_heap.name());
 
     ValueType test_reps[] = {kWasmI32, ValueType::RefNull(from_heap)};
     FunctionSig test_sig(1, 1, test_reps);
@@ -4332,7 +4343,8 @@ TEST_F(FunctionBodyDecoderTest, RefTestCast) {
     FunctionSig cast_sig(1, 1, cast_reps);
 
     if (should_pass) {
-      ExpectValidates(&test_sig, {WASM_REF_TEST(WASM_LOCAL_GET(0),
+      ExpectValidates(&test_sig,
+                      {WASM_REF_TEST_DEPRECATED(WASM_LOCAL_GET(0),
                                                 WASM_HEAP_TYPE(to_heap))});
       ExpectValidates(&cast_sig, {WASM_REF_CAST(WASM_LOCAL_GET(0),
                                                 WASM_HEAP_TYPE(to_heap))});
@@ -4342,20 +4354,40 @@ TEST_F(FunctionBodyDecoderTest, RefTestCast) {
           "local.get of type " + test_reps[1].name();
       ExpectFailure(&test_sig,
-                    {WASM_REF_TEST(WASM_LOCAL_GET(0), WASM_HEAP_TYPE(to_heap))},
+                    {WASM_REF_TEST_DEPRECATED(WASM_LOCAL_GET(0),
+                                              WASM_HEAP_TYPE(to_heap))},
                     kAppendEnd, ("ref.test" + error_message).c_str());
       ExpectFailure(&cast_sig,
                     {WASM_REF_CAST(WASM_LOCAL_GET(0), WASM_HEAP_TYPE(to_heap))},
                     kAppendEnd, ("ref.cast" + error_message).c_str());
     }
+
+    if (should_pass_ref_test) {
+      ExpectValidates(&test_sig, {WASM_REF_TEST(WASM_LOCAL_GET(0),
+                                                WASM_HEAP_TYPE(to_heap))});
+    } else {
+      std::string error_message =
+          "Invalid types for ref.test: local.get of type " +
+          cast_reps[1].name() +
+          " has to be in the same reference type hierarchy as (ref " +
+          to_heap.name() + ")";
+      ExpectFailure(&test_sig,
+                    {WASM_REF_TEST(WASM_LOCAL_GET(0), WASM_HEAP_TYPE(to_heap))},
+                    kAppendEnd, error_message.c_str());
+    }
   }
 
   // Trivial type error.
   ExpectFailure(sigs.v_v(),
-                {WASM_REF_TEST(WASM_I32V(1), array_heap), kExprDrop},
+                {WASM_REF_TEST_DEPRECATED(WASM_I32V(1), array_heap), kExprDrop},
                 kAppendEnd,
                 "ref.test[0] expected subtype of (ref null func) or "
                 "(ref null data), found i32.const of type i32");
+  ExpectFailure(sigs.v_v(),
+                {WASM_REF_TEST(WASM_I32V(1), array_heap), kExprDrop},
+                kAppendEnd,
+                "Invalid types for ref.test: i32.const of type i32 has to be "
+                "in the same reference type hierarchy as (ref 0)");
   ExpectFailure(sigs.v_v(),
                 {WASM_REF_CAST(WASM_I32V(1), array_heap), kExprDrop},
                 kAppendEnd,
@@ -4691,13 +4723,16 @@ class WasmOpcodeLengthTest : public TestWithZone {
       DCHECK_LE(static_cast<uint32_t>(opcode), 0xFF);
       bytes[0] = static_cast<byte>(opcode);
       // Special case: select_with_type insists on a {1} immediate.
-      if (opcode == kExprSelectWithType) bytes[1] = 1;
+      if (opcode == kExprSelectWithType) {
+        bytes[1] = 1;
+        bytes[2] = kAnyRefCode;
+      }
     }
 
     WasmFeatures detected;
-    WasmDecoder<Decoder::kNoValidation> decoder(
+    WasmDecoder<Decoder::kBooleanValidation> decoder(
         this->zone(), nullptr, WasmFeatures::All(), &detected, nullptr, bytes,
         bytes + sizeof(bytes), 0);
-    WasmDecoder<Decoder::kNoValidation>::OpcodeLength(&decoder, bytes);
+    WasmDecoder<Decoder::kBooleanValidation>::OpcodeLength(&decoder, bytes);
     EXPECT_TRUE(decoder.ok())
         << opcode << " aka " << WasmOpcodes::OpcodeName(opcode) << ": "
        << decoder.error().message();
@@ -4956,17 +4991,15 @@ TEST_F(TypeReaderTest, HeapTypeDecodingTest) {
   }
 }
 
-using TypesOfLocals = ZoneVector<ValueType>;
-
 class LocalDeclDecoderTest : public TestWithZone {
  public:
   v8::internal::AccountingAllocator allocator;
   WasmFeatures enabled_features_;
 
-  size_t ExpectRun(TypesOfLocals map, size_t pos, ValueType expected,
+  size_t ExpectRun(ValueType* local_types, size_t pos, ValueType expected,
                    size_t count) {
     for (size_t i = 0; i < count; i++) {
-      EXPECT_EQ(expected, map[pos++]);
+      EXPECT_EQ(expected, local_types[pos++]);
     }
     return pos;
   }
@@ -4975,27 +5008,27 @@ class LocalDeclDecoderTest : public TestWithZone {
                         const byte* end) {
     WasmModule module;
     return i::wasm::DecodeLocalDecls(enabled_features_, decls, &module, start,
-                                     end);
+                                     end, zone());
   }
 };
 
 TEST_F(LocalDeclDecoderTest, EmptyLocals) {
-  BodyLocalDecls decls(zone());
+  BodyLocalDecls decls;
   bool result = DecodeLocalDecls(&decls, nullptr, nullptr);
   EXPECT_FALSE(result);
 }
 
 TEST_F(LocalDeclDecoderTest, NoLocals) {
   static const byte data[] = {0};
-  BodyLocalDecls decls(zone());
+  BodyLocalDecls decls;
   bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
   EXPECT_TRUE(result);
-  EXPECT_TRUE(decls.type_list.empty());
+  EXPECT_EQ(0u, decls.num_locals);
 }
 
 TEST_F(LocalDeclDecoderTest, WrongLocalDeclsCount1) {
   static const byte data[] = {1};
-  BodyLocalDecls decls(zone());
+  BodyLocalDecls decls;
   bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
   EXPECT_FALSE(result);
 }
@@ -5003,7 +5036,7 @@ TEST_F(LocalDeclDecoderTest, WrongLocalDeclsCount1) {
 TEST_F(LocalDeclDecoderTest, WrongLocalDeclsCount2) {
   static const byte data[] = {2, 1,
                               static_cast<byte>(kWasmI32.value_type_code())};
-  BodyLocalDecls decls(zone());
+  BodyLocalDecls decls;
   bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
   EXPECT_FALSE(result);
 }
@@ -5012,13 +5045,12 @@ TEST_F(LocalDeclDecoderTest, OneLocal) {
   for (size_t i = 0; i < arraysize(kValueTypes); i++) {
     ValueType type = kValueTypes[i];
     const byte data[] = {1, 1, static_cast<byte>(type.value_type_code())};
-    BodyLocalDecls decls(zone());
+    BodyLocalDecls decls;
     bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
     EXPECT_TRUE(result);
-    EXPECT_EQ(1u, decls.type_list.size());
+    EXPECT_EQ(1u, decls.num_locals);
 
-    TypesOfLocals map = decls.type_list;
-    EXPECT_EQ(type, map[0]);
+    EXPECT_EQ(type, decls.local_types[0]);
   }
 }
 
@@ -5026,15 +5058,12 @@ TEST_F(LocalDeclDecoderTest, FiveLocals) {
   for (size_t i = 0; i < arraysize(kValueTypes); i++) {
     ValueType type = kValueTypes[i];
     const byte data[] = {1, 5, static_cast<byte>(type.value_type_code())};
-    BodyLocalDecls decls(zone());
+    BodyLocalDecls decls;
     bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
     EXPECT_TRUE(result);
     EXPECT_EQ(sizeof(data), decls.encoded_size);
-    EXPECT_EQ(5u, decls.type_list.size());
-
-    TypesOfLocals map = decls.type_list;
-    EXPECT_EQ(5u, map.size());
-    ExpectRun(map, 0, type, 5);
+    EXPECT_EQ(5u, decls.num_locals);
+    ExpectRun(decls.local_types, 0, type, 5);
   }
 }
 
@@ -5045,20 +5074,17 @@ TEST_F(LocalDeclDecoderTest, MixedLocals) {
       for (byte d = 0; d < 3; d++) {
         const byte data[] = {4, a, kI32Code, b, kI64Code,
                              c, kF32Code, d, kF64Code};
-        BodyLocalDecls decls(zone());
+        BodyLocalDecls decls;
         bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
         EXPECT_TRUE(result);
         EXPECT_EQ(sizeof(data), decls.encoded_size);
-        EXPECT_EQ(static_cast<uint32_t>(a + b + c + d),
-                  decls.type_list.size());
-
-        TypesOfLocals map = decls.type_list;
+        EXPECT_EQ(static_cast<uint32_t>(a + b + c + d), decls.num_locals);
 
         size_t pos = 0;
-        pos = ExpectRun(map, pos, kWasmI32, a);
-        pos = ExpectRun(map, pos, kWasmI64, b);
-        pos = ExpectRun(map, pos, kWasmF32, c);
-        pos = ExpectRun(map, pos, kWasmF64, d);
+        pos = ExpectRun(decls.local_types, pos, kWasmI32, a);
+        pos = ExpectRun(decls.local_types, pos, kWasmI64, b);
+        pos = ExpectRun(decls.local_types, pos, kWasmF32, c);
+        pos = ExpectRun(decls.local_types, pos, kWasmF64, d);
       }
     }
   }
 }
@@ -5075,16 +5101,15 @@ TEST_F(LocalDeclDecoderTest, UseEncoder) {
   local_decls.AddLocals(212, kWasmI64);
   local_decls.Prepend(zone(), &data, &end);
 
-  BodyLocalDecls decls(zone());
+  BodyLocalDecls decls;
   bool result = DecodeLocalDecls(&decls, data, end);
   EXPECT_TRUE(result);
-  EXPECT_EQ(5u + 1337u + 212u, decls.type_list.size());
+  EXPECT_EQ(5u + 1337u + 212u, decls.num_locals);
 
-  TypesOfLocals map = decls.type_list;
   size_t pos = 0;
-  pos = ExpectRun(map, pos, kWasmF32, 5);
-  pos = ExpectRun(map, pos, kWasmI32, 1337);
-  pos = ExpectRun(map, pos, kWasmI64, 212);
+  pos = ExpectRun(decls.local_types, pos, kWasmF32, 5);
+  pos = ExpectRun(decls.local_types, pos, kWasmI32, 1337);
+  pos = ExpectRun(decls.local_types, pos, kWasmI64, 212);
 }
 
 TEST_F(LocalDeclDecoderTest, InvalidTypeIndex) {
@@ -5095,7 +5120,7 @@ TEST_F(LocalDeclDecoderTest, InvalidTypeIndex) {
   LocalDeclEncoder local_decls(zone());
   local_decls.AddLocals(1, ValueType::RefNull(0));
 
-  BodyLocalDecls decls(zone());
+  BodyLocalDecls decls;
   bool result = DecodeLocalDecls(&decls, data, end);
   EXPECT_FALSE(result);
 }
@@ -5160,8 +5185,8 @@ TEST_F(BytecodeIteratorTest, ForeachOffset) {
 
 TEST_F(BytecodeIteratorTest, WithLocalDecls) {
   byte code[] = {1, 1, kI32Code, WASM_I32V_1(9), WASM_I32V_1(11)};
-  BodyLocalDecls decls(zone());
-  BytecodeIterator iter(code, code + sizeof(code), &decls);
+  BodyLocalDecls decls;
+  BytecodeIterator iter(code, code + sizeof(code), &decls, zone());
 
   EXPECT_EQ(3u, decls.encoded_size);
   EXPECT_EQ(3u, iter.pc_offset());
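Note: BodyLocalDecls no longer owns a ZoneVector; it now exposes a plain num_locals count plus a local_types array allocated in a caller-supplied Zone, which is why DecodeLocalDecls and BytecodeIterator gain an explicit zone argument above. Decode sketch mirroring these tests (buffer contents and surrounding names are assumptions):

    // Sketch: one declaration group of three i32 locals, new interface.
    const byte data[] = {1, 3, kI32Code};  // count=1 group, 3 locals, i32
    BodyLocalDecls decls;                  // no zone in the constructor now
    bool ok = DecodeLocalDecls(WasmFeatures::All(), &decls, module, data,
                               data + sizeof(data), zone);
    // On success: decls.num_locals == 3, decls.local_types[0] == kWasmI32,
    // and decls.encoded_size covers the prefix that was consumed.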
diff --git a/deps/v8/test/unittests/wasm/memory-protection-unittest.cc b/deps/v8/test/unittests/wasm/memory-protection-unittest.cc
index a6b2feaa6c6f40..4c137757033644 100644
--- a/deps/v8/test/unittests/wasm/memory-protection-unittest.cc
+++ b/deps/v8/test/unittests/wasm/memory-protection-unittest.cc
@@ -128,13 +128,12 @@ class MemoryProtectionTest : public TestWithNativeContext {
                      DecodingMethod::kSync, GetWasmEngine()->allocator());
     CHECK(result.ok());
 
-    Handle<FixedArray> export_wrappers;
     ErrorThrower thrower(isolate(), "");
     constexpr int kNoCompilationId = 0;
     std::shared_ptr<NativeModule> native_module = CompileToNativeModule(
         isolate(), WasmFeatures::All(), &thrower, std::move(result).value(),
-        ModuleWireBytes{base::ArrayVector(module_bytes)}, &export_wrappers,
-        kNoCompilationId, v8::metrics::Recorder::ContextId::Empty());
+        ModuleWireBytes{base::ArrayVector(module_bytes)}, kNoCompilationId,
+        v8::metrics::Recorder::ContextId::Empty());
     CHECK(!thrower.error());
     CHECK_NOT_NULL(native_module);
diff --git a/deps/v8/test/unittests/wasm/module-decoder-unittest.cc b/deps/v8/test/unittests/wasm/module-decoder-unittest.cc
index cd72ac75c06cd6..9b0dd9c809e382 100644
--- a/deps/v8/test/unittests/wasm/module-decoder-unittest.cc
+++ b/deps/v8/test/unittests/wasm/module-decoder-unittest.cc
@@ -1029,7 +1029,6 @@ TEST_F(WasmModuleVerifyTest, InvalidArrayTypeDef) {
 TEST_F(WasmModuleVerifyTest, TypeCanonicalization) {
   WASM_FEATURE_SCOPE(typed_funcref);
   WASM_FEATURE_SCOPE(gc);
-  FLAG_SCOPE(wasm_type_canonicalization);
   static const byte identical_group[] = {
       SECTION(Type,            // --
               ENTRY_COUNT(2),  // two identical rec. groups
@@ -1070,7 +1069,6 @@ TEST_F(WasmModuleVerifyTest, TypeCanonicalization) {
 TEST_F(WasmModuleVerifyTest, InvalidSupertypeInRecGroup) {
   WASM_FEATURE_SCOPE(typed_funcref);
   WASM_FEATURE_SCOPE(gc);
-  FLAG_SCOPE(wasm_type_canonicalization);
   static const byte invalid_supertype[] = {
       SECTION(Type, ENTRY_COUNT(1),                         // --
               kWasmRecursiveTypeGroupCode, ENTRY_COUNT(2),  // --
@@ -1310,16 +1308,13 @@ TEST_F(WasmModuleVerifyTest, CanonicalTypeIds) {
 
   const WasmModule* module = result.value().get();
   EXPECT_EQ(5u, module->types.size());
-  EXPECT_EQ(5u, module->per_module_canonical_type_ids.size());
-  EXPECT_EQ(2u, module->signature_map.size());
-
-  // No canonicalization for structs.
-  EXPECT_EQ(0u, module->per_module_canonical_type_ids[0]);
-  EXPECT_EQ(0u, module->per_module_canonical_type_ids[1]);
-  EXPECT_EQ(1u, module->per_module_canonical_type_ids[2]);
-  EXPECT_EQ(0u, module->per_module_canonical_type_ids[3]);
-  // No canonicalization for arrays.
-  EXPECT_EQ(0u, module->per_module_canonical_type_ids[4]);
+  EXPECT_EQ(5u, module->isorecursive_canonical_type_ids.size());
+
+  EXPECT_EQ(0u, module->isorecursive_canonical_type_ids[0]);
+  EXPECT_EQ(1u, module->isorecursive_canonical_type_ids[1]);
+  EXPECT_EQ(2u, module->isorecursive_canonical_type_ids[2]);
+  EXPECT_EQ(1u, module->isorecursive_canonical_type_ids[3]);
+  EXPECT_EQ(3u, module->isorecursive_canonical_type_ids[4]);
 }
 
 TEST_F(WasmModuleVerifyTest, DataSegmentWithImmutableImportedGlobal) {
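Note: with isorecursive canonicalization always on, the old per-module signature map (which only canonicalized function signatures) is replaced by isorecursive_canonical_type_ids covering every type. Sketch of the invariant the updated expectations encode (indices refer to the test module above; helper names assumed):

    // Structurally identical recursion groups share one canonical id;
    // distinct groups get fresh ids, for structs and arrays too.
    EXPECT_EQ(module->isorecursive_canonical_type_ids[1],
              module->isorecursive_canonical_type_ids[3]);
    EXPECT_NE(module->isorecursive_canonical_type_ids[0],
              module->isorecursive_canonical_type_ids[2]);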
diff --git a/deps/v8/test/unittests/wasm/subtyping-unittest.cc b/deps/v8/test/unittests/wasm/subtyping-unittest.cc
index caf04321a10471..81648716ceed5a 100644
--- a/deps/v8/test/unittests/wasm/subtyping-unittest.cc
+++ b/deps/v8/test/unittests/wasm/subtyping-unittest.cc
@@ -174,9 +174,7 @@ TEST_F(WasmSubtypingTest, Subtyping) {
     TypeInModule(type_result, module_result))
 
   for (WasmModule* module : {module1, module2}) {
-    // For cross module subtyping, we need to enable type canonicalization.
     // Type judgements across modules should work the same as within one module.
-    FLAG_VALUE_SCOPE(wasm_type_canonicalization, module == module2);
 
     // Value types are unrelated, except if they are equal.
     for (ValueType subtype : numeric_types) {
@@ -311,7 +309,6 @@ TEST_F(WasmSubtypingTest, Subtyping) {
 
   {  // Canonicalization tests.
-    FLAG_SCOPE(wasm_type_canonicalization);
 
     // Groups should only be canonicalized to identical groups.
     IDENTICAL(18, 22);
diff --git a/deps/v8/test/unittests/web-snapshot/web-snapshot-unittest.cc b/deps/v8/test/unittests/web-snapshot/web-snapshot-unittest.cc
index 9c15622183dbde..1f1081805cdac0 100644
--- a/deps/v8/test/unittests/web-snapshot/web-snapshot-unittest.cc
+++ b/deps/v8/test/unittests/web-snapshot/web-snapshot-unittest.cc
@@ -503,8 +503,8 @@ TEST_F(WebSnapshotTest, SFIDeduplicationClasses) {
 }
 
 TEST_F(WebSnapshotTest, SFIDeduplicationAfterBytecodeFlushing) {
-  FLAG_stress_flush_code = true;
-  FLAG_flush_bytecode = true;
+  v8_flags.stress_flush_code = true;
+  v8_flags.flush_bytecode = true;
   v8::Isolate* isolate = v8_isolate();
 
   WebSnapshotData snapshot_data;
@@ -587,8 +587,8 @@ TEST_F(WebSnapshotTest, SFIDeduplicationAfterBytecodeFlushing) {
 }
 
 TEST_F(WebSnapshotTest, SFIDeduplicationAfterBytecodeFlushingClasses) {
-  FLAG_stress_flush_code = true;
-  FLAG_flush_bytecode = true;
+  v8_flags.stress_flush_code = true;
+  v8_flags.flush_bytecode = true;
   v8::Isolate* isolate = v8_isolate();
 
   WebSnapshotData snapshot_data;
@@ -1079,5 +1079,57 @@ TEST_F(WebSnapshotTest, ConstructorFunctionKinds) {
   }
 }
 
+TEST_F(WebSnapshotTest, SlackElementsInObjects) {
+  v8::Isolate* isolate = v8_isolate();
+  v8::HandleScope scope(isolate);
+
+  WebSnapshotData snapshot_data;
+  {
+    v8::Local<v8::Context> new_context = v8::Context::New(isolate);
+    v8::Context::Scope context_scope(new_context);
+    const char* snapshot_source =
+        "var foo = {};"
+        "for (let i = 0; i < 100; ++i) {"
+        "  foo[i] = i;"
+        "}"
+        "var bar = {};"
+        "for (let i = 0; i < 100; ++i) {"
+        "  bar[i] = {};"
+        "}";
+
+    RunJS(snapshot_source);
+    v8::Local<v8::PrimitiveArray> exports = v8::PrimitiveArray::New(isolate, 2);
+    exports->Set(isolate, 0,
+                 v8::String::NewFromUtf8(isolate, "foo").ToLocalChecked());
+    exports->Set(isolate, 1,
+                 v8::String::NewFromUtf8(isolate, "bar").ToLocalChecked());
+    WebSnapshotSerializer serializer(isolate);
+    CHECK(serializer.TakeSnapshot(new_context, exports, snapshot_data));
+    CHECK(!serializer.has_error());
+    CHECK_NOT_NULL(snapshot_data.buffer);
+  }
+
+  {
+    v8::Local<v8::Context> new_context = v8::Context::New(isolate);
+    v8::Context::Scope context_scope(new_context);
+    WebSnapshotDeserializer deserializer(isolate, snapshot_data.buffer,
+                                         snapshot_data.buffer_size);
+    CHECK(deserializer.Deserialize());
+    CHECK(!deserializer.has_error());
+
+    Handle<JSObject> foo =
+        Handle<JSObject>::cast(Utils::OpenHandle<v8::Object, JSReceiver>(
+            RunJS("foo").As<v8::Object>()));
+    CHECK_EQ(100, foo->elements().length());
+    CHECK_EQ(HOLEY_ELEMENTS, foo->GetElementsKind());
+
+    Handle<JSObject> bar =
+        Handle<JSObject>::cast(Utils::OpenHandle<v8::Object, JSReceiver>(
+            RunJS("bar").As<v8::Object>()));
+    CHECK_EQ(100, bar->elements().length());
+    CHECK_EQ(HOLEY_ELEMENTS, bar->GetElementsKind());
+  }
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/deps/v8/third_party/inspector_protocol/BUILD.gn b/deps/v8/third_party/inspector_protocol/BUILD.gn
index d3fb166a0fd069..73097ad359b140 100644
--- a/deps/v8/third_party/inspector_protocol/BUILD.gn
+++ b/deps/v8/third_party/inspector_protocol/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright 2019 the V8 project authors. All rights reserved.
+# Copyright 2019 the V8 project Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/LICENSE b/deps/v8/third_party/inspector_protocol/LICENSE
index 800468e5763479..1e5610962e92dd 100644
--- a/deps/v8/third_party/inspector_protocol/LICENSE
+++ b/deps/v8/third_party/inspector_protocol/LICENSE
@@ -1,4 +1,4 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
diff --git a/deps/v8/third_party/inspector_protocol/README.v8 b/deps/v8/third_party/inspector_protocol/README.v8
index 8e9cd9c12746e0..be83565de2b7ea 100644
--- a/deps/v8/third_party/inspector_protocol/README.v8
+++ b/deps/v8/third_party/inspector_protocol/README.v8
@@ -2,7 +2,7 @@ Name: inspector protocol
 Short Name: inspector_protocol
 URL: https://chromium.googlesource.com/deps/inspector_protocol/
 Version: 0
-Revision: 134539780e606a77d660d58bf95b5ab55875bc3c
+Revision: dec7ec1932f5277b933ed8a675cc6eb7cfc36f88
 License: BSD
 License File: LICENSE
 Security Critical: no
diff --git a/deps/v8/third_party/inspector_protocol/check_protocol_compatibility.py b/deps/v8/third_party/inspector_protocol/check_protocol_compatibility.py
index 9b6223dd96a41b..0d40f26e14a39d 100755
--- a/deps/v8/third_party/inspector_protocol/check_protocol_compatibility.py
+++ b/deps/v8/third_party/inspector_protocol/check_protocol_compatibility.py
@@ -1,31 +1,7 @@
 #!/usr/bin/env python3
-# Copyright (c) 2011 Google Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# Copyright 2011 The Chromium Authors
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
 #
 # Inspector protocol validator.
 #
diff --git a/deps/v8/third_party/inspector_protocol/code_generator.py b/deps/v8/third_party/inspector_protocol/code_generator.py
index c3768b8d3e054e..b9a0b644efee88 100755
--- a/deps/v8/third_party/inspector_protocol/code_generator.py
+++ b/deps/v8/third_party/inspector_protocol/code_generator.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
@@ -279,15 +279,16 @@ def create_any_type_definition():
 def create_string_type_definition():
   # pylint: disable=W0622
   return {
-      "return_type": "String",
-      "pass_type": "const String&",
-      "to_pass_type": "%s",
-      "to_raw_type": "%s",
-      "to_rvalue": "%s",
-      "type": "String",
-      "raw_type": "String",
-      "raw_pass_type": "const String&",
-      "raw_return_type": "String",
+      "return_type": "String",
+      "pass_type": "const String&",
+      "to_pass_type": "%s",
+      "to_raw_type": "%s",
+      "to_rvalue": "%s",
+      "type": "String",
+      "raw_type": "String",
+      "raw_pass_type": "const String&",
+      "raw_return_type": "String",
+      "is_primitive": True
   }
 
@@ -324,19 +325,19 @@ def create_primitive_type_definition(type):
       "boolean": "TypeBoolean",
   }
   return {
-      "return_type": typedefs[type],
-      "pass_type": typedefs[type],
-      "to_pass_type": "%s",
-      "to_raw_type": "%s",
-      "to_rvalue": "%s",
-      "type": typedefs[type],
-      "raw_type": typedefs[type],
-      "raw_pass_type": typedefs[type],
-      "raw_return_type": typedefs[type],
-      "default_value": defaults[type]
+      "return_type": typedefs[type],
+      "pass_type": typedefs[type],
+      "to_pass_type": "%s",
+      "to_raw_type": "%s",
+      "to_rvalue": "%s",
+      "type": typedefs[type],
+      "raw_type": typedefs[type],
+      "raw_pass_type": typedefs[type],
+      "raw_return_type": typedefs[type],
+      "default_value": defaults[type],
+      "is_primitive": True
   }
 
-
 def wrap_array_definition(type):
   # pylint: disable=W0622
   return {
@@ -424,6 +425,22 @@ def all_references(self, json):
         refs.add(json["$ref"])
     return refs
 
+  def check_if_dependency_declared(self, domain, refs):
+    dependencies = domain.get('dependencies', set())
+    for ref in refs:
+      type_definition = self.type_definitions[ref]
+      if type_definition.get('is_primitive', False):
+        continue
+      domain_match = re.match(r'^(.*)[.]', ref)
+      if domain_match:
+        referenced_domain_name = domain_match.group(1)
+        if referenced_domain_name != domain[
+            'domain'] and not referenced_domain_name in dependencies:
+          sys.stderr.write((
+              "Domains [%s] uses type [%s] from domain [%s], but did not declare the dependency\n\n"
+          ) % (domain["domain"], ref, referenced_domain_name))
+          exit(1)
+
   def generate_used_types(self):
     all_refs = set()
     for domain in self.json_api["domains"]:
@@ -431,11 +448,19 @@ def generate_used_types(self):
       if "commands" in domain:
         for command in domain["commands"]:
           if self.generate_command(domain_name, command["name"]):
-            all_refs |= self.all_references(command)
+            all_refs_command = self.all_references(command)
+            # If the command has a redirect, it is as if it didn't exist on this domain.
+            if not command.get('redirect', False):
+              self.check_if_dependency_declared(domain, all_refs_command)
+            all_refs |= all_refs_command
+
       if "events" in domain:
         for event in domain["events"]:
           if self.generate_event(domain_name, event["name"]):
-            all_refs |= self.all_references(event)
+            all_refs_event = self.all_references(event)
+            self.check_if_dependency_declared(domain, all_refs_event)
+            all_refs |= all_refs_event
+
     dependencies = self.generate_type_dependencies()
     queue = set(all_refs)
diff --git a/deps/v8/third_party/inspector_protocol/concatenate_protocols.py b/deps/v8/third_party/inspector_protocol/concatenate_protocols.py
index a2e869431c3ac6..11f1fed06c49f6 100755
--- a/deps/v8/third_party/inspector_protocol/concatenate_protocols.py
+++ b/deps/v8/third_party/inspector_protocol/concatenate_protocols.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/convert_protocol_to_json.py b/deps/v8/third_party/inspector_protocol/convert_protocol_to_json.py
index 23f8622d3e4890..e099c346e37389 100755
--- a/deps/v8/third_party/inspector_protocol/convert_protocol_to_json.py
+++ b/deps/v8/third_party/inspector_protocol/convert_protocol_to_json.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright 2017 The Chromium Authors. All rights reserved.
+# Copyright 2017 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/cbor.cc b/deps/v8/third_party/inspector_protocol/crdtp/cbor.cc
index b2d5c2e1037a77..d6359f72a6fc4a 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/cbor.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/cbor.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
@@ -344,7 +344,7 @@ void EncodeDouble(double value, std::vector<uint8_t>* out) {
 
 void EnvelopeEncoder::EncodeStart(std::vector<uint8_t>* out) {
   assert(byte_size_pos_ == 0);
   out->push_back(kInitialByteForEnvelope);
-  // TODO(caseq): encode tag as an additional byte here.
+  out->push_back(kCBOREnvelopeTag);
   out->push_back(kInitialByteFor32BitLengthByteString);
   byte_size_pos_ = out->size();
   out->resize(out->size() + sizeof(uint32_t));
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/cbor.h b/deps/v8/third_party/inspector_protocol/crdtp/cbor.h
index 6e3fcef30996f4..9e71e07163458d 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/cbor.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/cbor.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/cbor_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/cbor_test.cc
index 3fcf702eb173ec..abf64d3a92fbad 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/cbor_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/cbor_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
+// Copyright 2018 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
@@ -727,9 +727,9 @@ TEST(JsonCborRoundtrip, EncodingDecoding) {
   span<uint8_t> ascii_in = SpanFrom(json);
   json::ParseJSON(ascii_in, encoder.get());
   std::vector<uint8_t> expected = {
-      0xd8,         // envelope
-      0x5a,         // byte string with 32 bit length
-      0, 0, 0, 94,  // length is 94 bytes
+      0xd8, 0x18,   // envelope
+      0x5a,         // byte string with 32 bit length
+      0, 0, 0, 95,  // length is 95 bytes
   };
   expected.push_back(0xbf);  // indef length map start
   EncodeString8(SpanFrom("string"), &expected);
@@ -752,7 +752,8 @@ TEST(JsonCborRoundtrip, EncodingDecoding) {
   EncodeString8(SpanFrom("null"), &expected);
   expected.push_back(7 << 5 | 22);  // RFC 7049 Section 2.3, Table 2: null
   EncodeString8(SpanFrom("array"), &expected);
-  expected.push_back(0xd8);  // envelope
+  expected.push_back(0xd8);  // envelope (tag first byte)
+  expected.push_back(0x18);  // envelope (tag second byte)
   expected.push_back(0x5a);  // byte string with 32 bit length
   // the length is 5 bytes (that's up to end indef length array below).
   for (uint8_t ch : std::array<uint8_t, 4>{{0, 0, 0, 5}})
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/dispatch.cc b/deps/v8/third_party/inspector_protocol/crdtp/dispatch.cc
index ff0d291d4242dd..c0939f0e740acc 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/dispatch.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/dispatch.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/dispatch.h b/deps/v8/third_party/inspector_protocol/crdtp/dispatch.h
index f57a9c6ff02b01..b035e21930cbc6 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/dispatch.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/dispatch.h
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/dispatch_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/dispatch_test.cc
index 8eacdee93e7c9e..faef35503bea18 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/dispatch_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/dispatch_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
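Note: EnvelopeEncoder::EncodeStart now emits an explicit envelope tag byte after the initial 0xd8, so every envelope header grows from six to seven bytes. That one-byte shift is what moves the expected lengths above (94 to 95) and the trailing-junk position below (56 to 57). Header layout sketch (constant names as used in cbor.cc above; kCBOREnvelopeTag assumed to equal 0x18, i.e. CBOR tag 24, "encoded CBOR data item"):

    // Envelope header after this change: 7 bytes before the payload.
    out->push_back(0xd8);  // kInitialByteForEnvelope: tag, 1-byte tag number
    out->push_back(0x18);  // kCBOREnvelopeTag: tag 24, embedded CBOR item
    out->push_back(0x5a);  // kInitialByteFor32BitLengthByteString
    out->resize(out->size() + sizeof(uint32_t));  // length patched at EncodeStop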
@@ -267,8 +267,8 @@ TEST(DispatchableTest, FaultyCBORTrailingJunk) {
   Dispatchable dispatchable(SpanFrom(cbor));
   EXPECT_FALSE(dispatchable.ok());
   EXPECT_EQ(DispatchCode::PARSE_ERROR, dispatchable.DispatchError().Code());
-  EXPECT_EQ(56u, trailing_junk_pos);
-  EXPECT_EQ("CBOR: trailing junk at position 56",
+  EXPECT_EQ(57u, trailing_junk_pos);
+  EXPECT_EQ("CBOR: trailing junk at position 57",
             dispatchable.DispatchError().Message());
 }
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/error_support.cc b/deps/v8/third_party/inspector_protocol/crdtp/error_support.cc
index 6fc6a033d55976..caf3009a7aee2c 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/error_support.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/error_support.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/error_support.h b/deps/v8/third_party/inspector_protocol/crdtp/error_support.h
index 34e2ce21186e1b..45d2ef7d9da3a3 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/error_support.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/error_support.h
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/error_support_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/error_support_test.cc
index f7c075d6e7bdef..af86f70fe6a122 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/error_support_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/error_support_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/export.h b/deps/v8/third_party/inspector_protocol/crdtp/export.h
index a97722e746d02c..b814e16e5e12aa 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/export.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/export.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/find_by_first.h b/deps/v8/third_party/inspector_protocol/crdtp/find_by_first.h
index ae4224141364cc..2469bd7654435e 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/find_by_first.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/find_by_first.h
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/find_by_first_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/find_by_first_test.cc
index 67d5114869e820..b62ec72167fd1b 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/find_by_first_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/find_by_first_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/frontend_channel.h b/deps/v8/third_party/inspector_protocol/crdtp/frontend_channel.h
index eba8b2acbdaa55..9f12930a1de4df 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/frontend_channel.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/frontend_channel.h
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/glue.h b/deps/v8/third_party/inspector_protocol/crdtp/glue.h
index 359c355846c1a0..3a69a5409899b5 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/glue.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/glue.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/glue_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/glue_test.cc
index 95160352ca7482..2a2c74c20fb6f3 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/glue_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/glue_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/json.cc b/deps/v8/third_party/inspector_protocol/crdtp/json.cc
index ed27d72e90b27c..47ada8518b29d4 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/json.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/json.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/json.h b/deps/v8/third_party/inspector_protocol/crdtp/json.h
index 63fce408bdf8a3..3600b9038a9972 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/json.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/json.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/json_platform.h b/deps/v8/third_party/inspector_protocol/crdtp/json_platform.h
index 5a3be592855634..80e57df25f9795 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/json_platform.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/json_platform.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/json_platform_v8.cc b/deps/v8/third_party/inspector_protocol/crdtp/json_platform_v8.cc
index 497bb708e5885f..891aa3e8d42eaa 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/json_platform_v8.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/json_platform_v8.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The V8 Authors. All rights reserved.
+// Copyright 2019 The V8 Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/json_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/json_test.cc
index 4dce7e89bb4d95..7e25888faeb1f0 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/json_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/json_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
+// Copyright 2018 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/maybe.h b/deps/v8/third_party/inspector_protocol/crdtp/maybe.h
index 97844b65a180f6..a476dd58101ff5 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/maybe.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/maybe.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 #ifndef V8_CRDTP_MAYBE_H_
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/maybe_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/maybe_test.cc
index 8e8690a1c13288..e8b549a5d83fe8 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/maybe_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/maybe_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/parser_handler.h b/deps/v8/third_party/inspector_protocol/crdtp/parser_handler.h
index 47bbe6d5e8a1e7..c583e35ad440b0 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/parser_handler.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/parser_handler.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/protocol_core.cc b/deps/v8/third_party/inspector_protocol/crdtp/protocol_core.cc
index 9263c50fd3e479..c432a91a4e8578 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/protocol_core.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/protocol_core.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/protocol_core.h b/deps/v8/third_party/inspector_protocol/crdtp/protocol_core.h
index f61bb5783b1b97..486bf7b2d6b5f8 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/protocol_core.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/protocol_core.h
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
@@ -9,6 +9,7 @@
 
 #include <memory>
 #include <string>
+#include <tuple>
 #include <vector>
 
 #include "cbor.h"
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/protocol_core_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/protocol_core_test.cc
index fcf9c2f9919641..9f92b3ae0b30e8 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/protocol_core_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/protocol_core_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/serializable.cc b/deps/v8/third_party/inspector_protocol/crdtp/serializable.cc
index 7a21ffd35fed0a..8c17569d970afc 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/serializable.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/serializable.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/serializable.h b/deps/v8/third_party/inspector_protocol/crdtp/serializable.h
index 66386711b0d79d..01c1f8b46151ba 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/serializable.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/serializable.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/serializable_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/serializable_test.cc
index d28ca69edd363c..cea9a2ec06caba 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/serializable_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/serializable_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/span.cc b/deps/v8/third_party/inspector_protocol/crdtp/span.cc
index 5953ba128755e7..7f6e7ec4019dd3 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/span.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/span.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/span.h b/deps/v8/third_party/inspector_protocol/crdtp/span.h
index ea3a6f4962f045..8f132636f3b317 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/span.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/span.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/span_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/span_test.cc
index 0f31e5a6bab760..0d4fd79760ff96 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/span_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/span_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
+// Copyright 2018 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/status.cc b/deps/v8/third_party/inspector_protocol/crdtp/status.cc
index 8d04e8e17f36a6..703eacc3286e6e 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/status.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/status.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/status.h b/deps/v8/third_party/inspector_protocol/crdtp/status.h
index 2439b1bcbed3ac..10745f83e024be 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/status.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/status.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
+// Copyright 2019 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/status_test.cc b/deps/v8/third_party/inspector_protocol/crdtp/status_test.cc
index 420686c72fe4a7..8fdfb6282357be 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/status_test.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/status_test.cc
@@ -1,4 +1,4 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
+// Copyright 2018 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/status_test_support.cc b/deps/v8/third_party/inspector_protocol/crdtp/status_test_support.cc
index 00362db24b6ede..c36bbae9557961 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/status_test_support.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/status_test_support.cc
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/status_test_support.h b/deps/v8/third_party/inspector_protocol/crdtp/status_test_support.h
index e9eef19a6852e0..3ae8eaa3de3649 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/status_test_support.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/status_test_support.h
@@ -1,4 +1,4 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/test_platform.h b/deps/v8/third_party/inspector_protocol/crdtp/test_platform.h
index c7dc7a8a7aa7b4..e6698f252e2097 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/test_platform.h
+++ b/deps/v8/third_party/inspector_protocol/crdtp/test_platform.h
@@ -1,4 +1,4 @@
-// Copyright 2019 The V8 Authors. All rights reserved.
+// Copyright 2019 The V8 Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/crdtp/test_platform_v8.cc b/deps/v8/third_party/inspector_protocol/crdtp/test_platform_v8.cc
index 51752772edb85e..c9d89eaa42f1d8 100644
--- a/deps/v8/third_party/inspector_protocol/crdtp/test_platform_v8.cc
+++ b/deps/v8/third_party/inspector_protocol/crdtp/test_platform_v8.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The V8 Authors. All rights reserved.
+// Copyright 2019 The V8 Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/inspector_protocol.gni b/deps/v8/third_party/inspector_protocol/inspector_protocol.gni
index 1e2a30cf5b77fa..8002f0794c5251 100644
--- a/deps/v8/third_party/inspector_protocol/inspector_protocol.gni
+++ b/deps/v8/third_party/inspector_protocol/inspector_protocol.gni
@@ -1,4 +1,4 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
+# Copyright 2016 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/lib/Forward_h.template b/deps/v8/third_party/inspector_protocol/lib/Forward_h.template
index 4a1a6d707da683..4de0eeb803174b 100644
--- a/deps/v8/third_party/inspector_protocol/lib/Forward_h.template
+++ b/deps/v8/third_party/inspector_protocol/lib/Forward_h.template
@@ -1,6 +1,6 @@
 // This file is generated by Forward_h.template.
 
-// Copyright 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/lib/Object_cpp.template b/deps/v8/third_party/inspector_protocol/lib/Object_cpp.template
index b639b1bb776ad0..2b08cee303d335 100644
--- a/deps/v8/third_party/inspector_protocol/lib/Object_cpp.template
+++ b/deps/v8/third_party/inspector_protocol/lib/Object_cpp.template
@@ -1,6 +1,6 @@
 // This file is generated by Object_cpp.template.
 
-// Copyright 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/lib/Object_h.template b/deps/v8/third_party/inspector_protocol/lib/Object_h.template
index f0dce5d1b7f198..fe4df6b9cc7d5e 100644
--- a/deps/v8/third_party/inspector_protocol/lib/Object_h.template
+++ b/deps/v8/third_party/inspector_protocol/lib/Object_h.template
@@ -1,6 +1,6 @@
 // This file is generated by Object_h.template.
 
-// Copyright 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/lib/Protocol_cpp.template b/deps/v8/third_party/inspector_protocol/lib/Protocol_cpp.template
index 88303a27ab9e7e..84bd6e70d6b200 100644
--- a/deps/v8/third_party/inspector_protocol/lib/Protocol_cpp.template
+++ b/deps/v8/third_party/inspector_protocol/lib/Protocol_cpp.template
@@ -1,6 +1,6 @@
 // This file is generated by Protocol_cpp.template.
 
-// Copyright 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/lib/ValueConversions_cpp.template b/deps/v8/third_party/inspector_protocol/lib/ValueConversions_cpp.template
index 998808be6c6dc3..d2ab41a2dbab6a 100644
--- a/deps/v8/third_party/inspector_protocol/lib/ValueConversions_cpp.template
+++ b/deps/v8/third_party/inspector_protocol/lib/ValueConversions_cpp.template
@@ -1,6 +1,6 @@
 // This file is generated by ValueConversions_cpp.template.
 
-// Copyright 2020 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/lib/ValueConversions_h.template b/deps/v8/third_party/inspector_protocol/lib/ValueConversions_h.template
index 0fac003d90cbc1..bf05014f10154f 100644
--- a/deps/v8/third_party/inspector_protocol/lib/ValueConversions_h.template
+++ b/deps/v8/third_party/inspector_protocol/lib/ValueConversions_h.template
@@ -1,6 +1,6 @@
 // This file is generated by ValueConversions_h.template.
 
-// Copyright 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/lib/Values_cpp.template b/deps/v8/third_party/inspector_protocol/lib/Values_cpp.template
index ac35475c3865af..dbc51694db2fa6 100644
--- a/deps/v8/third_party/inspector_protocol/lib/Values_cpp.template
+++ b/deps/v8/third_party/inspector_protocol/lib/Values_cpp.template
@@ -1,6 +1,6 @@
 // This file is generated by Values_cpp.template.
 
-// Copyright 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/lib/Values_h.template b/deps/v8/third_party/inspector_protocol/lib/Values_h.template
index 53087c914ca8c9..30cd0e16323517 100644
--- a/deps/v8/third_party/inspector_protocol/lib/Values_h.template
+++ b/deps/v8/third_party/inspector_protocol/lib/Values_h.template
@@ -1,6 +1,6 @@
 // This file is generated by Values_h.template.
 
-// Copyright 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/pdl.py b/deps/v8/third_party/inspector_protocol/pdl.py
index 9a9fec9898823d..6b448c07443c7e 100644
--- a/deps/v8/third_party/inspector_protocol/pdl.py
+++ b/deps/v8/third_party/inspector_protocol/pdl.py
@@ -1,4 +1,4 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
+# Copyright 2018 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/roll.py b/deps/v8/third_party/inspector_protocol/roll.py
index 1b7bc54e3695d3..0e3520ad70f430 100755
--- a/deps/v8/third_party/inspector_protocol/roll.py
+++ b/deps/v8/third_party/inspector_protocol/roll.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copcright 2019 The Chromium Authors. All rights reserved.
+# Copyright 2019 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/templates/Exported_h.template b/deps/v8/third_party/inspector_protocol/templates/Exported_h.template
index 6481eef50c6889..f00875ac77a2a1 100644
--- a/deps/v8/third_party/inspector_protocol/templates/Exported_h.template
+++ b/deps/v8/third_party/inspector_protocol/templates/Exported_h.template
@@ -1,6 +1,6 @@
 // This file is generated by Exported_h.template.
 
-// Copyright (c) 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/templates/Imported_h.template b/deps/v8/third_party/inspector_protocol/templates/Imported_h.template
index 47f27ac1081a53..5161d27351be47 100644
--- a/deps/v8/third_party/inspector_protocol/templates/Imported_h.template
+++ b/deps/v8/third_party/inspector_protocol/templates/Imported_h.template
@@ -1,6 +1,6 @@
 // This file is generated by Imported_h.template.
 
-// Copyright (c) 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/templates/TypeBuilder_cpp.template b/deps/v8/third_party/inspector_protocol/templates/TypeBuilder_cpp.template
index 1ddd98dc593fc6..42e832133b1e7c 100644
--- a/deps/v8/third_party/inspector_protocol/templates/TypeBuilder_cpp.template
+++ b/deps/v8/third_party/inspector_protocol/templates/TypeBuilder_cpp.template
@@ -1,6 +1,6 @@
 // This file is generated by TypeBuilder_cpp.template.
 
-// Copyright (c) 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/inspector_protocol/templates/TypeBuilder_h.template b/deps/v8/third_party/inspector_protocol/templates/TypeBuilder_h.template
index c2e21a2d31eea8..9b42833014e57d 100644
--- a/deps/v8/third_party/inspector_protocol/templates/TypeBuilder_h.template
+++ b/deps/v8/third_party/inspector_protocol/templates/TypeBuilder_h.template
@@ -1,6 +1,6 @@
 // This file is generated by TypeBuilder_h.template.
 
-// Copyright (c) 2016 The Chromium Authors. All rights reserved.
+// Copyright 2016 The Chromium Authors
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
diff --git a/deps/v8/third_party/zlib/BUILD.gn b/deps/v8/third_party/zlib/BUILD.gn
index ee7483e9ef6a4f..b85067a12bd86d 100644
--- a/deps/v8/third_party/zlib/BUILD.gn
+++ b/deps/v8/third_party/zlib/BUILD.gn
@@ -1,4 +1,4 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Copyright 2013 The Chromium Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
@@ -144,7 +144,7 @@ if (use_arm_neon_optimizations) { if (!is_win && !is_clang) { assert(!use_thin_lto, "ThinLTO fails mixing different module-level targets") - cflags_c = [ "-march=armv8-a+crc" ] + cflags_c = [ "-march=armv8-a+aes+crc" ] } sources = [ diff --git a/deps/v8/third_party/zlib/adler32_simd.c b/deps/v8/third_party/zlib/adler32_simd.c index 1354915cc099ad..58966eecf0b800 100644 --- a/deps/v8/third_party/zlib/adler32_simd.c +++ b/deps/v8/third_party/zlib/adler32_simd.c @@ -1,6 +1,6 @@ /* adler32_simd.c * - * Copyright 2017 The Chromium Authors. All rights reserved. + * Copyright 2017 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. * diff --git a/deps/v8/third_party/zlib/adler32_simd.h b/deps/v8/third_party/zlib/adler32_simd.h index 52bb14d16751c8..0b2361a5174f7a 100644 --- a/deps/v8/third_party/zlib/adler32_simd.h +++ b/deps/v8/third_party/zlib/adler32_simd.h @@ -1,6 +1,6 @@ /* adler32_simd.h * - * Copyright 2017 The Chromium Authors. All rights reserved. + * Copyright 2017 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. */ diff --git a/deps/v8/third_party/zlib/chromeconf.h b/deps/v8/third_party/zlib/chromeconf.h index 7c2241aac4dc40..5b91c86442105a 100644 --- a/deps/v8/third_party/zlib/chromeconf.h +++ b/deps/v8/third_party/zlib/chromeconf.h @@ -1,4 +1,4 @@ -/* Copyright 2017 The Chromium Authors. All rights reserved. +/* Copyright 2017 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ diff --git a/deps/v8/third_party/zlib/contrib/optimizations/chunkcopy.h b/deps/v8/third_party/zlib/contrib/optimizations/chunkcopy.h index 9c0b7cb06c2dcc..88022ef5fc2b05 100644 --- a/deps/v8/third_party/zlib/contrib/optimizations/chunkcopy.h +++ b/deps/v8/third_party/zlib/contrib/optimizations/chunkcopy.h @@ -1,6 +1,6 @@ /* chunkcopy.h -- fast chunk copy and set operations * Copyright (C) 2017 ARM, Inc. - * Copyright 2017 The Chromium Authors. All rights reserved. + * Copyright 2017 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. */ diff --git a/deps/v8/third_party/zlib/contrib/optimizations/insert_string.h b/deps/v8/third_party/zlib/contrib/optimizations/insert_string.h index a7f24a340812cc..2a04f699349435 100644 --- a/deps/v8/third_party/zlib/contrib/optimizations/insert_string.h +++ b/deps/v8/third_party/zlib/contrib/optimizations/insert_string.h @@ -1,6 +1,6 @@ /* insert_string.h * - * Copyright 2019 The Chromium Authors. All rights reserved. + * Copyright 2019 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. */ diff --git a/deps/v8/third_party/zlib/cpu_features.c b/deps/v8/third_party/zlib/cpu_features.c index 9391d7b4febcec..877d5f23d0ceae 100644 --- a/deps/v8/third_party/zlib/cpu_features.c +++ b/deps/v8/third_party/zlib/cpu_features.c @@ -1,6 +1,6 @@ /* cpu_features.c -- Processor features detection. * - * Copyright 2018 The Chromium Authors. All rights reserved. + * Copyright 2018 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. 
*/ diff --git a/deps/v8/third_party/zlib/cpu_features.h b/deps/v8/third_party/zlib/cpu_features.h index c7b15c5597623f..279246c859e87e 100644 --- a/deps/v8/third_party/zlib/cpu_features.h +++ b/deps/v8/third_party/zlib/cpu_features.h @@ -1,6 +1,6 @@ /* cpu_features.h -- Processor features detection. * - * Copyright 2018 The Chromium Authors. All rights reserved. + * Copyright 2018 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. */ diff --git a/deps/v8/third_party/zlib/crc32_simd.c b/deps/v8/third_party/zlib/crc32_simd.c index 14a8534220538c..d80beba39c0397 100644 --- a/deps/v8/third_party/zlib/crc32_simd.c +++ b/deps/v8/third_party/zlib/crc32_simd.c @@ -1,6 +1,6 @@ /* crc32_simd.c * - * Copyright 2017 The Chromium Authors. All rights reserved. + * Copyright 2017 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. */ @@ -160,6 +160,13 @@ uint32_t ZLIB_INTERNAL crc32_sse42_simd_( /* SSE4.2+PCLMUL */ */ #if defined(__clang__) +/* We need some extra types for using PMULL. + */ +#if defined(__aarch64__) +#include <arm_neon.h> +#include <arm_acle.h> +#endif + /* CRC32 intrinsics are #ifdef'ed out of arm_acle.h unless we build with an * armv8 target, which is incompatible with ThinLTO optimizations on Android. * (Namely, mixing and matching different module-level targets makes ThinLTO @@ -175,6 +182,10 @@ uint32_t ZLIB_INTERNAL crc32_sse42_simd_( /* SSE4.2+PCLMUL */ * NOTE: clang currently complains that "'+soft-float-abi' is not a recognized * feature for this target (ignoring feature)." This appears to be a harmless * bug in clang. + * + * These definitions must appear *after* including arm_acle.h otherwise that + * header may end up defining functions named __builtin_arm_crc32* that call + * themselves, creating an infinite loop when the intrinsic is called. */ /* XXX: Cannot hook into builtins with XCode for arm64. */ #if !defined(ARMV8_OS_MACOS) @@ -184,13 +195,6 @@ uint32_t ZLIB_INTERNAL crc32_sse42_simd_( /* SSE4.2+PCLMUL */ #define __crc32cw __builtin_arm_crc32cw #endif -/* We need some extra types for using PMULL. - */ -#if defined(__aarch64__) -#include <arm_neon.h> -#include <arm_acle.h> -#endif - #if defined(__aarch64__) #define TARGET_ARMV8_WITH_CRC __attribute__((target("aes,crc"))) #else // !defined(__aarch64__) @@ -202,6 +206,7 @@ uint32_t ZLIB_INTERNAL crc32_sse42_simd_( /* SSE4.2+PCLMUL */ * allowed. We can just include arm_acle.h. */ #include <arm_acle.h> +#include <arm_neon.h> #define TARGET_ARMV8_WITH_CRC #else // !defined(__GNUC__) && !defined(_aarch64__) #error ARM CRC32 SIMD extensions only supported for Clang and GCC diff --git a/deps/v8/third_party/zlib/crc32_simd.h b/deps/v8/third_party/zlib/crc32_simd.h index 6985cbb1cd81d5..c0346dc3d903e3 100644 --- a/deps/v8/third_party/zlib/crc32_simd.h +++ b/deps/v8/third_party/zlib/crc32_simd.h @@ -1,6 +1,6 @@ /* crc32_simd.h * - * Copyright 2017 The Chromium Authors. All rights reserved. + * Copyright 2017 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. 
*/ diff --git a/deps/v8/third_party/zlib/deflate.h b/deps/v8/third_party/zlib/deflate.h index 1407931971adf1..ad3cef7e45da4f 100644 --- a/deps/v8/third_party/zlib/deflate.h +++ b/deps/v8/third_party/zlib/deflate.h @@ -151,11 +151,6 @@ typedef struct internal_state { * hash_shift * MIN_MATCH >= hash_bits */ - uInt chromium_zlib_hash; - /* 0 if Rabin-Karp rolling hash is enabled, non-zero if chromium zlib - * hash is enabled. - */ - long block_start; /* Window position at the beginning of the current output block. Gets * negative when the window is moved backwards. @@ -273,6 +268,11 @@ typedef struct internal_state { * updated to the new high water mark. */ + uInt chromium_zlib_hash; + /* 0 if Rabin-Karp rolling hash is enabled, non-zero if chromium zlib + * hash is enabled. + */ + } FAR deflate_state; /* Output a byte on the stream. diff --git a/deps/v8/third_party/zlib/google/BUILD.gn b/deps/v8/third_party/zlib/google/BUILD.gn index e996b167dba856..35ba1daf2dfea3 100644 --- a/deps/v8/third_party/zlib/google/BUILD.gn +++ b/deps/v8/third_party/zlib/google/BUILD.gn @@ -1,4 +1,4 @@ -# Copyright 2017 The Chromium Authors. All rights reserved. +# Copyright 2017 The Chromium Authors # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/compression_utils.cc b/deps/v8/third_party/zlib/google/compression_utils.cc index 0c4cf08108b599..279ea0732980c3 100644 --- a/deps/v8/third_party/zlib/google/compression_utils.cc +++ b/deps/v8/third_party/zlib/google/compression_utils.cc @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. +// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/compression_utils.h b/deps/v8/third_party/zlib/google/compression_utils.h index cca47be1efef6a..ea399816f60351 100644 --- a/deps/v8/third_party/zlib/google/compression_utils.h +++ b/deps/v8/third_party/zlib/google/compression_utils.h @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. +// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/compression_utils_portable.cc b/deps/v8/third_party/zlib/google/compression_utils_portable.cc index 331e41e1257253..49d6bfe9ea6ed4 100644 --- a/deps/v8/third_party/zlib/google/compression_utils_portable.cc +++ b/deps/v8/third_party/zlib/google/compression_utils_portable.cc @@ -1,6 +1,6 @@ /* compression_utils_portable.cc * - * Copyright 2019 The Chromium Authors. All rights reserved. + * Copyright 2019 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. */ diff --git a/deps/v8/third_party/zlib/google/compression_utils_portable.h b/deps/v8/third_party/zlib/google/compression_utils_portable.h index c1f377571fbba0..92b033e889f7f8 100644 --- a/deps/v8/third_party/zlib/google/compression_utils_portable.h +++ b/deps/v8/third_party/zlib/google/compression_utils_portable.h @@ -1,6 +1,6 @@ /* compression_utils_portable.h * - * Copyright 2019 The Chromium Authors. All rights reserved. + * Copyright 2019 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. 
*/ diff --git a/deps/v8/third_party/zlib/google/compression_utils_unittest.cc b/deps/v8/third_party/zlib/google/compression_utils_unittest.cc index 76572e5a47eac4..8e24387ee341da 100644 --- a/deps/v8/third_party/zlib/google/compression_utils_unittest.cc +++ b/deps/v8/third_party/zlib/google/compression_utils_unittest.cc @@ -1,4 +1,4 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. +// Copyright 2014 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/redact.h b/deps/v8/third_party/zlib/google/redact.h index ea7da16a52751c..df6bcafc2af8dd 100644 --- a/deps/v8/third_party/zlib/google/redact.h +++ b/deps/v8/third_party/zlib/google/redact.h @@ -1,4 +1,4 @@ -// Copyright (c) 2022 The Chromium Authors. All rights reserved. +// Copyright 2022 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef THIRD_PARTY_ZLIB_GOOGLE_REDACT_H_ diff --git a/deps/v8/third_party/zlib/google/zip.cc b/deps/v8/third_party/zlib/google/zip.cc index 7d5404b4bf707e..490dcee34e1e42 100644 --- a/deps/v8/third_party/zlib/google/zip.cc +++ b/deps/v8/third_party/zlib/google/zip.cc @@ -1,4 +1,4 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. +// Copyright 2012 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/zip.h b/deps/v8/third_party/zlib/google/zip.h index 0d9af0cbd935e0..e3036c809c2376 100644 --- a/deps/v8/third_party/zlib/google/zip.h +++ b/deps/v8/third_party/zlib/google/zip.h @@ -1,4 +1,4 @@ -// Copyright (c) 2011 The Chromium Authors. All rights reserved. +// Copyright 2011 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/zip_internal.cc b/deps/v8/third_party/zlib/google/zip_internal.cc index 1adf2e6d0e8fef..e65d7ce6b5f464 100644 --- a/deps/v8/third_party/zlib/google/zip_internal.cc +++ b/deps/v8/third_party/zlib/google/zip_internal.cc @@ -1,4 +1,4 @@ -// Copyright (c) 2011 The Chromium Authors. All rights reserved. +// Copyright 2011 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/zip_internal.h b/deps/v8/third_party/zlib/google/zip_internal.h index 92833fa1702130..f107d7fe883541 100644 --- a/deps/v8/third_party/zlib/google/zip_internal.h +++ b/deps/v8/third_party/zlib/google/zip_internal.h @@ -1,4 +1,4 @@ -// Copyright (c) 2011 The Chromium Authors. All rights reserved. +// Copyright 2011 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/zip_reader.cc b/deps/v8/third_party/zlib/google/zip_reader.cc index 075ba70aa8aca4..e97027a0bbb2fb 100644 --- a/deps/v8/third_party/zlib/google/zip_reader.cc +++ b/deps/v8/third_party/zlib/google/zip_reader.cc @@ -1,4 +1,4 @@ -// Copyright (c) 2012 The Chromium Authors. All rights reserved. +// Copyright 2012 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
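For context on the crc32_simd.c hunk above: it moves the <arm_neon.h>/<arm_acle.h> includes ahead of the #defines that remap the ACLE CRC32 names onto clang builtins, because defining those names first can make arm_acle.h emit wrappers that expand to calls to themselves. The following is a minimal illustrative sketch of that pattern (not part of the patch; it assumes clang targeting AArch64, and crc32c_u32 is a hypothetical name, not a symbol in the file):

/* Illustrative sketch only -- not part of the patch. */
#include <stdint.h>

#if defined(__clang__) && defined(__aarch64__)
#include <arm_neon.h> /* poly64/poly128 vector types used by PMULL */
#include <arm_acle.h> /* must be included before the #define below, or the
                         header's own __crc32c* wrappers could end up
                         expanding into calls to themselves */

/* Remap the ACLE name onto the clang builtin so no module-wide
 * -march=armv8-a flag is needed (which would break ThinLTO). */
#define __crc32cw __builtin_arm_crc32cw

/* Enable the CRC32 and AES/PMULL instructions per-function, matching the
 * TARGET_ARMV8_WITH_CRC attribute the patched file defines for aarch64. */
__attribute__((target("aes,crc")))
static uint32_t crc32c_u32(uint32_t crc, uint32_t data) {
  return __crc32cw(crc, data); /* one hardware CRC32-C step */
}
#endif

The per-function target attribute is also why the BUILD.gn hunk above widens the GCC fallback to -march=armv8-a+aes+crc: the aes extension supplies the PMULL instructions alongside crc.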
diff --git a/deps/v8/third_party/zlib/google/zip_reader.h b/deps/v8/third_party/zlib/google/zip_reader.h index 286ddfd9bd4d13..48244c8238368e 100644 --- a/deps/v8/third_party/zlib/google/zip_reader.h +++ b/deps/v8/third_party/zlib/google/zip_reader.h @@ -1,4 +1,4 @@ -// Copyright (c) 2011 The Chromium Authors. All rights reserved. +// Copyright 2011 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef THIRD_PARTY_ZLIB_GOOGLE_ZIP_READER_H_ diff --git a/deps/v8/third_party/zlib/google/zip_reader_unittest.cc b/deps/v8/third_party/zlib/google/zip_reader_unittest.cc index 31dceaccad30de..52dab200a3494c 100644 --- a/deps/v8/third_party/zlib/google/zip_reader_unittest.cc +++ b/deps/v8/third_party/zlib/google/zip_reader_unittest.cc @@ -1,4 +1,4 @@ -// Copyright (c) 2011 The Chromium Authors. All rights reserved. +// Copyright 2011 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/zip_unittest.cc b/deps/v8/third_party/zlib/google/zip_unittest.cc index 435d7b02ee2344..b639e8e8437799 100644 --- a/deps/v8/third_party/zlib/google/zip_unittest.cc +++ b/deps/v8/third_party/zlib/google/zip_unittest.cc @@ -1,4 +1,4 @@ -// Copyright (c) 2011 The Chromium Authors. All rights reserved. +// Copyright 2011 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @@ -1358,10 +1358,10 @@ TEST_F(ZipTest, NestedZip) { // performing this test (android-asan, android-11-x86-rel, // android-marshmallow-x86-rel-non-cq). // Some Mac, Linux and Debug (dbg) bots tend to time out when performing this -// test (crbug.com/1299736, crbug.com/1300448). +// test (crbug.com/1299736, crbug.com/1300448, crbug.com/1369958). #if defined(THREAD_SANITIZER) || BUILDFLAG(IS_FUCHSIA) || \ BUILDFLAG(IS_ANDROID) || BUILDFLAG(IS_MAC) || BUILDFLAG(IS_LINUX) || \ - BUILDFLAG(IS_CHROMEOS_LACROS) || !defined(NDEBUG) + BUILDFLAG(IS_CHROMEOS) || !defined(NDEBUG) TEST_F(ZipTest, DISABLED_BigFile) { #else TEST_F(ZipTest, BigFile) { diff --git a/deps/v8/third_party/zlib/google/zip_writer.cc b/deps/v8/third_party/zlib/google/zip_writer.cc index e3f677fe328082..31161ae86c3b7a 100644 --- a/deps/v8/third_party/zlib/google/zip_writer.cc +++ b/deps/v8/third_party/zlib/google/zip_writer.cc @@ -1,4 +1,4 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. +// Copyright 2017 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/google/zip_writer.h b/deps/v8/third_party/zlib/google/zip_writer.h index aa3c965d911599..dd109293da0a4f 100644 --- a/deps/v8/third_party/zlib/google/zip_writer.h +++ b/deps/v8/third_party/zlib/google/zip_writer.h @@ -1,4 +1,4 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. +// Copyright 2017 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. diff --git a/deps/v8/third_party/zlib/slide_hash_simd.h b/deps/v8/third_party/zlib/slide_hash_simd.h index 6f715bcde176a9..1000b774ccb60a 100644 --- a/deps/v8/third_party/zlib/slide_hash_simd.h +++ b/deps/v8/third_party/zlib/slide_hash_simd.h @@ -1,6 +1,6 @@ /* slide_hash_simd.h * - * Copyright 2022 The Chromium Authors. All rights reserved. 
+ * Copyright 2022 The Chromium Authors * Use of this source code is governed by a BSD-style license that can be * found in the Chromium source repository LICENSE file. */ diff --git a/deps/v8/tools/builtins-pgo/arm.profile b/deps/v8/tools/builtins-pgo/arm.profile new file mode 100644 index 00000000000000..8c6d20982ca6b1 --- /dev/null +++ b/deps/v8/tools/builtins-pgo/arm.profile @@ -0,0 +1,6359 @@ +block_hint,RecordWriteSaveFP,6,7,1 +block_hint,RecordWriteSaveFP,19,20,1 +block_hint,RecordWriteSaveFP,9,10,1 +block_hint,RecordWriteSaveFP,32,33,0 +block_hint,RecordWriteSaveFP,36,37,1 +block_hint,RecordWriteSaveFP,34,35,1 +block_hint,RecordWriteSaveFP,25,26,0 +block_hint,RecordWriteSaveFP,15,16,0 +block_hint,RecordWriteSaveFP,17,18,1 +block_hint,RecordWriteIgnoreFP,6,7,1 +block_hint,RecordWriteIgnoreFP,19,20,1 +block_hint,RecordWriteIgnoreFP,9,10,1 +block_hint,RecordWriteIgnoreFP,25,26,0 +block_hint,RecordWriteIgnoreFP,15,16,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,19,20,1 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,43,44,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,83,84,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,80,81,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,63,64,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,35,36,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,67,68,1 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,50,51,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,29,30,1 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,56,57,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,7,8,1 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,61,62,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,14,15,1 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,16,17,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,69,70,0 +block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,54,55,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,19,20,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,83,84,0 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,80,81,0 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,63,64,0 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,5,6,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,46,47,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,25,26,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,67,68,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,50,51,0 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,29,30,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,7,8,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,61,62,0 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,14,15,1 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,16,17,0 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,69,70,0 +block_hint,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,54,55,1 +block_hint,Call_ReceiverIsAny_Baseline_Compact,19,20,1 +block_hint,Call_ReceiverIsAny_Baseline_Compact,21,22,1 +block_hint,CallProxy,38,39,1 +block_hint,CallProxy,22,23,1 +block_hint,CallProxy,18,19,1 +block_hint,CallWithSpread,51,52,1 +block_hint,CallWithSpread,31,32,1 +block_hint,CallWithSpread,6,7,1 +block_hint,CallWithSpread,57,58,1 +block_hint,CallWithSpread,59,60,0 
+block_hint,CallWithSpread,43,44,0 +block_hint,CallWithSpread,45,46,1 +block_hint,CallWithSpread,33,34,0 +block_hint,CallWithSpread,38,39,1 +block_hint,CallWithSpread,40,41,1 +block_hint,CallWithSpread,22,23,1 +block_hint,CallWithSpread,24,25,1 +block_hint,CallWithSpread,26,27,0 +block_hint,CallWithSpread,53,54,1 +block_hint,CallWithSpread,47,48,1 +block_hint,CallWithSpread,9,10,0 +block_hint,CallWithSpread,11,12,1 +block_hint,CallWithSpread,13,14,0 +block_hint,CallWithSpread,49,50,0 +block_hint,CallWithSpread,55,56,1 +block_hint,CallWithSpread,15,16,1 +block_hint,CallWithSpread_Baseline,115,116,1 +block_hint,CallWithSpread_Baseline,60,61,0 +block_hint,CallWithSpread_Baseline,113,114,0 +block_hint,CallWithSpread_Baseline,106,107,0 +block_hint,CallWithSpread_Baseline,81,82,0 +block_hint,CallWithSpread_Baseline,48,49,0 +block_hint,CallWithSpread_Baseline,138,139,1 +block_hint,CallWithSpread_Baseline,130,131,0 +block_hint,CallWithSpread_Baseline,119,120,1 +block_hint,CallWithSpread_Baseline,85,86,1 +block_hint,CallWithSpread_Baseline,12,13,1 +block_hint,CallWithSpread_Baseline,100,101,1 +block_hint,CallWithSpread_Baseline,102,103,0 +block_hint,CallWithSpread_Baseline,68,69,0 +block_hint,CallWithSpread_Baseline,33,34,1 +block_hint,CallWithSpread_Baseline,53,54,0 +block_hint,CallWithSpread_Baseline,63,64,1 +block_hint,CallWithSpread_Baseline,65,66,1 +block_hint,CallWithSpread_Baseline,37,38,1 +block_hint,CallWithArrayLike,28,29,1 +block_hint,CallWithArrayLike,30,31,1 +block_hint,CallWithArrayLike,32,33,1 +block_hint,CallWithArrayLike,6,7,1 +block_hint,CallWithArrayLike,8,9,1 +block_hint,CallWithArrayLike,61,62,0 +block_hint,CallWithArrayLike,53,54,1 +block_hint,CallWithArrayLike,46,47,0 +block_hint,CallWithArrayLike,10,11,1 +block_hint,CallWithArrayLike,63,64,1 +block_hint,CallWithArrayLike,55,56,0 +block_hint,CallWithArrayLike,57,58,1 +block_hint,CallWithArrayLike,12,13,0 +block_hint,ConstructWithSpread_Baseline,90,91,1 +block_hint,ConstructWithSpread_Baseline,70,71,1 +block_hint,ConstructWithSpread_Baseline,45,46,1 +block_hint,ConstructWithSpread_Baseline,11,12,1 +block_hint,ConstructWithSpread_Baseline,81,82,1 +block_hint,ConstructWithSpread_Baseline,83,84,0 +block_hint,ConstructWithSpread_Baseline,58,59,0 +block_hint,ConstructWithSpread_Baseline,27,28,1 +block_hint,Construct_Baseline,48,49,0 +block_hint,Construct_Baseline,46,47,1 +block_hint,Construct_Baseline,38,39,1 +block_hint,Construct_Baseline,23,24,1 +block_hint,Construct_Baseline,5,6,1 +block_hint,Construct_Baseline,20,21,1 +block_hint,Construct_Baseline,25,26,1 +block_hint,Construct_Baseline,7,8,1 +block_hint,Construct_Baseline,36,37,0 +block_hint,Construct_Baseline,11,12,1 +block_hint,Construct_Baseline,13,14,0 +block_hint,Construct_Baseline,40,41,0 +block_hint,Construct_Baseline,27,28,1 +block_hint,FastNewObject,38,39,1 +block_hint,FastNewObject,40,41,1 +block_hint,FastNewObject,42,43,1 +block_hint,FastNewObject,44,45,1 +block_hint,FastNewObject,53,54,0 +block_hint,FastNewObject,55,56,1 +block_hint,FastNewObject,48,49,0 +block_hint,FastNewObject,23,24,0 +block_hint,FastNewObject,27,28,0 +block_hint,FastNewObject,31,32,1 +block_hint,FastNewClosure,15,16,0 +block_hint,FastNewClosure,4,5,1 +block_hint,FastNewClosure,19,20,1 +block_hint,FastNewClosure,8,9,1 +block_hint,StringEqual,57,58,0 +block_hint,StringEqual,29,30,1 +block_hint,StringEqual,55,56,0 +block_hint,StringEqual,45,46,1 +block_hint,StringEqual,81,82,1 +block_hint,StringEqual,69,70,0 +block_hint,StringEqual,51,52,0 +block_hint,StringEqual,23,24,1 
+block_hint,StringEqual,79,80,0 +block_hint,StringEqual,65,66,0 +block_hint,StringEqual,47,48,0 +block_hint,StringEqual,39,40,0 +block_hint,StringEqual,71,72,0 +block_hint,StringEqual,53,54,0 +block_hint,StringGreaterThanOrEqual,40,41,1 +block_hint,StringGreaterThanOrEqual,30,31,1 +block_hint,StringGreaterThanOrEqual,36,37,0 +block_hint,StringGreaterThanOrEqual,12,13,0 +block_hint,StringLessThan,22,23,0 +block_hint,StringLessThan,40,41,1 +block_hint,StringLessThan,36,37,0 +block_hint,StringLessThan,24,25,0 +block_hint,StringLessThanOrEqual,40,41,1 +block_hint,StringLessThanOrEqual,30,31,1 +block_hint,StringLessThanOrEqual,36,37,0 +block_hint,StringLessThanOrEqual,12,13,0 +block_hint,StringSubstring,87,88,0 +block_hint,StringSubstring,29,30,0 +block_hint,StringSubstring,63,64,1 +block_hint,StringSubstring,58,59,1 +block_hint,StringSubstring,56,57,1 +block_hint,StringSubstring,110,111,0 +block_hint,StringSubstring,19,20,0 +block_hint,StringSubstring,21,22,0 +block_hint,StringSubstring,114,115,1 +block_hint,StringSubstring,102,103,1 +block_hint,StringSubstring,38,39,0 +block_hint,StringSubstring,17,18,0 +block_hint,StringSubstring,116,117,1 +block_hint,StringSubstring,104,105,1 +block_hint,StringSubstring,42,43,0 +block_hint,StringSubstring,75,76,1 +block_hint,StringSubstring,127,128,0 +block_hint,StringSubstring,34,35,1 +block_hint,StringSubstring,31,32,0 +block_hint,OrderedHashTableHealIndex,5,6,1 +block_hint,OrderedHashTableHealIndex,9,10,0 +block_hint,CompileLazy,42,43,1 +block_hint,CompileLazy,22,23,0 +block_hint,CompileLazy,40,41,0 +block_hint,CompileLazy,8,9,0 +block_hint,CompileLazy,10,11,0 +block_hint,CompileLazy,15,16,0 +block_hint,CompileLazy,3,4,0 +block_hint,CompileLazy,18,19,1 +block_hint,AllocateInYoungGeneration,2,3,1 +block_hint,AllocateRegularInYoungGeneration,2,3,1 +block_hint,AllocateRegularInOldGeneration,2,3,1 +block_hint,CopyFastSmiOrObjectElements,12,13,1 +block_hint,CopyFastSmiOrObjectElements,18,19,1 +block_hint,CopyFastSmiOrObjectElements,9,10,0 +block_hint,CopyFastSmiOrObjectElements,23,24,1 +block_hint,CopyFastSmiOrObjectElements,21,22,1 +block_hint,CopyFastSmiOrObjectElements,15,16,0 +block_hint,GrowFastDoubleElements,18,19,0 +block_hint,GrowFastDoubleElements,20,21,0 +block_hint,GrowFastDoubleElements,14,15,0 +block_hint,GrowFastDoubleElements,16,17,1 +block_hint,GrowFastDoubleElements,28,29,0 +block_hint,GrowFastDoubleElements,6,7,0 +block_hint,GrowFastDoubleElements,26,27,0 +block_hint,GrowFastSmiOrObjectElements,16,17,0 +block_hint,GrowFastSmiOrObjectElements,18,19,0 +block_hint,GrowFastSmiOrObjectElements,14,15,0 +block_hint,GrowFastSmiOrObjectElements,22,23,1 +block_hint,GrowFastSmiOrObjectElements,6,7,0 +block_hint,GrowFastSmiOrObjectElements,12,13,0 +block_hint,ToNumber,3,4,1 +block_hint,ToNumber,5,6,0 +block_hint,ToNumber,18,19,0 +block_hint,ToNumber,15,16,1 +block_hint,ToNumber_Baseline,24,25,0 +block_hint,ToNumber_Baseline,22,23,1 +block_hint,ToNumber_Baseline,3,4,1 +block_hint,ToNumeric_Baseline,7,8,0 +block_hint,ToNumeric_Baseline,9,10,1 +block_hint,ToNumeric_Baseline,3,4,1 +block_hint,ToNumberConvertBigInt,3,4,1 +block_hint,ToNumberConvertBigInt,5,6,0 +block_hint,ToNumberConvertBigInt,20,21,0 +block_hint,ToNumberConvertBigInt,17,18,1 +block_hint,ToNumberConvertBigInt,9,10,1 +block_hint,Typeof,15,16,0 +block_hint,Typeof,17,18,0 +block_hint,Typeof,9,10,0 +block_hint,Typeof,13,14,1 +block_hint,KeyedLoadIC_PolymorphicName,247,248,1 +block_hint,KeyedLoadIC_PolymorphicName,96,97,1 +block_hint,KeyedLoadIC_PolymorphicName,263,264,0 
+block_hint,KeyedLoadIC_PolymorphicName,60,61,0 +block_hint,KeyedLoadIC_PolymorphicName,133,134,1 +block_hint,KeyedLoadIC_PolymorphicName,303,304,0 +block_hint,KeyedLoadIC_PolymorphicName,333,334,1 +block_hint,KeyedLoadIC_PolymorphicName,98,99,0 +block_hint,KeyedLoadIC_PolymorphicName,284,285,1 +block_hint,KeyedLoadIC_PolymorphicName,24,25,1 +block_hint,KeyedLoadIC_PolymorphicName,165,166,0 +block_hint,KeyedLoadIC_PolymorphicName,122,123,1 +block_hint,KeyedLoadIC_PolymorphicName,335,336,1 +block_hint,KeyedLoadIC_PolymorphicName,110,111,0 +block_hint,KeyedLoadIC_PolymorphicName,175,176,0 +block_hint,KeyedLoadIC_PolymorphicName,45,46,1 +block_hint,KeyedLoadIC_PolymorphicName,74,75,0 +block_hint,KeyedLoadIC_PolymorphicName,253,254,0 +block_hint,KeyedLoadIC_PolymorphicName,292,293,1 +block_hint,KeyedLoadIC_PolymorphicName,28,29,0 +block_hint,KeyedLoadIC_PolymorphicName,26,27,0 +block_hint,KeyedStoreIC_Megamorphic,379,380,1 +block_hint,KeyedStoreIC_Megamorphic,381,382,0 +block_hint,KeyedStoreIC_Megamorphic,1234,1235,0 +block_hint,KeyedStoreIC_Megamorphic,1215,1216,1 +block_hint,KeyedStoreIC_Megamorphic,1149,1150,0 +block_hint,KeyedStoreIC_Megamorphic,918,919,1 +block_hint,KeyedStoreIC_Megamorphic,383,384,1 +block_hint,KeyedStoreIC_Megamorphic,1244,1245,0 +block_hint,KeyedStoreIC_Megamorphic,1223,1224,0 +block_hint,KeyedStoreIC_Megamorphic,601,602,0 +block_hint,KeyedStoreIC_Megamorphic,746,747,1 +block_hint,KeyedStoreIC_Megamorphic,603,604,0 +block_hint,KeyedStoreIC_Megamorphic,1203,1204,0 +block_hint,KeyedStoreIC_Megamorphic,1038,1039,0 +block_hint,KeyedStoreIC_Megamorphic,1177,1178,0 +block_hint,KeyedStoreIC_Megamorphic,192,193,1 +block_hint,KeyedStoreIC_Megamorphic,194,195,1 +block_hint,KeyedStoreIC_Megamorphic,539,540,0 +block_hint,KeyedStoreIC_Megamorphic,541,542,0 +block_hint,KeyedStoreIC_Megamorphic,1042,1043,0 +block_hint,KeyedStoreIC_Megamorphic,547,548,1 +block_hint,KeyedStoreIC_Megamorphic,1068,1069,0 +block_hint,KeyedStoreIC_Megamorphic,606,607,0 +block_hint,KeyedStoreIC_Megamorphic,1205,1206,0 +block_hint,KeyedStoreIC_Megamorphic,549,550,0 +block_hint,KeyedStoreIC_Megamorphic,1044,1045,0 +block_hint,KeyedStoreIC_Megamorphic,200,201,1 +block_hint,KeyedStoreIC_Megamorphic,553,554,0 +block_hint,KeyedStoreIC_Megamorphic,202,203,0 +block_hint,KeyedStoreIC_Megamorphic,204,205,0 +block_hint,KeyedStoreIC_Megamorphic,953,954,0 +block_hint,KeyedStoreIC_Megamorphic,555,556,1 +block_hint,KeyedStoreIC_Megamorphic,557,558,0 +block_hint,KeyedStoreIC_Megamorphic,559,560,1 +block_hint,KeyedStoreIC_Megamorphic,561,562,0 +block_hint,KeyedStoreIC_Megamorphic,1157,1158,0 +block_hint,KeyedStoreIC_Megamorphic,563,564,1 +block_hint,KeyedStoreIC_Megamorphic,905,906,0 +block_hint,KeyedStoreIC_Megamorphic,1159,1160,0 +block_hint,KeyedStoreIC_Megamorphic,565,566,1 +block_hint,KeyedStoreIC_Megamorphic,571,572,1 +block_hint,KeyedStoreIC_Megamorphic,573,574,0 +block_hint,KeyedStoreIC_Megamorphic,575,576,0 +block_hint,KeyedStoreIC_Megamorphic,577,578,1 +block_hint,KeyedStoreIC_Megamorphic,960,961,1 +block_hint,KeyedStoreIC_Megamorphic,569,570,1 +block_hint,KeyedStoreIC_Megamorphic,567,568,0 +block_hint,KeyedStoreIC_Megamorphic,1232,1233,0 +block_hint,KeyedStoreIC_Megamorphic,1247,1248,1 +block_hint,KeyedStoreIC_Megamorphic,1240,1241,1 +block_hint,KeyedStoreIC_Megamorphic,1139,1140,1 +block_hint,KeyedStoreIC_Megamorphic,978,979,1 +block_hint,KeyedStoreIC_Megamorphic,206,207,0 +block_hint,KeyedStoreIC_Megamorphic,362,363,0 +block_hint,KeyedStoreIC_Megamorphic,1143,1144,0 
+block_hint,KeyedStoreIC_Megamorphic,1152,1153,0 +block_hint,KeyedStoreIC_Megamorphic,930,931,0 +block_hint,KeyedStoreIC_Megamorphic,491,492,0 +block_hint,KeyedStoreIC_Megamorphic,898,899,0 +block_hint,KeyedStoreIC_Megamorphic,934,935,0 +block_hint,KeyedStoreIC_Megamorphic,932,933,1 +block_hint,KeyedStoreIC_Megamorphic,493,494,1 +block_hint,KeyedStoreIC_Megamorphic,499,500,1 +block_hint,KeyedStoreIC_Megamorphic,501,502,0 +block_hint,KeyedStoreIC_Megamorphic,938,939,1 +block_hint,KeyedStoreIC_Megamorphic,503,504,0 +block_hint,KeyedStoreIC_Megamorphic,505,506,1 +block_hint,KeyedStoreIC_Megamorphic,936,937,1 +block_hint,KeyedStoreIC_Megamorphic,497,498,1 +block_hint,KeyedStoreIC_Megamorphic,495,496,0 +block_hint,KeyedStoreIC_Megamorphic,1124,1125,1 +block_hint,KeyedStoreIC_Megamorphic,1189,1190,1 +block_hint,KeyedStoreIC_Megamorphic,896,897,0 +block_hint,KeyedStoreIC_Megamorphic,350,351,1 +block_hint,KeyedStoreIC_Megamorphic,336,337,1 +block_hint,KeyedStoreIC_Megamorphic,1122,1123,1 +block_hint,KeyedStoreIC_Megamorphic,683,684,0 +block_hint,KeyedStoreIC_Megamorphic,980,981,1 +block_hint,KeyedStoreIC_Megamorphic,214,215,0 +block_hint,KeyedStoreIC_Megamorphic,1024,1025,0 +block_hint,KeyedStoreIC_Megamorphic,693,694,0 +block_hint,KeyedStoreIC_Megamorphic,579,580,0 +block_hint,KeyedStoreIC_Megamorphic,167,168,1 +block_hint,KeyedStoreIC_Megamorphic,581,582,0 +block_hint,KeyedStoreIC_Megamorphic,583,584,0 +block_hint,KeyedStoreIC_Megamorphic,1051,1052,0 +block_hint,KeyedStoreIC_Megamorphic,585,586,1 +block_hint,KeyedStoreIC_Megamorphic,966,967,0 +block_hint,KeyedStoreIC_Megamorphic,1183,1184,0 +block_hint,KeyedStoreIC_Megamorphic,1053,1054,1 +block_hint,KeyedStoreIC_Megamorphic,759,760,1 +block_hint,KeyedStoreIC_Megamorphic,612,613,0 +block_hint,KeyedStoreIC_Megamorphic,1208,1209,0 +block_hint,KeyedStoreIC_Megamorphic,1055,1056,0 +block_hint,KeyedStoreIC_Megamorphic,1181,1182,0 +block_hint,KeyedStoreIC_Megamorphic,224,225,1 +block_hint,KeyedStoreIC_Megamorphic,761,762,0 +block_hint,KeyedStoreIC_Megamorphic,593,594,0 +block_hint,KeyedStoreIC_Megamorphic,1145,1146,0 +block_hint,KeyedStoreIC_Megamorphic,1192,1193,0 +block_hint,KeyedStoreIC_Megamorphic,909,910,0 +block_hint,KeyedStoreIC_Megamorphic,173,174,1 +block_hint,KeyedStoreIC_Megamorphic,175,176,1 +block_hint,KeyedStoreIC_Megamorphic,373,374,0 +block_hint,KeyedStoreIC_Megamorphic,177,178,1 +block_hint,KeyedStoreIC_Megamorphic,375,376,0 +block_hint,KeyedStoreIC_Megamorphic,179,180,1 +block_hint,KeyedStoreIC_Megamorphic,234,235,0 +block_hint,KeyedStoreIC_Megamorphic,236,237,0 +block_hint,KeyedStoreIC_Megamorphic,181,182,1 +block_hint,KeyedStoreIC_Megamorphic,183,184,1 +block_hint,KeyedStoreIC_Megamorphic,1029,1030,0 +block_hint,KeyedStoreIC_Megamorphic,185,186,1 +block_hint,KeyedStoreIC_Megamorphic,928,929,1 +block_hint,KeyedStoreIC_Megamorphic,485,486,1 +block_hint,KeyedStoreIC_Megamorphic,733,734,0 +block_hint,KeyedStoreIC_Megamorphic,922,923,1 +block_hint,KeyedStoreIC_Megamorphic,413,414,0 +block_hint,KeyedStoreIC_Megamorphic,415,416,0 +block_hint,KeyedStoreIC_Megamorphic,254,255,1 +block_hint,KeyedStoreIC_Megamorphic,417,418,0 +block_hint,KeyedStoreIC_Megamorphic,630,631,1 +block_hint,KeyedStoreIC_Megamorphic,92,93,1 +block_hint,KeyedStoreIC_Megamorphic,94,95,0 +block_hint,KeyedStoreIC_Megamorphic,771,772,1 +block_hint,KeyedStoreIC_Megamorphic,387,388,0 +block_hint,KeyedStoreIC_Megamorphic,639,640,1 +block_hint,KeyedStoreIC_Megamorphic,64,65,1 +block_hint,KeyedStoreIC_Megamorphic,66,67,0 +block_hint,DefineKeyedOwnIC_Megamorphic,312,313,1 
+block_hint,DefineKeyedOwnIC_Megamorphic,314,315,0 +block_hint,DefineKeyedOwnIC_Megamorphic,899,900,0 +block_hint,DefineKeyedOwnIC_Megamorphic,420,421,0 +block_hint,DefineKeyedOwnIC_Megamorphic,418,419,1 +block_hint,DefineKeyedOwnIC_Megamorphic,800,801,0 +block_hint,DefineKeyedOwnIC_Megamorphic,575,576,1 +block_hint,DefineKeyedOwnIC_Megamorphic,603,604,1 +block_hint,DefineKeyedOwnIC_Megamorphic,232,233,0 +block_hint,DefineKeyedOwnIC_Megamorphic,53,54,1 +block_hint,DefineKeyedOwnIC_Megamorphic,55,56,0 +block_hint,LoadGlobalIC_NoFeedback,41,42,1 +block_hint,LoadGlobalIC_NoFeedback,6,7,1 +block_hint,LoadGlobalIC_NoFeedback,8,9,1 +block_hint,LoadGlobalIC_NoFeedback,10,11,1 +block_hint,LoadGlobalIC_NoFeedback,12,13,1 +block_hint,LoadGlobalIC_NoFeedback,31,32,1 +block_hint,LoadGlobalIC_NoFeedback,49,50,1 +block_hint,LoadGlobalIC_NoFeedback,18,19,1 +block_hint,LoadGlobalIC_NoFeedback,27,28,0 +block_hint,LoadGlobalIC_NoFeedback,14,15,1 +block_hint,LoadGlobalIC_NoFeedback,33,34,0 +block_hint,LoadGlobalIC_NoFeedback,16,17,1 +block_hint,LoadGlobalIC_NoFeedback,20,21,1 +block_hint,LoadGlobalIC_NoFeedback,22,23,0 +block_hint,LoadGlobalIC_NoFeedback,24,25,1 +block_hint,LoadIC_FunctionPrototype,2,3,0 +block_hint,LoadIC_FunctionPrototype,4,5,1 +block_hint,LoadIC_NoFeedback,97,98,1 +block_hint,LoadIC_NoFeedback,99,100,0 +block_hint,LoadIC_NoFeedback,306,307,1 +block_hint,LoadIC_NoFeedback,226,227,0 +block_hint,LoadIC_NoFeedback,141,142,0 +block_hint,LoadIC_NoFeedback,320,321,0 +block_hint,LoadIC_NoFeedback,287,288,0 +block_hint,LoadIC_NoFeedback,302,303,0 +block_hint,LoadIC_NoFeedback,53,54,1 +block_hint,LoadIC_NoFeedback,289,290,0 +block_hint,LoadIC_NoFeedback,55,56,1 +block_hint,LoadIC_NoFeedback,324,325,1 +block_hint,LoadIC_NoFeedback,272,273,0 +block_hint,LoadIC_NoFeedback,295,296,1 +block_hint,LoadIC_NoFeedback,247,248,1 +block_hint,LoadIC_NoFeedback,59,60,0 +block_hint,LoadIC_NoFeedback,22,23,1 +block_hint,LoadIC_NoFeedback,113,114,0 +block_hint,LoadIC_NoFeedback,35,36,1 +block_hint,LoadIC_NoFeedback,130,131,1 +block_hint,LoadIC_NoFeedback,145,146,0 +block_hint,LoadIC_NoFeedback,125,126,0 +block_hint,LoadIC_NoFeedback,261,262,0 +block_hint,LoadIC_NoFeedback,250,251,0 +block_hint,LoadIC_NoFeedback,149,150,1 +block_hint,LoadIC_NoFeedback,167,168,0 +block_hint,LoadIC_NoFeedback,322,323,0 +block_hint,LoadIC_NoFeedback,151,152,0 +block_hint,LoadIC_NoFeedback,291,292,0 +block_hint,LoadIC_NoFeedback,70,71,1 +block_hint,LoadIC_NoFeedback,155,156,0 +block_hint,LoadIC_NoFeedback,72,73,1 +block_hint,LoadIC_NoFeedback,254,255,1 +block_hint,LoadIC_NoFeedback,76,77,0 +block_hint,LoadIC_NoFeedback,326,327,1 +block_hint,LoadIC_NoFeedback,278,279,0 +block_hint,LoadIC_NoFeedback,276,277,0 +block_hint,LoadIC_NoFeedback,24,25,1 +block_hint,LoadIC_NoFeedback,242,243,1 +block_hint,LoadIC_NoFeedback,135,136,1 +block_hint,LoadIC_NoFeedback,93,94,0 +block_hint,StoreIC_NoFeedback,147,148,1 +block_hint,StoreIC_NoFeedback,149,150,0 +block_hint,StoreIC_NoFeedback,259,260,0 +block_hint,StoreIC_NoFeedback,549,550,0 +block_hint,StoreIC_NoFeedback,443,444,0 +block_hint,StoreIC_NoFeedback,527,528,0 +block_hint,StoreIC_NoFeedback,58,59,1 +block_hint,StoreIC_NoFeedback,60,61,1 +block_hint,StoreIC_NoFeedback,199,200,0 +block_hint,StoreIC_NoFeedback,201,202,0 +block_hint,StoreIC_NoFeedback,447,448,0 +block_hint,StoreIC_NoFeedback,207,208,1 +block_hint,StoreIC_NoFeedback,473,474,0 +block_hint,StoreIC_NoFeedback,262,263,0 +block_hint,StoreIC_NoFeedback,551,552,0 +block_hint,StoreIC_NoFeedback,209,210,0 
+block_hint,StoreIC_NoFeedback,449,450,0 +block_hint,StoreIC_NoFeedback,66,67,1 +block_hint,StoreIC_NoFeedback,213,214,0 +block_hint,StoreIC_NoFeedback,68,69,0 +block_hint,StoreIC_NoFeedback,390,391,0 +block_hint,StoreIC_NoFeedback,215,216,1 +block_hint,StoreIC_NoFeedback,217,218,0 +block_hint,StoreIC_NoFeedback,219,220,1 +block_hint,StoreIC_NoFeedback,221,222,0 +block_hint,StoreIC_NoFeedback,509,510,0 +block_hint,StoreIC_NoFeedback,223,224,1 +block_hint,StoreIC_NoFeedback,356,357,0 +block_hint,StoreIC_NoFeedback,511,512,0 +block_hint,StoreIC_NoFeedback,393,394,1 +block_hint,StoreIC_NoFeedback,231,232,1 +block_hint,StoreIC_NoFeedback,233,234,0 +block_hint,StoreIC_NoFeedback,235,236,0 +block_hint,StoreIC_NoFeedback,237,238,1 +block_hint,StoreIC_NoFeedback,227,228,0 +block_hint,StoreIC_NoFeedback,564,565,0 +block_hint,StoreIC_NoFeedback,494,495,1 +block_hint,StoreIC_NoFeedback,413,414,1 +block_hint,StoreIC_NoFeedback,72,73,0 +block_hint,StoreIC_NoFeedback,78,79,0 +block_hint,StoreIC_NoFeedback,130,131,0 +block_hint,StoreIC_NoFeedback,498,499,0 +block_hint,StoreIC_NoFeedback,367,368,0 +block_hint,StoreIC_NoFeedback,151,152,0 +block_hint,StoreIC_NoFeedback,349,350,0 +block_hint,StoreIC_NoFeedback,153,154,1 +block_hint,StoreIC_NoFeedback,159,160,1 +block_hint,StoreIC_NoFeedback,161,162,0 +block_hint,StoreIC_NoFeedback,163,164,0 +block_hint,StoreIC_NoFeedback,157,158,1 +block_hint,StoreIC_NoFeedback,155,156,0 +block_hint,StoreIC_NoFeedback,536,537,1 +block_hint,StoreIC_NoFeedback,385,386,1 +block_hint,StoreIC_NoFeedback,193,194,0 +block_hint,StoreIC_NoFeedback,381,382,1 +block_hint,StoreIC_NoFeedback,179,180,0 +block_hint,StoreIC_NoFeedback,519,520,1 +block_hint,StoreIC_NoFeedback,415,416,1 +block_hint,StoreIC_NoFeedback,80,81,0 +block_hint,StoreIC_NoFeedback,82,83,0 +block_hint,StoreIC_NoFeedback,241,242,0 +block_hint,StoreIC_NoFeedback,243,244,0 +block_hint,StoreIC_NoFeedback,456,457,0 +block_hint,StoreIC_NoFeedback,245,246,1 +block_hint,StoreIC_NoFeedback,513,514,0 +block_hint,StoreIC_NoFeedback,403,404,0 +block_hint,StoreIC_NoFeedback,458,459,1 +block_hint,StoreIC_NoFeedback,268,269,0 +block_hint,StoreIC_NoFeedback,553,554,0 +block_hint,StoreIC_NoFeedback,460,461,0 +block_hint,StoreIC_NoFeedback,531,532,0 +block_hint,StoreIC_NoFeedback,90,91,1 +block_hint,StoreIC_NoFeedback,332,333,0 +block_hint,StoreIC_NoFeedback,420,421,1 +block_hint,StoreIC_NoFeedback,94,95,0 +block_hint,StoreIC_NoFeedback,96,97,0 +block_hint,StoreIC_NoFeedback,253,254,0 +block_hint,StoreIC_NoFeedback,255,256,1 +block_hint,StoreIC_NoFeedback,362,363,0 +block_hint,StoreIC_NoFeedback,40,41,1 +block_hint,StoreIC_NoFeedback,42,43,1 +block_hint,StoreIC_NoFeedback,141,142,0 +block_hint,StoreIC_NoFeedback,44,45,1 +block_hint,StoreIC_NoFeedback,143,144,0 +block_hint,StoreIC_NoFeedback,46,47,1 +block_hint,StoreIC_NoFeedback,100,101,0 +block_hint,StoreIC_NoFeedback,102,103,0 +block_hint,StoreIC_NoFeedback,48,49,1 +block_hint,StoreIC_NoFeedback,50,51,1 +block_hint,StoreIC_NoFeedback,439,440,0 +block_hint,StoreIC_NoFeedback,52,53,1 +block_hint,DefineNamedOwnIC_NoFeedback,80,81,1 +block_hint,DefineNamedOwnIC_NoFeedback,82,83,0 +block_hint,DefineNamedOwnIC_NoFeedback,236,237,0 +block_hint,DefineNamedOwnIC_NoFeedback,210,211,1 +block_hint,DefineNamedOwnIC_NoFeedback,136,137,0 +block_hint,DefineNamedOwnIC_NoFeedback,239,240,0 +block_hint,DefineNamedOwnIC_NoFeedback,212,213,0 +block_hint,DefineNamedOwnIC_NoFeedback,234,235,0 +block_hint,DefineNamedOwnIC_NoFeedback,157,158,1 +block_hint,DefineNamedOwnIC_NoFeedback,36,37,1 
+block_hint,DefineNamedOwnIC_NoFeedback,86,87,0 +block_hint,DefineNamedOwnIC_NoFeedback,38,39,1 +block_hint,DefineNamedOwnIC_NoFeedback,40,41,1 +block_hint,KeyedLoadIC_SloppyArguments,12,13,0 +block_hint,KeyedLoadIC_SloppyArguments,14,15,1 +block_hint,KeyedLoadIC_SloppyArguments,4,5,1 +block_hint,KeyedLoadIC_SloppyArguments,22,23,0 +block_hint,KeyedLoadIC_SloppyArguments,6,7,1 +block_hint,KeyedLoadIC_SloppyArguments,16,17,0 +block_hint,KeyedLoadIC_SloppyArguments,18,19,0 +block_hint,KeyedLoadIC_SloppyArguments,8,9,1 +block_hint,KeyedLoadIC_SloppyArguments,10,11,0 +block_hint,StoreFastElementIC_Standard,340,341,0 +block_hint,StoreFastElementIC_Standard,826,827,0 +block_hint,StoreFastElementIC_Standard,346,347,0 +block_hint,StoreFastElementIC_Standard,966,967,1 +block_hint,StoreFastElementIC_Standard,348,349,1 +block_hint,StoreFastElementIC_Standard,40,41,1 +block_hint,StoreFastElementIC_Standard,350,351,0 +block_hint,StoreFastElementIC_Standard,828,829,0 +block_hint,StoreFastElementIC_Standard,356,357,0 +block_hint,StoreFastElementIC_Standard,968,969,1 +block_hint,StoreFastElementIC_Standard,358,359,1 +block_hint,StoreFastElementIC_Standard,42,43,1 +block_hint,StoreFastElementIC_Standard,360,361,0 +block_hint,StoreFastElementIC_Standard,830,831,0 +block_hint,StoreFastElementIC_Standard,970,971,1 +block_hint,StoreFastElementIC_Standard,366,367,1 +block_hint,StoreFastElementIC_Standard,44,45,1 +block_hint,StoreFastElementIC_Standard,392,393,0 +block_hint,StoreFastElementIC_Standard,838,839,0 +block_hint,StoreFastElementIC_Standard,978,979,1 +block_hint,StoreFastElementIC_Standard,398,399,1 +block_hint,StoreFastElementIC_Standard,52,53,1 +block_hint,StoreFastElementIC_Standard,844,845,0 +block_hint,StoreFastElementIC_Standard,420,421,1 +block_hint,StoreFastElementIC_Standard,985,986,1 +block_hint,StoreFastElementIC_Standard,422,423,1 +block_hint,StoreFastElementIC_Standard,58,59,1 +block_hint,StoreFastElementIC_Standard,848,849,0 +block_hint,StoreFastElementIC_Standard,428,429,1 +block_hint,StoreFastElementIC_Standard,988,989,1 +block_hint,StoreFastElementIC_Standard,430,431,1 +block_hint,StoreFastElementIC_Standard,60,61,1 +block_hint,StoreFastElementIC_Standard,600,601,0 +block_hint,StoreFastElementIC_Standard,1072,1073,0 +block_hint,StoreFastElementIC_Standard,669,670,0 +block_hint,StoreFastElementIC_Standard,300,301,1 +block_hint,StoreFastElementIC_Standard,596,597,0 +block_hint,StoreFastElementIC_Standard,1074,1075,0 +block_hint,StoreFastElementIC_Standard,671,672,0 +block_hint,StoreFastElementIC_Standard,302,303,1 +block_hint,StoreFastElementIC_Standard,592,593,0 +block_hint,StoreFastElementIC_Standard,1076,1077,0 +block_hint,StoreFastElementIC_Standard,673,674,0 +block_hint,StoreFastElementIC_Standard,304,305,1 +block_hint,StoreFastElementIC_Standard,588,589,0 +block_hint,StoreFastElementIC_Standard,1078,1079,0 +block_hint,StoreFastElementIC_Standard,675,676,0 +block_hint,StoreFastElementIC_Standard,306,307,1 +block_hint,StoreFastElementIC_Standard,584,585,0 +block_hint,StoreFastElementIC_Standard,931,932,1 +block_hint,StoreFastElementIC_Standard,770,771,0 +block_hint,StoreFastElementIC_Standard,308,309,1 +block_hint,StoreFastElementIC_Standard,580,581,0 +block_hint,StoreFastElementIC_Standard,929,930,1 +block_hint,StoreFastElementIC_Standard,772,773,0 +block_hint,StoreFastElementIC_Standard,310,311,1 +block_hint,StoreFastElementIC_Standard,576,577,0 +block_hint,StoreFastElementIC_Standard,927,928,1 +block_hint,StoreFastElementIC_Standard,774,775,0 
+block_hint,StoreFastElementIC_Standard,312,313,1 +block_hint,StoreFastElementIC_Standard,572,573,0 +block_hint,StoreFastElementIC_Standard,776,777,0 +block_hint,StoreFastElementIC_Standard,314,315,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,469,470,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,263,264,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,647,648,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,472,473,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,563,564,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,435,436,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,214,215,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,541,542,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,198,199,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,204,205,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,629,630,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,437,438,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,34,35,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,474,475,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,277,278,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,653,654,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,283,284,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,567,568,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,439,440,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,219,220,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,537,538,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,182,183,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,184,185,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,631,632,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,441,442,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,36,37,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,479,480,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,659,660,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,295,296,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,571,572,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,443,444,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,224,225,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,533,534,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,166,167,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,633,634,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,445,446,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,38,39,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,492,493,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,665,666,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,331,332,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,581,582,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,453,454,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,238,239,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,523,524,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,126,127,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,637,638,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,455,456,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,46,47,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,501,502,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,359,360,1 
+block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,669,670,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,365,366,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,590,591,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,461,462,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,249,250,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,251,252,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,603,604,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,102,103,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,517,518,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,463,464,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,56,57,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,508,509,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,371,372,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,673,674,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,377,378,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,96,97,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,100,101,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,467,468,1 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,58,59,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,335,336,0 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,185,186,0 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,393,394,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,187,188,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,22,23,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,339,340,0 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,397,398,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,205,206,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,207,208,0 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,160,161,0 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,375,376,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,130,131,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,134,135,0 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,26,27,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,355,356,0 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,265,266,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,412,413,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,267,268,1 +block_hint,StoreFastElementIC_NoTransitionHandleCOW,44,45,1 +block_hint,ElementsTransitionAndStore_Standard,433,434,1 +block_hint,ElementsTransitionAndStore_Standard,435,436,0 +block_hint,ElementsTransitionAndStore_Standard,543,544,1 +block_hint,ElementsTransitionAndStore_Standard,431,432,0 +block_hint,ElementsTransitionAndStore_Standard,187,188,0 +block_hint,ElementsTransitionAndStore_Standard,189,190,0 +block_hint,ElementsTransitionAndStore_Standard,334,335,0 +block_hint,ElementsTransitionAndStore_Standard,437,438,1 +block_hint,ElementsTransitionAndStore_Standard,195,196,1 +block_hint,ElementsTransitionAndStore_Standard,28,29,1 +block_hint,ElementsTransitionAndStore_Standard,481,482,1 +block_hint,ElementsTransitionAndStore_Standard,483,484,0 +block_hint,ElementsTransitionAndStore_Standard,477,478,1 +block_hint,ElementsTransitionAndStore_Standard,479,480,0 +block_hint,ElementsTransitionAndStore_Standard,245,246,0 +block_hint,ElementsTransitionAndStore_Standard,349,350,0 +block_hint,ElementsTransitionAndStore_Standard,485,486,1 +block_hint,ElementsTransitionAndStore_Standard,251,252,1 
+block_hint,ElementsTransitionAndStore_Standard,38,39,1
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,739,740,0
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,1119,1120,1
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,742,743,1
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,896,897,0
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,684,685,0
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,324,325,0
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,830,831,1
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,237,238,0
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,241,242,1
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,1063,1064,1
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,686,687,0
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,62,63,1
+block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,95,96,0
+block_hint,KeyedHasIC_PolymorphicName,69,70,1
+block_hint,KeyedHasIC_PolymorphicName,28,29,1
+block_hint,KeyedHasIC_PolymorphicName,24,25,0
+block_hint,KeyedHasIC_PolymorphicName,26,27,0
+block_hint,KeyedHasIC_PolymorphicName,55,56,1
+block_hint,KeyedHasIC_PolymorphicName,89,90,0
+block_hint,KeyedHasIC_PolymorphicName,93,94,1
+block_hint,KeyedHasIC_PolymorphicName,30,31,0
+block_hint,KeyedHasIC_PolymorphicName,78,79,1
+block_hint,KeyedHasIC_PolymorphicName,14,15,1
+block_hint,KeyedHasIC_PolymorphicName,16,17,1
+block_hint,EnqueueMicrotask,4,5,1
+block_hint,EnqueueMicrotask,2,3,0
+block_hint,RunMicrotasks,18,19,1
+block_hint,RunMicrotasks,31,32,1
+block_hint,RunMicrotasks,65,66,0
+block_hint,RunMicrotasks,36,37,1
+block_hint,RunMicrotasks,85,86,0
+block_hint,RunMicrotasks,67,68,0
+block_hint,RunMicrotasks,38,39,1
+block_hint,HasProperty,137,138,1
+block_hint,HasProperty,139,140,1
+block_hint,HasProperty,261,262,0
+block_hint,HasProperty,211,212,1
+block_hint,HasProperty,254,255,0
+block_hint,HasProperty,97,98,0
+block_hint,HasProperty,234,235,1
+block_hint,HasProperty,123,124,1
+block_hint,HasProperty,141,142,1
+block_hint,HasProperty,199,200,0
+block_hint,HasProperty,201,202,0
+block_hint,HasProperty,101,102,0
+block_hint,HasProperty,99,100,0
+block_hint,HasProperty,250,251,0
+block_hint,HasProperty,268,269,0
+block_hint,HasProperty,257,258,1
+block_hint,HasProperty,106,107,0
+block_hint,HasProperty,275,276,0
+block_hint,HasProperty,280,281,0
+block_hint,HasProperty,266,267,0
+block_hint,HasProperty,203,204,1
+block_hint,HasProperty,42,43,1
+block_hint,HasProperty,65,66,0
+block_hint,HasProperty,44,45,1
+block_hint,HasProperty,239,240,1
+block_hint,HasProperty,48,49,0
+block_hint,HasProperty,270,271,0
+block_hint,HasProperty,228,229,0
+block_hint,HasProperty,38,39,0
+block_hint,DeleteProperty,38,39,1
+block_hint,DeleteProperty,62,63,0
+block_hint,DeleteProperty,40,41,0
+block_hint,DeleteProperty,66,67,1
+block_hint,DeleteProperty,80,81,0
+block_hint,DeleteProperty,73,74,0
+block_hint,DeleteProperty,64,65,1
+block_hint,DeleteProperty,56,57,1
+block_hint,DeleteProperty,42,43,1
+block_hint,DeleteProperty,83,84,0
+block_hint,DeleteProperty,85,86,0
+block_hint,DeleteProperty,77,78,0
+block_hint,DeleteProperty,75,76,0
+block_hint,DeleteProperty,47,48,0
+block_hint,DeleteProperty,49,50,0
+block_hint,DeleteProperty,87,88,0
+block_hint,DeleteProperty,71,72,1
+block_hint,DeleteProperty,20,21,0
+block_hint,DeleteProperty,54,55,0
+block_hint,DeleteProperty,7,8,1
+block_hint,DeleteProperty,9,10,1
+block_hint,DeleteProperty,11,12,1
+block_hint,DeleteProperty,13,14,1
+block_hint,DeleteProperty,15,16,1
+block_hint,SetDataProperties,136,137,1
+block_hint,SetDataProperties,263,264,1
+block_hint,SetDataProperties,261,262,1
+block_hint,SetDataProperties,144,145,0
+block_hint,SetDataProperties,316,317,0
+block_hint,SetDataProperties,146,147,0
+block_hint,SetDataProperties,59,60,0
+block_hint,SetDataProperties,341,342,0
+block_hint,SetDataProperties,267,268,0
+block_hint,SetDataProperties,385,386,1
+block_hint,SetDataProperties,277,278,0
+block_hint,SetDataProperties,752,753,0
+block_hint,SetDataProperties,762,763,1
+block_hint,SetDataProperties,750,751,0
+block_hint,SetDataProperties,748,749,0
+block_hint,SetDataProperties,659,660,0
+block_hint,SetDataProperties,451,452,1
+block_hint,SetDataProperties,221,222,1
+block_hint,SetDataProperties,87,88,1
+block_hint,SetDataProperties,223,224,0
+block_hint,SetDataProperties,513,514,0
+block_hint,SetDataProperties,515,516,0
+block_hint,SetDataProperties,519,520,1
+block_hint,SetDataProperties,449,450,0
+block_hint,SetDataProperties,329,330,1
+block_hint,SetDataProperties,326,327,0
+block_hint,SetDataProperties,158,159,0
+block_hint,SetDataProperties,399,400,0
+block_hint,SetDataProperties,447,448,0
+block_hint,SetDataProperties,352,353,0
+block_hint,SetDataProperties,226,227,1
+block_hint,SetDataProperties,93,94,1
+block_hint,SetDataProperties,521,522,0
+block_hint,SetDataProperties,95,96,0
+block_hint,SetDataProperties,97,98,0
+block_hint,SetDataProperties,617,618,0
+block_hint,SetDataProperties,523,524,1
+block_hint,SetDataProperties,525,526,0
+block_hint,SetDataProperties,527,528,1
+block_hint,SetDataProperties,529,530,0
+block_hint,SetDataProperties,673,674,0
+block_hint,SetDataProperties,531,532,1
+block_hint,SetDataProperties,577,578,0
+block_hint,SetDataProperties,675,676,0
+block_hint,SetDataProperties,620,621,1
+block_hint,SetDataProperties,539,540,1
+block_hint,SetDataProperties,541,542,0
+block_hint,SetDataProperties,543,544,0
+block_hint,SetDataProperties,545,546,1
+block_hint,SetDataProperties,535,536,0
+block_hint,SetDataProperties,657,658,0
+block_hint,SetDataProperties,555,556,1
+block_hint,SetDataProperties,292,293,1
+block_hint,SetDataProperties,99,100,0
+block_hint,SetDataProperties,437,438,0
+block_hint,SetDataProperties,241,242,0
+block_hint,SetDataProperties,279,280,1
+block_hint,SetDataProperties,204,205,0
+block_hint,SetDataProperties,61,62,0
+block_hint,ReturnReceiver,3,4,1
+block_hint,ArrayConstructorImpl,9,10,0
+block_hint,ArrayConstructorImpl,13,14,1
+block_hint,ArrayConstructorImpl,40,41,1
+block_hint,ArrayConstructorImpl,15,16,1
+block_hint,ArrayConstructorImpl,19,20,0
+block_hint,ArrayConstructorImpl,23,24,0
+block_hint,ArrayConstructorImpl,25,26,1
+block_hint,ArrayConstructorImpl,27,28,1
+block_hint,ArrayConstructorImpl,29,30,1
+block_hint,ArrayNoArgumentConstructor_PackedSmi_DontOverride,3,4,1
+block_hint,ArrayNoArgumentConstructor_PackedSmi_DontOverride,5,6,1
+block_hint,ArrayNoArgumentConstructor_HoleySmi_DontOverride,3,4,1
+block_hint,ArrayNoArgumentConstructor_HoleySmi_DontOverride,5,6,1
+block_hint,ArrayNoArgumentConstructor_Packed_DisableAllocationSites,3,4,1
+block_hint,ArrayNoArgumentConstructor_Packed_DisableAllocationSites,5,6,1
+block_hint,ArrayNoArgumentConstructor_Holey_DisableAllocationSites,3,4,1
+block_hint,ArrayNoArgumentConstructor_Holey_DisableAllocationSites,5,6,1
+block_hint,ArrayNoArgumentConstructor_PackedDouble_DisableAllocationSites,3,4,1
+block_hint,ArrayNoArgumentConstructor_PackedDouble_DisableAllocationSites,5,6,1
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DontOverride,5,6,1
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DontOverride,14,15,0
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DontOverride,16,17,0
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DontOverride,23,24,1
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DontOverride,8,9,0
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,5,6,1
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,14,15,0
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,16,17,0
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,21,22,1
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,23,24,1
+block_hint,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,8,9,0
+block_hint,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,5,6,1
+block_hint,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,14,15,0
+block_hint,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,25,26,1
+block_hint,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,23,24,1
+block_hint,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,12,13,1
+block_hint,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,8,9,0
+block_hint,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,10,11,0
+block_hint,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,5,6,1
+block_hint,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,14,15,0
+block_hint,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,16,17,0
+block_hint,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,25,26,1
+block_hint,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,23,24,1
+block_hint,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,12,13,1
+block_hint,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,8,9,0
+block_hint,ArrayIncludesSmi,120,121,0
+block_hint,ArrayIncludesSmi,43,44,1
+block_hint,ArrayIncludesSmi,108,109,1
+block_hint,ArrayIncludesSmi,75,76,1
+block_hint,ArrayIncludesSmi,122,123,0
+block_hint,ArrayIncludesSmiOrObject,113,114,1
+block_hint,ArrayIncludesSmiOrObject,38,39,0
+block_hint,ArrayIncludesSmiOrObject,107,108,0
+block_hint,ArrayIncludesSmiOrObject,28,29,1
+block_hint,ArrayIncludesSmiOrObject,84,85,1
+block_hint,ArrayIncludesSmiOrObject,86,87,1
+block_hint,ArrayIncludesSmiOrObject,117,118,0
+block_hint,ArrayIncludesSmiOrObject,131,132,1
+block_hint,ArrayIncludesSmiOrObject,125,126,0
+block_hint,ArrayIncludesSmiOrObject,98,99,0
+block_hint,ArrayIncludes,52,53,1
+block_hint,ArrayIncludes,49,50,0
+block_hint,ArrayIncludes,42,43,1
+block_hint,ArrayIncludes,44,45,1
+block_hint,ArrayIncludes,25,26,1
+block_hint,ArrayIncludes,17,18,1
+block_hint,ArrayIncludes,3,4,1
+block_hint,ArrayIncludes,47,48,1
+block_hint,ArrayIncludes,38,39,0
+block_hint,ArrayIncludes,27,28,1
+block_hint,ArrayIncludes,13,14,0
+block_hint,ArrayIncludes,19,20,1
+block_hint,ArrayIndexOfSmiOrObject,96,97,1
+block_hint,ArrayIndexOfSmiOrObject,88,89,0
+block_hint,ArrayIndexOfSmiOrObject,23,24,0
+block_hint,ArrayIndexOfSmiOrObject,37,38,0
+block_hint,ArrayIndexOfSmiOrObject,69,70,1
+block_hint,ArrayIndexOfSmiOrObject,43,44,0
+block_hint,ArrayIndexOfSmiOrObject,71,72,1
+block_hint,ArrayIndexOfSmiOrObject,110,111,1
+block_hint,ArrayIndexOfSmiOrObject,100,101,0
+block_hint,ArrayIndexOfSmiOrObject,77,78,0
+block_hint,ArrayIndexOfSmiOrObject,102,103,0
+block_hint,ArrayIndexOfSmiOrObject,79,80,0
+block_hint,ArrayIndexOfSmiOrObject,49,50,0
+block_hint,ArrayIndexOfSmiOrObject,29,30,0
+block_hint,ArrayIndexOfSmiOrObject,106,107,0
+block_hint,ArrayIndexOfSmiOrObject,83,84,0
+block_hint,ArrayIndexOfSmiOrObject,35,36,1
+block_hint,ArrayIndexOfSmiOrObject,94,95,1
+block_hint,ArrayIndexOfSmiOrObject,86,87,0
+block_hint,ArrayIndexOf,52,53,1
+block_hint,ArrayIndexOf,49,50,0
+block_hint,ArrayIndexOf,42,43,1
+block_hint,ArrayIndexOf,44,45,1
+block_hint,ArrayIndexOf,25,26,1
+block_hint,ArrayIndexOf,17,18,1
+block_hint,ArrayIndexOf,3,4,1
+block_hint,ArrayIndexOf,47,48,1
+block_hint,ArrayIndexOf,38,39,0
+block_hint,ArrayIndexOf,27,28,1
+block_hint,ArrayIndexOf,13,14,0
+block_hint,ArrayIndexOf,7,8,0
+block_hint,ArrayIndexOf,19,20,1
+block_hint,ArrayIndexOf,22,23,1
+block_hint,ArrayPrototypePop,49,50,1
+block_hint,ArrayPrototypePop,42,43,1
+block_hint,ArrayPrototypePop,47,48,1
+block_hint,ArrayPrototypePop,36,37,1
+block_hint,ArrayPrototypePop,26,27,1
+block_hint,ArrayPrototypePop,5,6,1
+block_hint,ArrayPrototypePop,45,46,1
+block_hint,ArrayPrototypePop,39,40,0
+block_hint,ArrayPrototypePop,20,21,1
+block_hint,ArrayPrototypePop,28,29,0
+block_hint,ArrayPrototypePop,7,8,1
+block_hint,ArrayPrototypePop,33,34,0
+block_hint,ArrayPrototypePop,16,17,1
+block_hint,ArrayPrototypePop,22,23,0
+block_hint,ArrayPrototypePop,30,31,1
+block_hint,ArrayPrototypePop,18,19,0
+block_hint,ArrayPrototypePop,14,15,1
+block_hint,ArrayPrototypePop,9,10,1
+block_hint,ArrayPrototypePop,11,12,0
+block_hint,ArrayPrototypePush,171,172,1
+block_hint,ArrayPrototypePush,151,152,1
+block_hint,ArrayPrototypePush,167,168,1
+block_hint,ArrayPrototypePush,136,137,1
+block_hint,ArrayPrototypePush,92,93,1
+block_hint,ArrayPrototypePush,17,18,1
+block_hint,ArrayPrototypePush,161,162,1
+block_hint,ArrayPrototypePush,140,141,0
+block_hint,ArrayPrototypePush,77,78,1
+block_hint,ArrayPrototypePush,79,80,1
+block_hint,ArrayPrototypePush,94,95,0
+block_hint,ArrayPrototypePush,19,20,1
+block_hint,ArrayPrototypePush,99,100,0
+block_hint,ArrayPrototypePush,116,117,0
+block_hint,ArrayPrototypePush,105,106,0
+block_hint,ArrayPrototypePush,68,69,0
+block_hint,ArrayPrototypePush,114,115,1
+block_hint,ArrayPrototypePush,28,29,0
+block_hint,ArrayPrototypePush,30,31,0
+block_hint,ArrayPrototypePush,34,35,0
+block_hint,ArrayPrototypePush,36,37,0
+block_hint,ArrayPrototypePush,96,97,0
+block_hint,ArrayPrototypePush,21,22,1
+block_hint,ArrayPrototypePush,38,39,1
+block_hint,ArrayPrototypePush,126,127,1
+block_hint,ArrayPrototypePush,128,129,0
+block_hint,ArrayPrototypePush,163,164,0
+block_hint,ArrayPrototypePush,165,166,0
+block_hint,ArrayPrototypePush,109,110,0
+block_hint,ArrayPrototypePush,72,73,0
+block_hint,ArrayPrototypePush,74,75,1
+block_hint,ArrayPrototypePush,142,143,0
+block_hint,ArrayPrototypePush,40,41,0
+block_hint,ArrayPrototypePush,118,119,0
+block_hint,ArrayPrototypePush,48,49,0
+block_hint,ArrayPrototypePush,158,159,1
+block_hint,ArrayPrototypePush,25,26,1
+block_hint,ArrayPrototypePush,50,51,1
+block_hint,ArrayPrototypePush,107,108,0
+block_hint,ArrayPrototypePush,70,71,0
+block_hint,ArrayPrototypePush,122,123,1
+block_hint,ArrayPrototypePush,52,53,0
+block_hint,ArrayPrototypePush,58,59,0
+block_hint,ArrayPrototypePush,60,61,0
+block_hint,ArrayPrototypePush,23,24,1
+block_hint,CloneFastJSArray,10,11,0
+block_hint,CloneFastJSArray,38,39,1
+block_hint,CloneFastJSArray,34,35,1
+block_hint,CloneFastJSArray,19,20,0
+block_hint,CloneFastJSArray,8,9,0
+block_hint,CloneFastJSArray,12,13,0
+block_hint,CloneFastJSArray,14,15,1
+block_hint,CloneFastJSArray,41,42,1
+block_hint,CloneFastJSArray,48,49,0
+block_hint,CloneFastJSArray,43,44,0
+block_hint,CloneFastJSArray,45,46,1
+block_hint,CloneFastJSArray,25,26,1
+block_hint,CloneFastJSArray,4,5,1
+block_hint,CloneFastJSArray,17,18,1
+block_hint,CloneFastJSArrayFillingHoles,76,77,0
+block_hint,CloneFastJSArrayFillingHoles,78,79,0
+block_hint,CloneFastJSArrayFillingHoles,91,92,0
+block_hint,CloneFastJSArrayFillingHoles,46,47,0
+block_hint,CloneFastJSArrayFillingHoles,95,96,1
+block_hint,CloneFastJSArrayFillingHoles,83,84,1
+block_hint,CloneFastJSArrayFillingHoles,16,17,0
+block_hint,CloneFastJSArrayFillingHoles,20,21,0
+block_hint,CloneFastJSArrayFillingHoles,85,86,0
+block_hint,CloneFastJSArrayFillingHoles,117,118,0
+block_hint,CloneFastJSArrayFillingHoles,80,81,1
+block_hint,CloneFastJSArrayFillingHoles,10,11,1
+block_hint,CloneFastJSArrayFillingHoles,55,56,1
+block_hint,ExtractFastJSArray,4,5,1
+block_hint,ExtractFastJSArray,27,28,0
+block_hint,ExtractFastJSArray,10,11,0
+block_hint,ExtractFastJSArray,42,43,1
+block_hint,ExtractFastJSArray,34,35,1
+block_hint,ExtractFastJSArray,20,21,0
+block_hint,ExtractFastJSArray,6,7,0
+block_hint,ExtractFastJSArray,12,13,0
+block_hint,ExtractFastJSArray,14,15,1
+block_hint,ExtractFastJSArray,36,37,1
+block_hint,ExtractFastJSArray,38,39,0
+block_hint,ExtractFastJSArray,16,17,1
+block_hint,ArrayPrototypeValues,14,15,1
+block_hint,ArrayPrototypeValues,11,12,1
+block_hint,ArrayPrototypeValues,8,9,1
+block_hint,ArrayPrototypeValues,3,4,1
+block_hint,ArrayPrototypeValues,6,7,1
+block_hint,ArrayIteratorPrototypeNext,140,141,1
+block_hint,ArrayIteratorPrototypeNext,88,89,1
+block_hint,ArrayIteratorPrototypeNext,90,91,1
+block_hint,ArrayIteratorPrototypeNext,196,197,0
+block_hint,ArrayIteratorPrototypeNext,255,256,0
+block_hint,ArrayIteratorPrototypeNext,224,225,0
+block_hint,ArrayIteratorPrototypeNext,206,207,0
+block_hint,ArrayIteratorPrototypeNext,164,165,0
+block_hint,ArrayIteratorPrototypeNext,118,119,1
+block_hint,ArrayIteratorPrototypeNext,268,269,0
+block_hint,ArrayIteratorPrototypeNext,248,249,0
+block_hint,ArrayIteratorPrototypeNext,159,160,0
+block_hint,ArrayIteratorPrototypeNext,109,110,1
+block_hint,ArrayIteratorPrototypeNext,6,7,1
+block_hint,ArrayIteratorPrototypeNext,8,9,1
+block_hint,ArrayIteratorPrototypeNext,142,143,0
+block_hint,ArrayIteratorPrototypeNext,124,125,1
+block_hint,ArrayIteratorPrototypeNext,60,61,1
+block_hint,ArrayIteratorPrototypeNext,80,81,1
+block_hint,AsyncFunctionEnter,41,42,1
+block_hint,AsyncFunctionEnter,28,29,0
+block_hint,AsyncFunctionEnter,13,14,0
+block_hint,AsyncFunctionEnter,33,34,1
+block_hint,AsyncFunctionEnter,26,27,1
+block_hint,AsyncFunctionEnter,9,10,0
+block_hint,AsyncFunctionEnter,3,4,1
+block_hint,AsyncFunctionEnter,36,37,1
+block_hint,AsyncFunctionEnter,22,23,0
+block_hint,AsyncFunctionEnter,5,6,1
+block_hint,AsyncFunctionEnter,24,25,1
+block_hint,AsyncFunctionEnter,7,8,0
+block_hint,AsyncFunctionResolve,2,3,0
+block_hint,AsyncFunctionAwaitCaught,24,25,1
+block_hint,AsyncFunctionAwaitCaught,19,20,1
+block_hint,AsyncFunctionAwaitCaught,2,3,1
+block_hint,AsyncFunctionAwaitCaught,30,31,1
+block_hint,AsyncFunctionAwaitCaught,32,33,0
+block_hint,AsyncFunctionAwaitCaught,28,29,1
+block_hint,AsyncFunctionAwaitCaught,8,9,1
+block_hint,AsyncFunctionAwaitCaught,10,11,1
+block_hint,AsyncFunctionAwaitCaught,12,13,1
+block_hint,AsyncFunctionAwaitCaught,14,15,1
+block_hint,AsyncFunctionAwaitCaught,22,23,0
+block_hint,AsyncFunctionAwaitUncaught,24,25,1
+block_hint,AsyncFunctionAwaitUncaught,19,20,1
+block_hint,AsyncFunctionAwaitUncaught,2,3,1
+block_hint,AsyncFunctionAwaitUncaught,30,31,1
+block_hint,AsyncFunctionAwaitUncaught,32,33,0
+block_hint,AsyncFunctionAwaitUncaught,28,29,1
+block_hint,AsyncFunctionAwaitUncaught,8,9,1
+block_hint,AsyncFunctionAwaitUncaught,10,11,1
+block_hint,AsyncFunctionAwaitUncaught,12,13,1
+block_hint,AsyncFunctionAwaitUncaught,14,15,1
+block_hint,AsyncFunctionAwaitUncaught,22,23,0
+block_hint,AsyncFunctionAwaitResolveClosure,8,9,1
+block_hint,AsyncFunctionAwaitResolveClosure,2,3,1
+block_hint,AsyncFunctionAwaitResolveClosure,6,7,0
+block_hint,DatePrototypeGetDate,10,11,1
+block_hint,DatePrototypeGetDate,7,8,1
+block_hint,DatePrototypeGetDate,5,6,1
+block_hint,DatePrototypeGetDate,2,3,1
+block_hint,DatePrototypeGetDay,10,11,1
+block_hint,DatePrototypeGetDay,7,8,1
+block_hint,DatePrototypeGetDay,5,6,1
+block_hint,DatePrototypeGetDay,2,3,1
+block_hint,DatePrototypeGetFullYear,10,11,1
+block_hint,DatePrototypeGetFullYear,7,8,1
+block_hint,DatePrototypeGetFullYear,5,6,1
+block_hint,DatePrototypeGetHours,10,11,1
+block_hint,DatePrototypeGetHours,7,8,1
+block_hint,DatePrototypeGetHours,5,6,1
+block_hint,DatePrototypeGetHours,2,3,1
+block_hint,DatePrototypeGetMinutes,10,11,1
+block_hint,DatePrototypeGetMinutes,7,8,1
+block_hint,DatePrototypeGetMinutes,5,6,1
+block_hint,DatePrototypeGetMinutes,2,3,1
+block_hint,DatePrototypeGetMonth,10,11,1
+block_hint,DatePrototypeGetMonth,7,8,1
+block_hint,DatePrototypeGetMonth,5,6,1
+block_hint,DatePrototypeGetMonth,2,3,1
+block_hint,DatePrototypeGetSeconds,10,11,1
+block_hint,DatePrototypeGetSeconds,7,8,1
+block_hint,DatePrototypeGetSeconds,5,6,1
+block_hint,DatePrototypeGetSeconds,2,3,1
+block_hint,DatePrototypeGetTime,8,9,1
+block_hint,DatePrototypeGetTime,5,6,1
+block_hint,DatePrototypeGetTime,2,3,1
+block_hint,CreateIterResultObject,4,5,1
+block_hint,CreateIterResultObject,11,12,1
+block_hint,CreateIterResultObject,6,7,0
+block_hint,CreateGeneratorObject,32,33,1
+block_hint,CreateGeneratorObject,34,35,1
+block_hint,CreateGeneratorObject,57,58,1
+block_hint,CreateGeneratorObject,54,55,0
+block_hint,CreateGeneratorObject,43,44,1
+block_hint,CreateGeneratorObject,24,25,0
+block_hint,CreateGeneratorObject,47,48,1
+block_hint,CreateGeneratorObject,40,41,1
+block_hint,CreateGeneratorObject,8,9,0
+block_hint,CreateGeneratorObject,51,52,1
+block_hint,CreateGeneratorObject,37,38,0
+block_hint,CreateGeneratorObject,12,13,0
+block_hint,GeneratorPrototypeNext,19,20,1
+block_hint,GeneratorPrototypeNext,11,12,1
+block_hint,GeneratorPrototypeNext,13,14,1
+block_hint,GeneratorPrototypeNext,5,6,0
+block_hint,GeneratorPrototypeNext,7,8,0
+block_hint,SuspendGeneratorBaseline,19,20,1
+block_hint,SuspendGeneratorBaseline,5,6,1
+block_hint,SuspendGeneratorBaseline,11,12,1
+block_hint,SuspendGeneratorBaseline,7,8,1
+block_hint,SuspendGeneratorBaseline,13,14,0
+block_hint,ResumeGeneratorBaseline,11,12,1
+block_hint,ResumeGeneratorBaseline,4,5,1
+block_hint,ResumeGeneratorBaseline,6,7,0
+block_hint,GlobalIsFinite,9,10,1
+block_hint,GlobalIsNaN,9,10,1
+block_hint,GlobalIsNaN,11,12,1
+block_hint,LoadIC,373,374,1
+block_hint,LoadIC,139,140,0
+block_hint,LoadIC,61,62,0
+block_hint,LoadIC,233,234,0
+block_hint,LoadIC,350,351,1
+block_hint,LoadIC,235,236,0
+block_hint,LoadIC,399,400,1
+block_hint,LoadIC,396,397,0
+block_hint,LoadIC,388,389,1
+block_hint,LoadIC,295,296,1
+block_hint,LoadIC,100,101,1
+block_hint,LoadIC,281,282,0
+block_hint,LoadIC,324,325,0
+block_hint,LoadIC,141,142,1
+block_hint,LoadIC,361,362,1
+block_hint,LoadIC,102,103,0
+block_hint,LoadIC,21,22,1
+block_hint,LoadIC,64,65,0
+block_hint,LoadIC,143,144,0
+block_hint,LoadIC,313,314,1
+block_hint,LoadIC,23,24,1
+block_hint,LoadIC,173,174,0
+block_hint,LoadIC,367,368,0
+block_hint,LoadIC,369,370,0
+block_hint,LoadIC,322,323,0
+block_hint,LoadIC,129,130,0
+block_hint,LoadIC,51,52,1
+block_hint,LoadIC,209,210,0
+block_hint,LoadIC,84,85,0
+block_hint,LoadIC,46,47,0
+block_hint,LoadIC,363,364,1
+block_hint,LoadIC,114,115,0
+block_hint,LoadIC,183,184,0
+block_hint,LoadIC,44,45,1
+block_hint,LoadIC,76,77,0
+block_hint,LoadIC,271,272,0
+block_hint,LoadIC,315,316,1
+block_hint,LoadIC,27,28,0
+block_hint,LoadIC,179,180,1
+block_hint,LoadIC,181,182,1
+block_hint,LoadIC,175,176,1
+block_hint,LoadIC,177,178,1
+block_hint,LoadIC,133,134,1
+block_hint,LoadIC,135,136,0
+block_hint,LoadIC_Megamorphic,367,368,1
+block_hint,LoadIC_Megamorphic,364,365,0
+block_hint,LoadIC_Megamorphic,356,357,1
+block_hint,LoadIC_Megamorphic,260,261,1
+block_hint,LoadIC_Megamorphic,262,263,1
+block_hint,LoadIC_Megamorphic,258,259,0
+block_hint,LoadIC_Megamorphic,58,59,0
+block_hint,LoadIC_Megamorphic,299,300,0
+block_hint,LoadIC_Megamorphic,130,131,1
+block_hint,LoadIC_Megamorphic,285,286,0
+block_hint,LoadIC_Megamorphic,331,332,1
+block_hint,LoadIC_Megamorphic,95,96,0
+block_hint,LoadIC_Megamorphic,132,133,0
+block_hint,LoadIC_Megamorphic,287,288,1
+block_hint,LoadIC_Megamorphic,22,23,1
+block_hint,LoadIC_Megamorphic,162,163,0
+block_hint,LoadIC_Megamorphic,292,293,0
+block_hint,LoadIC_Megamorphic,252,253,1
+block_hint,LoadIC_Megamorphic,337,338,0
+block_hint,LoadIC_Megamorphic,339,340,0
+block_hint,LoadIC_Megamorphic,296,297,0
+block_hint,LoadIC_Megamorphic,122,123,0
+block_hint,LoadIC_Megamorphic,50,51,1
+block_hint,LoadIC_Megamorphic,45,46,0
+block_hint,LoadIC_Megamorphic,248,249,0
+block_hint,LoadIC_Megamorphic,289,290,1
+block_hint,LoadIC_Megamorphic,26,27,0
+block_hint,LoadIC_Megamorphic,24,25,0
+block_hint,LoadIC_Megamorphic,164,165,1
+block_hint,LoadIC_Megamorphic,166,167,1
+block_hint,LoadIC_Megamorphic,126,127,1
+block_hint,LoadIC_Noninlined,376,377,1
+block_hint,LoadIC_Noninlined,132,133,0
+block_hint,LoadIC_Noninlined,384,385,1
+block_hint,LoadIC_Noninlined,381,382,0
+block_hint,LoadIC_Noninlined,371,372,1
+block_hint,LoadIC_Noninlined,270,271,0
+block_hint,LoadIC_Noninlined,58,59,0
+block_hint,LoadIC_Noninlined,313,314,0
+block_hint,LoadIC_Noninlined,142,143,1
+block_hint,LoadIC_Noninlined,297,298,0
+block_hint,LoadIC_Noninlined,22,23,1
+block_hint,LoadIC_Noninlined,174,175,0
+block_hint,LoadIC_Noninlined,39,40,1
+block_hint,LoadIC_Noninlined,260,261,0
+block_hint,LoadIC_Noninlined,301,302,1
+block_hint,LoadIC_Noninlined,26,27,0
+block_hint,LoadIC_Noninlined,24,25,0
+block_hint,LoadICTrampoline,3,4,1
+block_hint,LoadICTrampoline_Megamorphic,3,4,1
+block_hint,LoadSuperIC,533,534,0
+block_hint,LoadSuperIC,253,254,0
+block_hint,LoadSuperIC,569,570,1
+block_hint,LoadSuperIC,443,444,0
+block_hint,LoadSuperIC,77,78,0
+block_hint,LoadSuperIC,545,546,0
+block_hint,LoadSuperIC,255,256,1
+block_hint,LoadSuperIC,520,521,0
+block_hint,LoadSuperIC,43,44,1
+block_hint,LoadSuperIC,555,556,0
+block_hint,LoadSuperIC,287,288,0
+block_hint,LoadSuperIC,62,63,1
+block_hint,LoadSuperIC,432,433,0
+block_hint,LoadSuperIC,430,431,0
+block_hint,LoadSuperIC,524,525,1
+block_hint,LoadSuperIC,47,48,0
+block_hint,LoadSuperIC,681,682,0
+block_hint,KeyedLoadIC,636,637,1
+block_hint,KeyedLoadIC,258,259,0
+block_hint,KeyedLoadIC,250,251,0
+block_hint,KeyedLoadIC,384,385,0
+block_hint,KeyedLoadIC,501,502,1
+block_hint,KeyedLoadIC,650,651,0
+block_hint,KeyedLoadIC,630,631,0
+block_hint,KeyedLoadIC,585,586,1
+block_hint,KeyedLoadIC,390,391,1
+block_hint,KeyedLoadIC,388,389,1
+block_hint,KeyedLoadIC,669,670,0
+block_hint,KeyedLoadIC,671,672,0
+block_hint,KeyedLoadIC,634,635,0
+block_hint,KeyedLoadIC,587,588,1
+block_hint,KeyedLoadIC,152,153,1
+block_hint,KeyedLoadIC,628,629,0
+block_hint,KeyedLoadIC,482,483,0
+block_hint,KeyedLoadIC,106,107,1
+block_hint,KeyedLoadIC,685,686,0
+block_hint,KeyedLoadIC,687,688,0
+block_hint,KeyedLoadIC,642,643,1
+block_hint,KeyedLoadIC,294,295,1
+block_hint,KeyedLoadIC,296,297,0
+block_hint,KeyedLoadIC,681,682,1
+block_hint,KeyedLoadIC,528,529,1
+block_hint,KeyedLoadIC,626,627,0
+block_hint,KeyedLoadIC,615,616,0
+block_hint,KeyedLoadIC,565,566,1
+block_hint,KeyedLoadIC,316,317,1
+block_hint,KeyedLoadIC,68,69,0
+block_hint,KeyedLoadIC,306,307,0
+block_hint,KeyedLoadIC,532,533,1
+block_hint,KeyedLoadIC,308,309,1
+block_hint,KeyedLoadIC,222,223,0
+block_hint,KeyedLoadIC,178,179,0
+block_hint,KeyedLoadIC,567,568,0
+block_hint,KeyedLoadIC,457,458,1
+block_hint,KeyedLoadIC,118,119,0
+block_hint,KeyedLoadIC,120,121,0
+block_hint,KeyedLoadIC,405,406,1
+block_hint,KeyedLoadIC,618,619,1
+block_hint,KeyedLoadIC,246,247,1
+block_hint,KeyedLoadIC,569,570,0
+block_hint,KeyedLoadIC,519,520,0
+block_hint,KeyedLoadIC,443,444,1
+block_hint,KeyedLoadIC,690,691,0
+block_hint,KeyedLoadIC,122,123,1
+block_hint,KeyedLoadIC,322,323,0
+block_hint,KeyedLoadIC,324,325,1
+block_hint,KeyedLoadIC,70,71,0
+block_hint,KeyedLoadIC_Megamorphic,497,498,1
+block_hint,KeyedLoadIC_Megamorphic,499,500,0
+block_hint,KeyedLoadIC_Megamorphic,1245,1246,0
+block_hint,KeyedLoadIC_Megamorphic,1217,1218,1
+block_hint,KeyedLoadIC_Megamorphic,1169,1170,0
+block_hint,KeyedLoadIC_Megamorphic,1241,1242,1
+block_hint,KeyedLoadIC_Megamorphic,1247,1248,1
+block_hint,KeyedLoadIC_Megamorphic,1219,1220,1
+block_hint,KeyedLoadIC_Megamorphic,1243,1244,0
+block_hint,KeyedLoadIC_Megamorphic,1129,1130,0
+block_hint,KeyedLoadIC_Megamorphic,940,941,1
+block_hint,KeyedLoadIC_Megamorphic,938,939,1
+block_hint,KeyedLoadIC_Megamorphic,529,530,1
+block_hint,KeyedLoadIC_Megamorphic,1211,1212,0
+block_hint,KeyedLoadIC_Megamorphic,1213,1214,0
+block_hint,KeyedLoadIC_Megamorphic,1179,1180,0
+block_hint,KeyedLoadIC_Megamorphic,1177,1178,1
+block_hint,KeyedLoadIC_Megamorphic,1207,1208,0
+block_hint,KeyedLoadIC_Megamorphic,1173,1174,0
+block_hint,KeyedLoadIC_Megamorphic,942,943,1
+block_hint,KeyedLoadIC_Megamorphic,533,534,1
+block_hint,KeyedLoadIC_Megamorphic,952,953,0
+block_hint,KeyedLoadIC_Megamorphic,649,650,0
+block_hint,KeyedLoadIC_Megamorphic,1197,1198,0
+block_hint,KeyedLoadIC_Megamorphic,1103,1104,0
+block_hint,KeyedLoadIC_Megamorphic,1155,1156,0
+block_hint,KeyedLoadIC_Megamorphic,234,235,1
+block_hint,KeyedLoadIC_Megamorphic,1105,1106,0
+block_hint,KeyedLoadIC_Megamorphic,236,237,1
+block_hint,KeyedLoadIC_Megamorphic,1085,1086,0
+block_hint,KeyedLoadIC_Megamorphic,1231,1232,1
+block_hint,KeyedLoadIC_Megamorphic,1083,1084,0
+block_hint,KeyedLoadIC_Megamorphic,1081,1082,0
+block_hint,KeyedLoadIC_Megamorphic,991,992,1
+block_hint,KeyedLoadIC_Megamorphic,240,241,0
+block_hint,KeyedLoadIC_Megamorphic,126,127,1
+block_hint,KeyedLoadIC_Megamorphic,198,199,0
+block_hint,KeyedLoadIC_Megamorphic,653,654,0
+block_hint,KeyedLoadIC_Megamorphic,559,560,0
+block_hint,KeyedLoadIC_Megamorphic,1054,1055,0
+block_hint,KeyedLoadIC_Megamorphic,994,995,0
+block_hint,KeyedLoadIC_Megamorphic,657,658,1
+block_hint,KeyedLoadIC_Megamorphic,675,676,0
+block_hint,KeyedLoadIC_Megamorphic,1199,1200,0
+block_hint,KeyedLoadIC_Megamorphic,659,660,0
+block_hint,KeyedLoadIC_Megamorphic,1107,1108,0
+block_hint,KeyedLoadIC_Megamorphic,661,662,1
+block_hint,KeyedLoadIC_Megamorphic,251,252,1
+block_hint,KeyedLoadIC_Megamorphic,663,664,0
+block_hint,KeyedLoadIC_Megamorphic,253,254,1
+block_hint,KeyedLoadIC_Megamorphic,855,856,0
+block_hint,KeyedLoadIC_Megamorphic,998,999,1
+block_hint,KeyedLoadIC_Megamorphic,257,258,0
+block_hint,KeyedLoadIC_Megamorphic,665,666,0
+block_hint,KeyedLoadIC_Megamorphic,1091,1092,0
+block_hint,KeyedLoadIC_Megamorphic,1193,1194,0
+block_hint,KeyedLoadIC_Megamorphic,1233,1234,1
+block_hint,KeyedLoadIC_Megamorphic,1089,1090,0
+block_hint,KeyedLoadIC_Megamorphic,128,129,1
+block_hint,KeyedLoadIC_Megamorphic,206,207,0
+block_hint,KeyedLoadIC_Megamorphic,936,937,0
+block_hint,KeyedLoadIC_Megamorphic,684,685,0
+block_hint,KeyedLoadIC_Megamorphic,1201,1202,0
+block_hint,KeyedLoadIC_Megamorphic,1237,1238,0
+block_hint,KeyedLoadIC_Megamorphic,1159,1160,0
+block_hint,KeyedLoadIC_Megamorphic,857,858,1
+block_hint,KeyedLoadIC_Megamorphic,269,270,1
+block_hint,KeyedLoadIC_Megamorphic,1227,1228,0
+block_hint,KeyedLoadIC_Megamorphic,271,272,1
+block_hint,KeyedLoadIC_Megamorphic,1125,1126,0
+block_hint,KeyedLoadIC_Megamorphic,1239,1240,0
+block_hint,KeyedLoadIC_Megamorphic,1119,1120,1
+block_hint,KeyedLoadIC_Megamorphic,749,750,1
+block_hint,KeyedLoadIC_Megamorphic,1028,1029,1
+block_hint,KeyedLoadIC_Megamorphic,745,746,0
+block_hint,KeyedLoadIC_Megamorphic,117,118,0
+block_hint,KeyedLoadIC_Megamorphic,890,891,0
+block_hint,KeyedLoadIC_Megamorphic,339,340,1
+block_hint,KeyedLoadIC_Megamorphic,876,877,0
+block_hint,KeyedLoadIC_Megamorphic,81,82,1
+block_hint,KeyedLoadIC_Megamorphic,369,370,0
+block_hint,KeyedLoadIC_Megamorphic,737,738,0
+block_hint,KeyedLoadIC_Megamorphic,98,99,1
+block_hint,KeyedLoadIC_Megamorphic,1071,1072,0
+block_hint,KeyedLoadIC_Megamorphic,1225,1226,1
+block_hint,KeyedLoadIC_Megamorphic,1069,1070,0
+block_hint,KeyedLoadIC_Megamorphic,1137,1138,1
+block_hint,KeyedLoadIC_Megamorphic,1009,1010,1
+block_hint,KeyedLoadIC_Megamorphic,295,296,0
+block_hint,KeyedLoadIC_Megamorphic,120,121,1
+block_hint,KeyedLoadIC_Megamorphic,182,183,0
+block_hint,KeyedLoadIC_Megamorphic,971,972,0
+block_hint,KeyedLoadIC_Megamorphic,828,829,1
+block_hint,KeyedLoadIC_Megamorphic,186,187,1
+block_hint,KeyedLoadIC_Megamorphic,690,691,0
+block_hint,KeyedLoadIC_Megamorphic,525,526,0
+block_hint,KeyedLoadIC_Megamorphic,1042,1043,0
+block_hint,KeyedLoadIC_Megamorphic,1012,1013,0
+block_hint,KeyedLoadIC_Megamorphic,694,695,1
+block_hint,KeyedLoadIC_Megamorphic,869,870,1
+block_hint,KeyedLoadIC_Megamorphic,1203,1204,0
+block_hint,KeyedLoadIC_Megamorphic,308,309,1
+block_hint,KeyedLoadIC_Megamorphic,871,872,0
+block_hint,KeyedLoadIC_Megamorphic,1077,1078,0
+block_hint,KeyedLoadIC_Megamorphic,1229,1230,1
+block_hint,KeyedLoadIC_Megamorphic,1075,1076,0
+block_hint,KeyedLoadIC_Megamorphic,190,191,0
+block_hint,KeyedLoadIC_Megamorphic,960,961,0
+block_hint,KeyedLoadIC_Megamorphic,1166,1167,0
+block_hint,KeyedLoadIC_Megamorphic,918,919,1
+block_hint,KeyedLoadIC_Megamorphic,132,133,0
+block_hint,KeyedLoadIC_Megamorphic,727,728,0
+block_hint,KeyedLoadIC_Megamorphic,1037,1038,0
+block_hint,KeyedLoadIC_Megamorphic,563,564,1
+block_hint,KeyedLoadIC_Megamorphic,322,323,0
+block_hint,KeyedLoadIC_Megamorphic,723,724,0
+block_hint,KeyedLoadIC_Megamorphic,565,566,0
+block_hint,KeyedLoadIC_Megamorphic,134,135,1
+block_hint,KeyedLoadIC_Megamorphic,573,574,0
+block_hint,KeyedLoadIC_Megamorphic,922,923,1
+block_hint,KeyedLoadIC_Megamorphic,493,494,0
+block_hint,KeyedLoadIC_Megamorphic,958,959,0
+block_hint,KeyedLoadIC_Megamorphic,731,732,1
+block_hint,KeyedLoadIC_Megamorphic,581,582,0
+block_hint,KeyedLoadIC_Megamorphic,216,217,0
+block_hint,KeyedLoadIC_Megamorphic,491,492,1
+block_hint,KeyedLoadIC_Megamorphic,583,584,1
+block_hint,KeyedLoadIC_Megamorphic,150,151,1
+block_hint,KeyedLoadICTrampoline,3,4,1
+block_hint,KeyedLoadICTrampoline_Megamorphic,3,4,1
+block_hint,StoreGlobalIC,72,73,0
+block_hint,StoreGlobalIC,229,230,1
+block_hint,StoreGlobalIC,268,269,0
+block_hint,StoreGlobalIC,144,145,0
+block_hint,StoreGlobalIC,205,206,0
+block_hint,StoreGlobalIC,92,93,0
+block_hint,StoreGlobalIC,146,147,1
+block_hint,StoreGlobalIC,94,95,1
+block_hint,StoreGlobalIC,15,16,1
+block_hint,StoreGlobalICTrampoline,3,4,1
+block_hint,StoreIC,338,339,1
+block_hint,StoreIC,144,145,0
+block_hint,StoreIC,69,70,0
+block_hint,StoreIC,208,209,0
+block_hint,StoreIC,210,211,1
+block_hint,StoreIC,395,396,1
+block_hint,StoreIC,386,387,0
+block_hint,StoreIC,240,241,1
+block_hint,StoreIC,242,243,1
+block_hint,StoreIC,74,75,1
+block_hint,StoreIC,250,251,1
+block_hint,StoreIC,108,109,0
+block_hint,StoreIC,35,36,0
+block_hint,StoreIC,316,317,1
+block_hint,StoreIC,92,93,0
+block_hint,StoreIC,146,147,0
+block_hint,StoreIC,94,95,1
+block_hint,StoreIC,150,151,0
+block_hint,StoreIC,16,17,1
+block_hint,StoreIC,96,97,0
+block_hint,StoreIC,18,19,0
+block_hint,StoreIC,359,360,0
+block_hint,StoreIC,160,161,1
+block_hint,StoreIC,162,163,1
+block_hint,StoreIC,327,328,1
+block_hint,StoreIC,164,165,0
+block_hint,StoreIC,105,106,0
+block_hint,StoreIC,103,104,1
+block_hint,StoreIC,320,321,1
+block_hint,StoreIC,23,24,0
+block_hint,StoreIC,152,153,1
+block_hint,StoreIC,287,288,0
+block_hint,StoreIC,154,155,0
+block_hint,StoreIC,156,157,1
+block_hint,StoreIC,323,324,1
+block_hint,StoreIC,25,26,1
+block_hint,StoreIC,158,159,0
+block_hint,StoreIC,325,326,1
+block_hint,StoreIC,31,32,0
+block_hint,StoreIC,29,30,1
+block_hint,StoreIC,227,228,1
+block_hint,StoreIC,63,64,0
+block_hint,StoreIC,291,292,0
+block_hint,StoreIC,166,167,1
+block_hint,StoreIC,293,294,0
+block_hint,StoreIC,312,313,1
+block_hint,StoreIC,76,77,0
+block_hint,StoreIC,246,247,0
+block_hint,StoreIC,176,177,0
+block_hint,StoreIC,43,44,1
+block_hint,StoreIC,112,113,0
+block_hint,StoreIC,178,179,0
+block_hint,StoreIC,271,272,0
+block_hint,StoreIC,125,126,1
+block_hint,StoreIC,371,372,0
+block_hint,StoreIC,267,268,1
+block_hint,StoreIC,45,46,1
+block_hint,StoreIC,47,48,1
+block_hint,StoreIC,121,122,0
+block_hint,StoreIC,49,50,1
+block_hint,StoreIC,123,124,0
+block_hint,StoreIC,51,52,1
+block_hint,StoreIC,80,81,0
+block_hint,StoreIC,53,54,1
+block_hint,StoreIC,55,56,1
+block_hint,StoreIC,333,334,0
+block_hint,StoreIC,57,58,1
+block_hint,StoreIC,184,185,0
+block_hint,StoreIC,186,187,0
+block_hint,StoreIC,229,230,0
+block_hint,StoreIC,133,134,0
+block_hint,StoreIC,299,300,0
+block_hint,StoreIC,190,191,1
+block_hint,StoreIC,192,193,0
+block_hint,StoreIC,281,282,0
+block_hint,StoreIC,365,366,0
+block_hint,StoreIC,301,302,1
+block_hint,StoreIC,194,195,1
+block_hint,StoreIC,200,201,1
+block_hint,StoreIC,202,203,0
+block_hint,StoreIC,204,205,0
+block_hint,StoreIC,206,207,1
+block_hint,StoreIC,198,199,1
+block_hint,StoreIC,196,197,0
+block_hint,StoreIC,384,385,0
+block_hint,StoreIC,388,389,1
+block_hint,StoreIC,357,358,1
+block_hint,StoreIC,314,315,1
+block_hint,StoreIC,84,85,0
+block_hint,StoreIC,139,140,0
+block_hint,StoreIC,231,232,1
+block_hint,StoreICTrampoline,3,4,1
+block_hint,DefineNamedOwnIC,329,330,1
+block_hint,DefineNamedOwnIC,145,146,0
+block_hint,DefineNamedOwnIC,300,301,1
+block_hint,DefineNamedOwnIC,203,204,0
+block_hint,DefineNamedOwnIC,69,70,0
+block_hint,DefineNamedOwnIC,205,206,0
+block_hint,DefineNamedOwnIC,326,327,0
+block_hint,DefineNamedOwnIC,243,244,1
+block_hint,DefineNamedOwnIC,93,94,0
+block_hint,DefineNamedOwnIC,17,18,0
+block_hint,DefineNamedOwnIC,350,351,0
+block_hint,DefineNamedOwnIC,282,283,1
+block_hint,DefineNamedOwnIC,157,158,1
+block_hint,DefineNamedOwnIC,159,160,1
+block_hint,DefineNamedOwnIC,254,255,1
+block_hint,DefineNamedOwnIC,32,33,0
+block_hint,DefineNamedOwnIC,246,247,1
+block_hint,DefineNamedOwnIC,22,23,0
+block_hint,DefineNamedOwnIC,149,150,1
+block_hint,DefineNamedOwnIC,352,353,0
+block_hint,DefineNamedOwnIC,280,281,0
+block_hint,DefineNamedOwnIC,151,152,0
+block_hint,DefineNamedOwnIC,153,154,1
+block_hint,DefineNamedOwnIC,248,249,1
+block_hint,DefineNamedOwnIC,26,27,0
+block_hint,DefineNamedOwnIC,155,156,0
+block_hint,DefineNamedOwnIC,250,251,1
+block_hint,DefineNamedOwnIC,30,31,0
+block_hint,KeyedStoreIC,401,402,1
+block_hint,KeyedStoreIC,173,174,0
+block_hint,KeyedStoreIC,169,170,0
+block_hint,KeyedStoreIC,239,240,0
+block_hint,KeyedStoreIC,171,172,1
+block_hint,KeyedStoreIC,87,88,1
+block_hint,KeyedStoreIC,398,399,1
+block_hint,KeyedStoreIC,109,110,0
+block_hint,KeyedStoreIC,22,23,0
+block_hint,KeyedStoreIC,428,429,0
+block_hint,KeyedStoreIC,181,182,1
+block_hint,KeyedStoreIC,430,431,0
+block_hint,KeyedStoreIC,351,352,0
+block_hint,KeyedStoreIC,298,299,1
+block_hint,KeyedStoreIC,31,32,0
+block_hint,KeyedStoreIC,272,273,0
+block_hint,KeyedStoreIC,355,356,0
+block_hint,KeyedStoreIC,195,196,1
+block_hint,KeyedStoreIC,260,261,1
+block_hint,KeyedStoreIC,432,433,0
+block_hint,KeyedStoreIC,329,330,0
+block_hint,KeyedStoreIC,137,138,1
+block_hint,KeyedStoreIC,45,46,1
+block_hint,KeyedStoreIC,197,198,0
+block_hint,KeyedStoreIC,47,48,0
+block_hint,KeyedStoreIC,215,216,0
+block_hint,KeyedStoreIC,361,362,1
+block_hint,KeyedStoreIC,363,364,0
+block_hint,KeyedStoreIC,221,222,1
+block_hint,KeyedStoreIC,223,224,0
+block_hint,KeyedStoreIC,345,346,0
+block_hint,KeyedStoreIC,367,368,0
+block_hint,KeyedStoreIC,434,435,0
+block_hint,KeyedStoreIC,365,366,1
+block_hint,KeyedStoreIC,231,232,1
+block_hint,KeyedStoreIC,233,234,0
+block_hint,KeyedStoreIC,235,236,0
+block_hint,KeyedStoreIC,237,238,1
+block_hint,KeyedStoreIC,449,450,0
+block_hint,KeyedStoreIC,426,427,1
+block_hint,KeyedStoreIC,278,279,0
+block_hint,KeyedStoreIC,377,378,1
+block_hint,KeyedStoreIC,97,98,0
+block_hint,KeyedStoreIC,164,165,0
+block_hint,KeyedStoreICTrampoline,3,4,1
+block_hint,DefineKeyedOwnIC,392,393,1
+block_hint,DefineKeyedOwnIC,174,175,0
+block_hint,DefineKeyedOwnIC,170,171,1
+block_hint,StoreInArrayLiteralIC,30,31,1
+block_hint,StoreInArrayLiteralIC,19,20,0
+block_hint,StoreInArrayLiteralIC,23,24,0
+block_hint,StoreInArrayLiteralIC,14,15,1
+block_hint,StoreInArrayLiteralIC,16,17,1
+block_hint,StoreInArrayLiteralIC,8,9,1
+block_hint,StoreInArrayLiteralIC,4,5,1
+block_hint,LoadGlobalIC,62,63,0
+block_hint,LoadGlobalIC,16,17,1
+block_hint,LoadGlobalIC,18,19,1
+block_hint,LoadGlobalIC,20,21,1
+block_hint,LoadGlobalIC,194,195,0
+block_hint,LoadGlobalIC,14,15,0
+block_hint,LoadGlobalIC,111,112,1
+block_hint,LoadGlobalICInsideTypeof,62,63,0
+block_hint,LoadGlobalICInsideTypeof,196,197,1
+block_hint,LoadGlobalICInsideTypeof,14,15,0
+block_hint,LoadGlobalICInsideTypeof,111,112,0
+block_hint,LoadGlobalICInsideTypeof,22,23,1
+block_hint,LoadGlobalICInsideTypeof,24,25,1
+block_hint,LoadGlobalICInsideTypeof,257,258,1
+block_hint,LoadGlobalICInsideTypeof,211,212,0
+block_hint,LoadGlobalICInsideTypeof,60,61,0
+block_hint,LoadGlobalICInsideTypeof,225,226,0
+block_hint,LoadGlobalICInsideTypeof,113,114,1
+block_hint,LoadGlobalICInsideTypeof,26,27,1
+block_hint,LoadGlobalICInsideTypeof,234,235,1
+block_hint,LoadGlobalICInsideTypeof,202,203,0
+block_hint,LoadGlobalICInsideTypeof,45,46,0
+block_hint,LoadGlobalICInsideTypeof,43,44,1
+block_hint,LoadGlobalICTrampoline,3,4,1
+block_hint,LoadGlobalICInsideTypeofTrampoline,3,4,1
+block_hint,LookupGlobalICBaseline,3,4,1
+block_hint,LookupGlobalICBaseline,14,15,1
+block_hint,LookupGlobalICBaseline,5,6,1
+block_hint,LookupGlobalICBaseline,11,12,1
+block_hint,LookupGlobalICBaseline,7,8,1
+block_hint,LookupGlobalICBaseline,9,10,0
+block_hint,KeyedHasIC,261,262,1
+block_hint,KeyedHasIC,125,126,0
+block_hint,KeyedHasIC,117,118,0
+block_hint,KeyedHasIC,239,240,0
+block_hint,KeyedHasIC,165,166,0
+block_hint,KeyedHasIC,77,78,0
+block_hint,KeyedHasIC,119,120,1
+block_hint,KeyedHasIC,167,168,0
+block_hint,KeyedHasIC,123,124,1
+block_hint,KeyedHasIC,79,80,1
+block_hint,KeyedHasIC,197,198,0
+block_hint,KeyedHasIC,221,222,0
+block_hint,KeyedHasIC,281,282,0
+block_hint,KeyedHasIC,279,280,0
+block_hint,KeyedHasIC,161,162,1
+block_hint,KeyedHasIC,61,62,0
+block_hint,KeyedHasIC_Megamorphic,137,138,1
+block_hint,KeyedHasIC_Megamorphic,139,140,1
+block_hint,KeyedHasIC_Megamorphic,261,262,0
+block_hint,KeyedHasIC_Megamorphic,211,212,1
+block_hint,KeyedHasIC_Megamorphic,254,255,0
+block_hint,KeyedHasIC_Megamorphic,97,98,0
+block_hint,KeyedHasIC_Megamorphic,234,235,1
+block_hint,KeyedHasIC_Megamorphic,123,124,1
+block_hint,KeyedHasIC_Megamorphic,141,142,1
+block_hint,KeyedHasIC_Megamorphic,199,200,0
+block_hint,KeyedHasIC_Megamorphic,201,202,0
+block_hint,KeyedHasIC_Megamorphic,101,102,0
+block_hint,KeyedHasIC_Megamorphic,99,100,0
+block_hint,KeyedHasIC_Megamorphic,250,251,0
+block_hint,KeyedHasIC_Megamorphic,268,269,0
+block_hint,KeyedHasIC_Megamorphic,106,107,0
+block_hint,KeyedHasIC_Megamorphic,275,276,0
+block_hint,KeyedHasIC_Megamorphic,280,281,0
+block_hint,KeyedHasIC_Megamorphic,266,267,0
+block_hint,KeyedHasIC_Megamorphic,203,204,0
+block_hint,KeyedHasIC_Megamorphic,44,45,1
+block_hint,KeyedHasIC_Megamorphic,63,64,0
+block_hint,KeyedHasIC_Megamorphic,239,240,1
+block_hint,KeyedHasIC_Megamorphic,48,49,0
+block_hint,KeyedHasIC_Megamorphic,270,271,0
+block_hint,KeyedHasIC_Megamorphic,228,229,0
+block_hint,KeyedHasIC_Megamorphic,87,88,0
+block_hint,KeyedHasIC_Megamorphic,155,156,0
+block_hint,KeyedHasIC_Megamorphic,196,197,0
+block_hint,KeyedHasIC_Megamorphic,59,60,0
+block_hint,KeyedHasIC_Megamorphic,222,223,0
+block_hint,KeyedHasIC_Megamorphic,57,58,1
+block_hint,IterableToList,42,43,1
+block_hint,IterableToList,44,45,1
+block_hint,IterableToList,46,47,1
+block_hint,IterableToList,36,37,1
+block_hint,IterableToList,48,49,1
+block_hint,IterableToList,50,51,1
+block_hint,IterableToList,98,99,1
+block_hint,IterableToList,107,108,0
+block_hint,IterableToList,109,110,0
+block_hint,IterableToList,100,101,0
+block_hint,IterableToList,74,75,0
+block_hint,IterableToList,58,59,1
+block_hint,IterableToList,96,97,0
+block_hint,IterableToList,52,53,0
+block_hint,IterableToList,93,94,1
+block_hint,IterableToList,82,83,1
+block_hint,IterableToList,17,18,0
+block_hint,IterableToList,61,62,0
+block_hint,IterableToList,14,15,1
+block_hint,IterableToList,90,91,0
+block_hint,IterableToList,103,104,1
+block_hint,IterableToList,88,89,0
+block_hint,IterableToList,32,33,0
+block_hint,IterableToList,113,114,1
+block_hint,IterableToList,111,112,1
+block_hint,IterableToList,63,64,0
+block_hint,IterableToList,34,35,1
+block_hint,IterableToListWithSymbolLookup,39,40,0
+block_hint,IterableToListWithSymbolLookup,96,97,1
+block_hint,IterableToListWithSymbolLookup,94,95,0
+block_hint,IterableToListWithSymbolLookup,82,83,1
+block_hint,IterableToListWithSymbolLookup,55,56,1
+block_hint,IterableToListWithSymbolLookup,25,26,1
+block_hint,IterableToListWithSymbolLookup,2,3,1
+block_hint,IterableToListWithSymbolLookup,99,100,1
+block_hint,IterableToListWithSymbolLookup,92,93,0
+block_hint,IterableToListWithSymbolLookup,71,72,1
+block_hint,IterableToListWithSymbolLookup,78,79,0
+block_hint,IterableToListWithSymbolLookup,84,85,1
+block_hint,IterableToListWithSymbolLookup,57,58,1
+block_hint,IterableToListWithSymbolLookup,27,28,1
+block_hint,IterableToListWithSymbolLookup,4,5,1
+block_hint,IterableToListWithSymbolLookup,80,81,1
+block_hint,IterableToListWithSymbolLookup,62,63,0
+block_hint,IterableToListWithSymbolLookup,17,18,1
+block_hint,IterableToListMayPreserveHoles,8,9,1
+block_hint,IterableToListMayPreserveHoles,15,16,0
+block_hint,IterableToListMayPreserveHoles,20,21,1
+block_hint,IterableToListMayPreserveHoles,17,18,1
+block_hint,IterableToListMayPreserveHoles,11,12,1
+block_hint,IterableToListMayPreserveHoles,3,4,1
+block_hint,IterableToListMayPreserveHoles,13,14,1
+block_hint,IterableToListMayPreserveHoles,5,6,0
+block_hint,FindOrderedHashMapEntry,26,27,1
+block_hint,FindOrderedHashMapEntry,64,65,0
+block_hint,FindOrderedHashMapEntry,24,25,0
+block_hint,FindOrderedHashMapEntry,22,23,0
+block_hint,FindOrderedHashMapEntry,68,69,0
+block_hint,FindOrderedHashMapEntry,58,59,1
+block_hint,FindOrderedHashMapEntry,60,61,1
+block_hint,MapConstructor,328,329,1
+block_hint,MapConstructor,248,249,1
+block_hint,MapConstructor,105,106,0
+block_hint,MapConstructor,13,14,1
+block_hint,MapConstructor,270,271,1
+block_hint,MapConstructor,211,212,1
+block_hint,MapConstructor,86,87,0
+block_hint,MapConstructor,88,89,1
+block_hint,MapConstructor,272,273,1
+block_hint,MapConstructor,308,309,0
+block_hint,MapConstructor,319,320,0
+block_hint,MapConstructor,220,221,0
+block_hint,MapConstructor,109,110,0
+block_hint,MapConstructor,238,239,1
+block_hint,MapConstructor,103,104,1
+block_hint,MapPrototypeSet,98,99,1
+block_hint,MapPrototypeSet,62,63,1
+block_hint,MapPrototypeSet,64,65,1
+block_hint,MapPrototypeSet,88,89,1
+block_hint,MapPrototypeSet,90,91,0
+block_hint,MapPrototypeSet,26,27,1
+block_hint,MapPrototypeSet,94,95,0
+block_hint,MapPrototypeSet,56,57,0
+block_hint,MapPrototypeSet,24,25,0
+block_hint,MapPrototypeSet,22,23,0
+block_hint,MapPrototypeSet,31,32,1
+block_hint,MapPrototypeSet,66,67,0
+block_hint,MapPrototypeSet,47,48,0
+block_hint,MapPrototypeSet,49,50,1
+block_hint,MapPrototypeSet,51,52,1
+block_hint,MapPrototypeSet,53,54,0
+block_hint,MapPrototypeSet,17,18,1
+block_hint,MapPrototypeSet,29,30,1
+block_hint,MapPrototypeDelete,98,99,1
+block_hint,MapPrototypeDelete,77,78,1
+block_hint,MapPrototypeDelete,79,80,1
+block_hint,MapPrototypeDelete,15,16,0
+block_hint,MapPrototypeDelete,89,90,1
+block_hint,MapPrototypeDelete,63,64,0
+block_hint,MapPrototypeDelete,40,41,0
+block_hint,MapPrototypeDelete,65,66,1
+block_hint,MapPrototypeDelete,67,68,1
+block_hint,MapPrototypeDelete,19,20,1
+block_hint,MapPrototypeDelete,21,22,1
+block_hint,MapPrototypeDelete,23,24,1
+block_hint,MapPrototypeGet,12,13,1
+block_hint,MapPrototypeGet,7,8,1
+block_hint,MapPrototypeGet,9,10,1
+block_hint,MapPrototypeGet,3,4,1
+block_hint,MapPrototypeHas,10,11,1
+block_hint,MapPrototypeHas,5,6,1
+block_hint,MapPrototypeHas,7,8,1
+block_hint,MapPrototypeEntries,13,14,1
+block_hint,MapPrototypeEntries,8,9,1
+block_hint,MapPrototypeEntries,10,11,1
+block_hint,MapPrototypeEntries,4,5,1
+block_hint,MapPrototypeEntries,6,7,1
+block_hint,MapPrototypeGetSize,8,9,1
+block_hint,MapPrototypeGetSize,5,6,1
+block_hint,MapPrototypeGetSize,3,4,1
+block_hint,MapPrototypeForEach,33,34,1
+block_hint,MapPrototypeForEach,30,31,0
+block_hint,MapPrototypeForEach,27,28,1
+block_hint,MapPrototypeForEach,20,21,1
+block_hint,MapPrototypeForEach,22,23,1
+block_hint,MapPrototypeForEach,24,25,1
+block_hint,MapPrototypeForEach,12,13,1
+block_hint,MapPrototypeForEach,14,15,0
+block_hint,MapPrototypeValues,13,14,1
+block_hint,MapPrototypeValues,8,9,1
+block_hint,MapPrototypeValues,10,11,1
+block_hint,MapPrototypeValues,4,5,1
+block_hint,MapPrototypeValues,6,7,1
+block_hint,MapIteratorPrototypeNext,47,48,1
+block_hint,MapIteratorPrototypeNext,30,31,1
+block_hint,MapIteratorPrototypeNext,32,33,1
+block_hint,MapIteratorPrototypeNext,19,20,0
+block_hint,MapIteratorPrototypeNext,21,22,0
+block_hint,MapIteratorPrototypeNext,34,35,0
+block_hint,MapIteratorPrototypeNext,7,8,1
+block_hint,MapIteratorPrototypeNext,39,40,1
+block_hint,MapIteratorPrototypeNext,9,10,1
+block_hint,MapIteratorPrototypeNext,11,12,1
+block_hint,MapIteratorPrototypeNext,13,14,1
+block_hint,MapIteratorPrototypeNext,15,16,1
+block_hint,MapIteratorPrototypeNext,17,18,1
+block_hint,MapIteratorPrototypeNext,25,26,1
+block_hint,SameValueNumbersOnly,4,5,1
+block_hint,Add_Baseline,32,33,0
+block_hint,Add_Baseline,21,22,0
+block_hint,Add_Baseline,8,9,1
+block_hint,Add_Baseline,58,59,0
+block_hint,Add_Baseline,35,36,1
+block_hint,Add_Baseline,47,48,0
+block_hint,Add_Baseline,17,18,1
+block_hint,Add_Baseline,53,54,1
+block_hint,Add_Baseline,19,20,1
+block_hint,Add_Baseline,26,27,1
+block_hint,Add_Baseline,10,11,1
+block_hint,AddSmi_Baseline,32,33,0
+block_hint,AddSmi_Baseline,21,22,0
+block_hint,AddSmi_Baseline,8,9,1
+block_hint,AddSmi_Baseline,49,50,1
+block_hint,AddSmi_Baseline,26,27,1
+block_hint,AddSmi_Baseline,10,11,1
+block_hint,Subtract_Baseline,21,22,0
+block_hint,Subtract_Baseline,8,9,1
+block_hint,Subtract_Baseline,46,47,1
+block_hint,Subtract_Baseline,56,57,1
+block_hint,Subtract_Baseline,54,55,0
+block_hint,Subtract_Baseline,42,43,0
+block_hint,Subtract_Baseline,48,49,1
+block_hint,Subtract_Baseline,17,18,1
+block_hint,Subtract_Baseline,23,24,1
+block_hint,Subtract_Baseline,10,11,1
+block_hint,SubtractSmi_Baseline,21,22,0
+block_hint,SubtractSmi_Baseline,8,9,1
+block_hint,SubtractSmi_Baseline,38,39,1
+block_hint,SubtractSmi_Baseline,23,24,1
+block_hint,SubtractSmi_Baseline,10,11,1
+block_hint,Multiply_Baseline,69,70,0
+block_hint,Multiply_Baseline,47,48,0
+block_hint,Multiply_Baseline,55,56,0
+block_hint,Multiply_Baseline,61,62,1
+block_hint,Multiply_Baseline,57,58,1
+block_hint,Multiply_Baseline,10,11,1
+block_hint,Multiply_Baseline,49,50,1
+block_hint,Multiply_Baseline,67,68,1
+block_hint,Multiply_Baseline,51,52,1
+block_hint,Multiply_Baseline,24,25,1
+block_hint,Multiply_Baseline,12,13,1
+block_hint,MultiplySmi_Baseline,61,62,0
+block_hint,MultiplySmi_Baseline,47,48,0
+block_hint,MultiplySmi_Baseline,49,50,0
+block_hint,MultiplySmi_Baseline,51,52,1
+block_hint,MultiplySmi_Baseline,22,23,0
+block_hint,MultiplySmi_Baseline,10,11,1
+block_hint,MultiplySmi_Baseline,38,39,1
+block_hint,MultiplySmi_Baseline,24,25,1
+block_hint,MultiplySmi_Baseline,12,13,1
+block_hint,Divide_Baseline,59,60,0
+block_hint,Divide_Baseline,61,62,0
+block_hint,Divide_Baseline,48,49,0
+block_hint,Divide_Baseline,31,32,1
+block_hint,Divide_Baseline,10,11,1
+block_hint,Divide_Baseline,52,53,1
+block_hint,Divide_Baseline,67,68,1
+block_hint,Divide_Baseline,54,55,1
+block_hint,Divide_Baseline,38,39,0
+block_hint,Divide_Baseline,19,20,1
+block_hint,Divide_Baseline,25,26,1
+block_hint,Divide_Baseline,12,13,1
+block_hint,DivideSmi_Baseline,53,54,0
+block_hint,DivideSmi_Baseline,61,62,0
+block_hint,DivideSmi_Baseline,55,56,0
+block_hint,DivideSmi_Baseline,48,49,0
+block_hint,DivideSmi_Baseline,31,32,1
+block_hint,DivideSmi_Baseline,10,11,1
+block_hint,DivideSmi_Baseline,40,41,1
+block_hint,DivideSmi_Baseline,25,26,1
+block_hint,DivideSmi_Baseline,12,13,1
+block_hint,Modulus_Baseline,61,62,0
+block_hint,Modulus_Baseline,57,58,0
+block_hint,Modulus_Baseline,43,44,1
+block_hint,Modulus_Baseline,38,39,1
+block_hint,Modulus_Baseline,17,18,0
+block_hint,Modulus_Baseline,6,7,1
+block_hint,ModulusSmi_Baseline,43,44,1
+block_hint,ModulusSmi_Baseline,38,39,1
+block_hint,ModulusSmi_Baseline,17,18,0
+block_hint,ModulusSmi_Baseline,6,7,1
+block_hint,ModulusSmi_Baseline,32,33,1
+block_hint,ModulusSmi_Baseline,19,20,1
+block_hint,ModulusSmi_Baseline,8,9,1
+block_hint,BitwiseAnd_Baseline,35,36,0
+block_hint,BitwiseAnd_Baseline,23,24,1
+block_hint,BitwiseAnd_Baseline,8,9,0
+block_hint,BitwiseAnd_Baseline,33,34,0
+block_hint,BitwiseAnd_Baseline,27,28,1
+block_hint,BitwiseAnd_Baseline,12,13,0
+block_hint,BitwiseAnd_Baseline,50,51,1
+block_hint,BitwiseAnd_Baseline,14,15,1
+block_hint,BitwiseAndSmi_Baseline,18,19,0
+block_hint,BitwiseAndSmi_Baseline,16,17,1
+block_hint,BitwiseAndSmi_Baseline,7,8,0
+block_hint,BitwiseAndSmi_Baseline,26,27,0
+block_hint,BitwiseAndSmi_Baseline,20,21,0
+block_hint,BitwiseAndSmi_Baseline,9,10,1
+block_hint,BitwiseOr_Baseline,35,36,0
+block_hint,BitwiseOr_Baseline,23,24,1
+block_hint,BitwiseOr_Baseline,8,9,1
+block_hint,BitwiseOr_Baseline,48,49,1
+block_hint,BitwiseOr_Baseline,50,51,1
+block_hint,BitwiseOr_Baseline,14,15,1
+block_hint,BitwiseOrSmi_Baseline,5,6,0
+block_hint,BitwiseOrSmi_Baseline,18,19,0
+block_hint,BitwiseOrSmi_Baseline,16,17,0
+block_hint,BitwiseOrSmi_Baseline,28,29,1
+block_hint,BitwiseOrSmi_Baseline,9,10,1
+block_hint,BitwiseXor_Baseline,25,26,1
+block_hint,BitwiseXor_Baseline,35,36,0
+block_hint,BitwiseXor_Baseline,23,24,1
+block_hint,BitwiseXor_Baseline,48,49,1
+block_hint,BitwiseXor_Baseline,33,34,0
+block_hint,BitwiseXor_Baseline,27,28,1
+block_hint,BitwiseXor_Baseline,50,51,1
+block_hint,BitwiseXor_Baseline,14,15,1
+block_hint,BitwiseXorSmi_Baseline,18,19,0
+block_hint,BitwiseXorSmi_Baseline,16,17,1
+block_hint,BitwiseXorSmi_Baseline,7,8,1
+block_hint,BitwiseXorSmi_Baseline,9,10,1
+block_hint,ShiftLeft_Baseline,25,26,1
+block_hint,ShiftLeft_Baseline,10,11,0
+block_hint,ShiftLeft_Baseline,50,51,1
+block_hint,ShiftLeft_Baseline,14,15,1
+block_hint,ShiftLeftSmi_Baseline,35,36,1
+block_hint,ShiftLeftSmi_Baseline,25,26,1
+block_hint,ShiftLeftSmi_Baseline,37,38,1
+block_hint,ShiftLeftSmi_Baseline,9,10,1
+block_hint,ShiftRight_Baseline,6,7,0
+block_hint,ShiftRight_Baseline,10,11,0
+block_hint,ShiftRight_Baseline,46,47,0
+block_hint,ShiftRight_Baseline,29,30,0
+block_hint,ShiftRight_Baseline,14,15,1
+block_hint,ShiftRightSmi_Baseline,5,6,0
+block_hint,ShiftRightSmi_Baseline,22,23,1
+block_hint,ShiftRightSmi_Baseline,26,27,0
+block_hint,ShiftRightSmi_Baseline,20,21,0
+block_hint,ShiftRightSmi_Baseline,9,10,1
+block_hint,ShiftRightLogical_Baseline,25,26,1
+block_hint,ShiftRightLogical_Baseline,10,11,0
+block_hint,ShiftRightLogical_Baseline,46,47,0
+block_hint,ShiftRightLogical_Baseline,29,30,0
+block_hint,ShiftRightLogical_Baseline,14,15,1
+block_hint,ShiftRightLogicalSmi_Baseline,35,36,1
+block_hint,ShiftRightLogicalSmi_Baseline,25,26,1
+block_hint,ShiftRightLogicalSmi_Baseline,33,34,0
+block_hint,ShiftRightLogicalSmi_Baseline,23,24,0
+block_hint,ShiftRightLogicalSmi_Baseline,9,10,1
+block_hint,Add_WithFeedback,49,50,1
+block_hint,Add_WithFeedback,60,61,0
+block_hint,Add_WithFeedback,58,59,0
+block_hint,Add_WithFeedback,45,46,1
+block_hint,Add_WithFeedback,35,36,1
+block_hint,Add_WithFeedback,28,29,0
+block_hint,Add_WithFeedback,19,20,1
+block_hint,Subtract_WithFeedback,52,53,1
+block_hint,Subtract_WithFeedback,56,57,0
+block_hint,Subtract_WithFeedback,54,55,0
+block_hint,Subtract_WithFeedback,42,43,0
+block_hint,Subtract_WithFeedback,17,18,1
+block_hint,Modulus_WithFeedback,61,62,0
+block_hint,Modulus_WithFeedback,57,58,0
+block_hint,Modulus_WithFeedback,43,44,1
+block_hint,Modulus_WithFeedback,38,39,1
+block_hint,Modulus_WithFeedback,17,18,0
+block_hint,Modulus_WithFeedback,6,7,1
+block_hint,BitwiseOr_WithFeedback,6,7,1
+block_hint,BitwiseOr_WithFeedback,35,36,0
+block_hint,BitwiseOr_WithFeedback,23,24,0
+block_hint,BitwiseOr_WithFeedback,10,11,0
+block_hint,BitwiseOr_WithFeedback,46,47,0
+block_hint,BitwiseOr_WithFeedback,29,30,0
+block_hint,BitwiseOr_WithFeedback,14,15,1
+block_hint,Equal_Baseline,48,49,0
+block_hint,Equal_Baseline,18,19,1
+block_hint,Equal_Baseline,101,102,0
+block_hint,Equal_Baseline,14,15,1
+block_hint,Equal_Baseline,39,40,0
+block_hint,Equal_Baseline,26,27,0
+block_hint,Equal_Baseline,28,29,1
+block_hint,Equal_Baseline,45,46,0
+block_hint,Equal_Baseline,32,33,0
+block_hint,Equal_Baseline,24,25,1
+block_hint,Equal_Baseline,77,78,0
+block_hint,Equal_Baseline,75,76,0
+block_hint,Equal_Baseline,83,84,0
+block_hint,Equal_Baseline,85,86,0
+block_hint,Equal_Baseline,59,60,0
+block_hint,Equal_Baseline,109,110,0
+block_hint,Equal_Baseline,65,66,0
+block_hint,Equal_Baseline,69,70,1
+block_hint,Equal_Baseline,98,99,0
+block_hint,Equal_Baseline,71,72,1
+block_hint,Equal_Baseline,6,7,1
+block_hint,StrictEqual_Baseline,37,38,0
+block_hint,StrictEqual_Baseline,76,77,0
+block_hint,StrictEqual_Baseline,47,48,1
+block_hint,StrictEqual_Baseline,60,61,0
+block_hint,StrictEqual_Baseline,51,52,0
+block_hint,StrictEqual_Baseline,53,54,1
+block_hint,StrictEqual_Baseline,35,36,1
+block_hint,StrictEqual_Baseline,33,34,0
+block_hint,StrictEqual_Baseline,55,56,0
+block_hint,StrictEqual_Baseline,29,30,1
+block_hint,StrictEqual_Baseline,31,32,1
+block_hint,StrictEqual_Baseline,49,50,1
+block_hint,StrictEqual_Baseline,41,42,0
+block_hint,StrictEqual_Baseline,45,46,0
+block_hint,StrictEqual_Baseline,66,67,0
+block_hint,StrictEqual_Baseline,13,14,0
+block_hint,StrictEqual_Baseline,43,44,0
+block_hint,StrictEqual_Baseline,3,4,1
+block_hint,LessThan_Baseline,44,45,0
+block_hint,LessThan_Baseline,23,24,1
+block_hint,LessThan_Baseline,25,26,1
+block_hint,LessThan_Baseline,10,11,0
+block_hint,LessThan_Baseline,56,57,0
+block_hint,LessThan_Baseline,12,13,0
+block_hint,LessThan_Baseline,5,6,1
+block_hint,GreaterThan_Baseline,44,45,0
+block_hint,GreaterThan_Baseline,10,11,0
+block_hint,GreaterThan_Baseline,48,49,1
+block_hint,GreaterThan_Baseline,12,13,0
+block_hint,GreaterThan_Baseline,5,6,1
+block_hint,LessThanOrEqual_Baseline,44,45,0
+block_hint,LessThanOrEqual_Baseline,23,24,1
+block_hint,LessThanOrEqual_Baseline,25,26,1
+block_hint,LessThanOrEqual_Baseline,56,57,0
+block_hint,LessThanOrEqual_Baseline,58,59,1
+block_hint,LessThanOrEqual_Baseline,37,38,1
+block_hint,LessThanOrEqual_Baseline,27,28,1
+block_hint,LessThanOrEqual_Baseline,5,6,1
+block_hint,GreaterThanOrEqual_Baseline,44,45,0
+block_hint,GreaterThanOrEqual_Baseline,23,24,1
+block_hint,GreaterThanOrEqual_Baseline,25,26,1
+block_hint,GreaterThanOrEqual_Baseline,56,57,0
+block_hint,GreaterThanOrEqual_Baseline,27,28,1
+block_hint,GreaterThanOrEqual_Baseline,5,6,1
+block_hint,Equal_WithFeedback,103,104,0
+block_hint,Equal_WithFeedback,81,82,1
+block_hint,Equal_WithFeedback,37,38,0
+block_hint,Equal_WithFeedback,48,49,0
+block_hint,Equal_WithFeedback,18,19,1
+block_hint,Equal_WithFeedback,8,9,0
+block_hint,Equal_WithFeedback,95,96,0
+block_hint,Equal_WithFeedback,101,102,0
+block_hint,Equal_WithFeedback,20,21,0
+block_hint,Equal_WithFeedback,39,40,0
+block_hint,Equal_WithFeedback,26,27,0
+block_hint,Equal_WithFeedback,28,29,1
+block_hint,Equal_WithFeedback,45,46,0
+block_hint,Equal_WithFeedback,32,33,0
+block_hint,Equal_WithFeedback,75,76,0
+block_hint,Equal_WithFeedback,83,84,0
+block_hint,Equal_WithFeedback,85,86,0
+block_hint,Equal_WithFeedback,87,88,0
+block_hint,Equal_WithFeedback,79,80,0
+block_hint,Equal_WithFeedback,89,90,0
+block_hint,Equal_WithFeedback,117,118,0
+block_hint,Equal_WithFeedback,109,110,0
+block_hint,Equal_WithFeedback,107,108,0
+block_hint,Equal_WithFeedback,67,68,0
+block_hint,Equal_WithFeedback,105,106,0
+block_hint,Equal_WithFeedback,65,66,0
+block_hint,Equal_WithFeedback,6,7,1
+block_hint,StrictEqual_WithFeedback,37,38,0
+block_hint,StrictEqual_WithFeedback,72,73,0
+block_hint,StrictEqual_WithFeedback,47,48,1
+block_hint,StrictEqual_WithFeedback,60,61,0
+block_hint,StrictEqual_WithFeedback,53,54,1
+block_hint,StrictEqual_WithFeedback,35,36,1
+block_hint,StrictEqual_WithFeedback,57,58,1
+block_hint,StrictEqual_WithFeedback,55,56,0
+block_hint,StrictEqual_WithFeedback,31,32,1
+block_hint,StrictEqual_WithFeedback,41,42,0
+block_hint,StrictEqual_WithFeedback,70,71,1
+block_hint,StrictEqual_WithFeedback,45,46,0
+block_hint,StrictEqual_WithFeedback,21,22,1
+block_hint,StrictEqual_WithFeedback,66,67,0
+block_hint,StrictEqual_WithFeedback,15,16,0
+block_hint,StrictEqual_WithFeedback,13,14,0
+block_hint,StrictEqual_WithFeedback,43,44,0
+block_hint,StrictEqual_WithFeedback,3,4,1
+block_hint,LessThan_WithFeedback,44,45,1
+block_hint,LessThan_WithFeedback,23,24,1
+block_hint,LessThan_WithFeedback,46,47,1
+block_hint,LessThan_WithFeedback,48,49,1
+block_hint,LessThan_WithFeedback,56,57,0
+block_hint,LessThan_WithFeedback,54,55,0
+block_hint,LessThan_WithFeedback,18,19,1
+block_hint,LessThan_WithFeedback,31,32,0
+block_hint,LessThan_WithFeedback,16,17,1
+block_hint,LessThan_WithFeedback,12,13,0
+block_hint,LessThan_WithFeedback,39,40,1
+block_hint,LessThan_WithFeedback,5,6,1
+block_hint,GreaterThan_WithFeedback,60,61,1
+block_hint,GreaterThan_WithFeedback,23,24,1
+block_hint,GreaterThan_WithFeedback,25,26,1
+block_hint,GreaterThan_WithFeedback,48,49,1
+block_hint,GreaterThan_WithFeedback,56,57,0
+block_hint,GreaterThan_WithFeedback,58,59,0
+block_hint,GreaterThan_WithFeedback,54,55,1
+block_hint,GreaterThan_WithFeedback,50,51,1
+block_hint,GreaterThan_WithFeedback,18,19,0
+block_hint,GreaterThan_WithFeedback,12,13,0
+block_hint,GreaterThan_WithFeedback,5,6,1
+block_hint,GreaterThanOrEqual_WithFeedback,60,61,1
+block_hint,GreaterThanOrEqual_WithFeedback,46,47,1
+block_hint,GreaterThanOrEqual_WithFeedback,48,49,0
+block_hint,GreaterThanOrEqual_WithFeedback,56,57,0
+block_hint,GreaterThanOrEqual_WithFeedback,54,55,0
+block_hint,GreaterThanOrEqual_WithFeedback,18,19,1
+block_hint,GreaterThanOrEqual_WithFeedback,31,32,0
+block_hint,GreaterThanOrEqual_WithFeedback,16,17,1
+block_hint,GreaterThanOrEqual_WithFeedback,5,6,1
+block_hint,BitwiseNot_Baseline,19,20,0
+block_hint,BitwiseNot_Baseline,15,16,1
+block_hint,BitwiseNot_Baseline,7,8,1
+block_hint,BitwiseNot_Baseline,27,28,1
+block_hint,BitwiseNot_Baseline,9,10,1
+block_hint,Decrement_Baseline,19,20,0
+block_hint,Decrement_Baseline,17,18,1
+block_hint,Decrement_Baseline,13,14,0
+block_hint,Decrement_Baseline,15,16,1
+block_hint,Decrement_Baseline,5,6,1
+block_hint,Increment_Baseline,19,20,0
+block_hint,Increment_Baseline,17,18,1
+block_hint,Increment_Baseline,13,14,0
+block_hint,Increment_Baseline,15,16,1
+block_hint,Increment_Baseline,5,6,1
+block_hint,Negate_Baseline,20,21,1
+block_hint,Negate_Baseline,14,15,0
+block_hint,Negate_Baseline,18,19,1
+block_hint,Negate_Baseline,5,6,1
+block_hint,ObjectAssign,21,22,1
+block_hint,ObjectAssign,18,19,0
+block_hint,ObjectAssign,15,16,1
+block_hint,ObjectAssign,12,13,1
+block_hint,ObjectAssign,9,10,0
+block_hint,ObjectAssign,5,6,0
+block_hint,ObjectCreate,78,79,1
+block_hint,ObjectCreate,75,76,0
+block_hint,ObjectCreate,33,34,1
+block_hint,ObjectCreate,35,36,1
+block_hint,ObjectCreate,37,38,1
+block_hint,ObjectCreate,39,40,0
+block_hint,ObjectCreate,41,42,1
+block_hint,ObjectCreate,43,44,0
+block_hint,ObjectCreate,45,46,1
+block_hint,ObjectCreate,17,18,1
+block_hint,ObjectCreate,69,70,0
+block_hint,ObjectCreate,55,56,0
+block_hint,ObjectCreate,59,60,1
+block_hint,ObjectCreate,47,48,0
+block_hint,ObjectCreate,49,50,0
+block_hint,ObjectCreate,5,6,1
+block_hint,ObjectCreate,52,53,1
+block_hint,ObjectCreate,7,8,1
+block_hint,ObjectCreate,9,10,1
+block_hint,ObjectCreate,11,12,1
+block_hint,ObjectCreate,13,14,1
+block_hint,ObjectCreate,15,16,1
+block_hint,ObjectCreate,20,21,0
+block_hint,ObjectCreate,61,62,1
+block_hint,ObjectGetOwnPropertyDescriptor,517,518,1
+block_hint,ObjectGetOwnPropertyDescriptor,514,515,0
+block_hint,ObjectGetOwnPropertyDescriptor,511,512,0
+block_hint,ObjectGetOwnPropertyDescriptor,503,504,1
+block_hint,ObjectGetOwnPropertyDescriptor,490,491,1
+block_hint,ObjectGetOwnPropertyDescriptor,408,409,0
+block_hint,ObjectGetOwnPropertyDescriptor,470,471,1
+block_hint,ObjectGetOwnPropertyDescriptor,488,489,0
+block_hint,ObjectGetOwnPropertyDescriptor,434,435,0
+block_hint,ObjectGetOwnPropertyDescriptor,467,468,1
+block_hint,ObjectGetOwnPropertyDescriptor,410,411,1
+block_hint,ObjectGetOwnPropertyDescriptor,462,463,0
+block_hint,ObjectGetOwnPropertyDescriptor,464,465,0
+block_hint,ObjectGetOwnPropertyDescriptor,436,437,0
+block_hint,ObjectGetOwnPropertyDescriptor,406,407,0
+block_hint,ObjectGetOwnPropertyDescriptor,331,332,0
+block_hint,ObjectGetOwnPropertyDescriptor,197,198,1
+block_hint,ObjectGetOwnPropertyDescriptor,307,308,1
+block_hint,ObjectGetOwnPropertyDescriptor,138,139,1
+block_hint,ObjectGetOwnPropertyDescriptor,497,498,0
+block_hint,ObjectGetOwnPropertyDescriptor,505,506,1
+block_hint,ObjectGetOwnPropertyDescriptor,493,494,0
+block_hint,ObjectGetOwnPropertyDescriptor,426,427,0
+block_hint,ObjectGetOwnPropertyDescriptor,329,330,0
+block_hint,ObjectGetOwnPropertyDescriptor,31,32,1
+block_hint,ObjectGetOwnPropertyDescriptor,361,362,1
+block_hint,ObjectGetOwnPropertyDescriptor,150,151,0
+block_hint,ObjectGetOwnPropertyDescriptor,474,475,0
+block_hint,ObjectGetOwnPropertyDescriptor,390,391,0
+block_hint,ObjectGetOwnPropertyDescriptor,264,265,0
+block_hint,ObjectGetOwnPropertyDescriptor,260,261,0
+block_hint,ObjectGetOwnPropertyDescriptor,282,283,0
+block_hint,ObjectGetOwnPropertyDescriptor,284,285,1
+block_hint,ObjectGetOwnPropertyDescriptor,36,37,1
+block_hint,ObjectGetOwnPropertyDescriptor,365,366,1
+block_hint,ObjectGetOwnPropertyDescriptor,186,187,0
+block_hint,ObjectGetOwnPropertyDescriptor,268,269,1
+block_hint,ObjectKeys,32,33,1
+block_hint,ObjectKeys,27,28,1
+block_hint,ObjectKeys,23,24,1
+block_hint,ObjectKeys,25,26,0
+block_hint,ObjectKeys,17,18,1
+block_hint,ObjectKeys,5,6,1
+block_hint,ObjectKeys,21,22,1
+block_hint,ObjectKeys,9,10,0
+block_hint,ObjectKeys,7,8,1
+block_hint,ObjectKeys,14,15,1
+block_hint,ObjectPrototypeHasOwnProperty,230,231,1
+block_hint,ObjectPrototypeHasOwnProperty,205,206,1
+block_hint,ObjectPrototypeHasOwnProperty,222,223,1
+block_hint,ObjectPrototypeHasOwnProperty,239,240,0
+block_hint,ObjectPrototypeHasOwnProperty,219,220,0
+block_hint,ObjectPrototypeHasOwnProperty,209,210,1
+block_hint,ObjectPrototypeHasOwnProperty,163,164,1
+block_hint,ObjectPrototypeHasOwnProperty,235,236,0
+block_hint,ObjectPrototypeHasOwnProperty,237,238,0
+block_hint,ObjectPrototypeHasOwnProperty,233,234,0
+block_hint,ObjectPrototypeHasOwnProperty,228,229,0
+block_hint,ObjectPrototypeHasOwnProperty,192,193,1
+block_hint,ObjectPrototypeHasOwnProperty,137,138,0
+block_hint,ObjectPrototypeHasOwnProperty,211,212,0
+block_hint,ObjectPrototypeHasOwnProperty,175,176,1
+block_hint,ObjectPrototypeHasOwnProperty,141,142,0
+block_hint,ObjectPrototypeHasOwnProperty,226,227,0
+block_hint,ObjectPrototypeHasOwnProperty,76,77,0
+block_hint,ObjectPrototypeHasOwnProperty,203,204,0
+block_hint,ObjectPrototypeHasOwnProperty,34,35,1
+block_hint,ObjectPrototypeHasOwnProperty,52,53,0
+block_hint,ObjectPrototypeHasOwnProperty,36,37,1
+block_hint,ObjectPrototypeHasOwnProperty,197,198,1
+block_hint,ObjectPrototypeHasOwnProperty,40,41,0
+block_hint,ObjectPrototypeHasOwnProperty,171,172,0
+block_hint,ObjectPrototypeHasOwnProperty,178,179,1
+block_hint,ObjectPrototypeHasOwnProperty,58,59,0
+block_hint,ObjectToString,42,43,0
+block_hint,ObjectToString,57,58,0
+block_hint,ObjectToString,65,66,0
+block_hint,ObjectToString,52,53,0
+block_hint,ObjectToString,7,8,1
+block_hint,ObjectToString,5,6,1
+block_hint,ObjectToString,11,12,1
+block_hint,ObjectToString,19,20,0
+block_hint,InstanceOf_WithFeedback,50,51,1
+block_hint,InstanceOf_WithFeedback,52,53,0
+block_hint,InstanceOf_WithFeedback,54,55,1
+block_hint,InstanceOf_WithFeedback,32,33,1
+block_hint,InstanceOf_WithFeedback,34,35,1
+block_hint,InstanceOf_WithFeedback,5,6,1
+block_hint,InstanceOf_WithFeedback,14,15,1
+block_hint,InstanceOf_Baseline,50,51,1
+block_hint,InstanceOf_Baseline,54,55,1
+block_hint,InstanceOf_Baseline,32,33,1
+block_hint,InstanceOf_Baseline,34,35,1
+block_hint,InstanceOf_Baseline,5,6,1
+block_hint,InstanceOf_Baseline,14,15,1
+block_hint,ForInEnumerate,34,35,1
+block_hint,ForInEnumerate,36,37,0
+block_hint,ForInEnumerate,30,31,0
+block_hint,ForInEnumerate,32,33,1
+block_hint,ForInEnumerate,5,6,1
+block_hint,ForInEnumerate,38,39,1
+block_hint,ForInEnumerate,9,10,1
+block_hint,ForInPrepare,7,8,1
+block_hint,ForInPrepare,12,13,1
+block_hint,ForInPrepare,5,6,1
+block_hint,ForInFilter,232,233,1
+block_hint,ForInFilter,234,235,1
+block_hint,ForInFilter,225,226,0
+block_hint,ForInFilter,117,118,1
+block_hint,ForInFilter,217,218,0
+block_hint,ForInFilter,62,63,0
+block_hint,ForInFilter,129,130,1
+block_hint,ForInFilter,219,220,1
+block_hint,ForInFilter,103,104,0
+block_hint,ForInFilter,105,106,0
+block_hint,ForInFilter,66,67,0
+block_hint,ForInFilter,64,65,0
+block_hint,ForInFilter,268,269,0
+block_hint,ForInFilter,223,224,1
+block_hint,ForInFilter,109,110,1
+block_hint,ForInFilter,71,72,0
+block_hint,ForInFilter,264,265,0
+block_hint,ForInFilter,262,263,0
+block_hint,ForInFilter,249,250,0
+block_hint,ForInFilter,107,108,1
+block_hint,ForInFilter,40,41,1
+block_hint,ForInFilter,201,202,0
+block_hint,ForInFilter,42,43,1
+block_hint,ForInFilter,144,145,1
+block_hint,ForInFilter,46,47,0
+block_hint,ForInFilter,113,114,0
+block_hint,ForInFilter,131,132,0
+block_hint,ForInFilter,36,37,0
+block_hint,ForInFilter,246,247,0
+block_hint,ForInFilter,253,254,1
+block_hint,ForInFilter,189,190,0
+block_hint,ForInFilter,33,34,1
+block_hint,RegExpConstructor,55,56,1
+block_hint,RegExpConstructor,7,8,1
+block_hint,RegExpConstructor,131,132,1
+block_hint,RegExpConstructor,133,134,1
+block_hint,RegExpConstructor,70,71,0
+block_hint,RegExpConstructor,106,107,1
+block_hint,RegExpConstructor,127,128,0
+block_hint,RegExpConstructor,108,109,0
+block_hint,RegExpConstructor,82,83,1
+block_hint,RegExpConstructor,67,68,1
+block_hint,RegExpConstructor,40,41,0
+block_hint,RegExpConstructor,76,77,0
+block_hint,RegExpConstructor,104,105,1
+block_hint,RegExpConstructor,86,87,1
+block_hint,RegExpConstructor,78,79,1
+block_hint,RegExpConstructor,63,64,1
+block_hint,RegExpExecInternal,20,21,0
+block_hint,RegExpExecInternal,22,23,0
+block_hint,RegExpExecInternal,36,37,0
+block_hint,RegExpExecInternal,12,13,0
+block_hint,RegExpExecInternal,49,50,0
+block_hint,RegExpExecInternal,52,53,1
+block_hint,RegExpExecInternal,40,41,1
+block_hint,RegExpExecInternal,54,55,1
+block_hint,RegExpExecInternal,44,45,0
+block_hint,RegExpExecInternal,24,25,0
+block_hint,FindOrderedHashSetEntry,26,27,1
+block_hint,FindOrderedHashSetEntry,34,35,0
+block_hint,FindOrderedHashSetEntry,24,25,0
+block_hint,FindOrderedHashSetEntry,22,23,0
+block_hint,FindOrderedHashSetEntry,42,43,1
+block_hint,FindOrderedHashSetEntry,68,69,0
+block_hint,FindOrderedHashSetEntry,58,59,1
+block_hint,FindOrderedHashSetEntry,60,61,1
+block_hint,SetConstructor,202,203,1
+block_hint,SetConstructor,74,75,0
+block_hint,SetConstructor,11,12,1
+block_hint,SetConstructor,172,173,1
+block_hint,SetConstructor,135,136,1
+block_hint,SetConstructor,56,57,0
+block_hint,SetConstructor,58,59,1
+block_hint,SetConstructor,218,219,1
+block_hint,SetConstructor,210,211,0
+block_hint,SetConstructor,79,80,1
+block_hint,SetConstructor,23,24,1
+block_hint,SetConstructor,222,223,1
+block_hint,SetConstructor,214,215,0
+block_hint,SetConstructor,150,151,1
+block_hint,SetConstructor,25,26,1
+block_hint,SetConstructor,178,179,1
+block_hint,SetConstructor,143,144,1
+block_hint,SetConstructor,83,84,1
+block_hint,SetConstructor,85,86,1
+block_hint,SetConstructor,87,88,1
+block_hint,SetConstructor,89,90,1
+block_hint,SetConstructor,91,92,1
+block_hint,SetConstructor,93,94,1
+block_hint,SetConstructor,34,35,1
+block_hint,SetConstructor,95,96,1
+block_hint,SetConstructor,146,147,1
+block_hint,SetConstructor,152,153,1
+block_hint,SetConstructor,190,191,0
+block_hint,SetConstructor,183,184,0
+block_hint,SetConstructor,154,155,0
+block_hint,SetConstructor,105,106,0
+block_hint,SetConstructor,137,138,1
+block_hint,SetConstructor,27,28,1
+block_hint,SetConstructor,62,63,1
+block_hint,SetConstructor,176,177,0
+block_hint,SetConstructor,66,67,1
+block_hint,SetPrototypeHas,10,11,1
+block_hint,SetPrototypeHas,5,6,1
+block_hint,SetPrototypeHas,7,8,1
+block_hint,SetPrototypeAdd,98,99,1
+block_hint,SetPrototypeAdd,62,63,1
+block_hint,SetPrototypeAdd,64,65,1
+block_hint,SetPrototypeAdd,88,89,1
+block_hint,SetPrototypeAdd,90,91,0
+block_hint,SetPrototypeAdd,27,28,1
+block_hint,SetPrototypeAdd,79,80,0
+block_hint,SetPrototypeAdd,25,26,0
+block_hint,SetPrototypeAdd,23,24,0
+block_hint,SetPrototypeAdd,35,36,1
+block_hint,SetPrototypeAdd,66,67,0
+block_hint,SetPrototypeAdd,51,52,1
+block_hint,SetPrototypeAdd,53,54,1
+block_hint,SetPrototypeAdd,33,34,1
+block_hint,SetPrototypeDelete,96,97,1
+block_hint,SetPrototypeDelete,75,76,1
+block_hint,SetPrototypeDelete,77,78,1
+block_hint,SetPrototypeDelete,15,16,0
+block_hint,SetPrototypeDelete,32,33,1
+block_hint,SetPrototypeDelete,87,88,0
+block_hint,SetPrototypeDelete,30,31,0
+block_hint,SetPrototypeDelete,28,29,0
+block_hint,SetPrototypeDelete,45,46,1
+block_hint,SetPrototypeDelete,83,84,0
+block_hint,SetPrototypeDelete,79,80,0
+block_hint,SetPrototypeDelete,19,20,1
+block_hint,SetPrototypeDelete,21,22,1
+block_hint,SetPrototypeGetSize,8,9,1
+block_hint,SetPrototypeGetSize,5,6,1
+block_hint,SetPrototypeGetSize,3,4,1
+block_hint,SetPrototypeValues,13,14,1
+block_hint,SetPrototypeValues,8,9,1
+block_hint,SetPrototypeValues,10,11,1
+block_hint,SetPrototypeValues,4,5,1
+block_hint,SetPrototypeValues,6,7,1
+block_hint,SetIteratorPrototypeNext,41,42,1
+block_hint,SetIteratorPrototypeNext,28,29,1
+block_hint,SetIteratorPrototypeNext,39,40,1
+block_hint,SetIteratorPrototypeNext,17,18,0
+block_hint,SetIteratorPrototypeNext,19,20,0
+block_hint,SetIteratorPrototypeNext,37,38,1
+block_hint,SetIteratorPrototypeNext,15,16,1
+block_hint,SetIteratorPrototypeNext,23,24,1
+block_hint,SetOrSetIteratorToList,33,34,1
+block_hint,SetOrSetIteratorToList,8,9,1
+block_hint,SetOrSetIteratorToList,43,44,1
+block_hint,SetOrSetIteratorToList,31,32,1
+block_hint,SetOrSetIteratorToList,47,48,1
+block_hint,SetOrSetIteratorToList,14,15,0
+block_hint,SetOrSetIteratorToList,19,20,0
+block_hint,SetOrSetIteratorToList,24,25,1
+block_hint,StringFromCharCode,87,88,1
+block_hint,StringFromCharCode,53,54,1
+block_hint,StringFromCharCode,11,12,0
+block_hint,StringFromCharCode,81,82,1
+block_hint,StringFromCharCode,77,78,1
+block_hint,StringFromCharCode,19,20,0
+block_hint,StringFromCharCode,23,24,0
+block_hint,StringFromCharCode,58,59,0
+block_hint,StringFromCharCode,21,22,0
+block_hint,StringFromCharCode,29,30,0
+block_hint,StringFromCharCode,35,36,0
+block_hint,StringFromCharCode,33,34,0
+block_hint,StringFromCharCode,75,76,0
+block_hint,StringFromCharCode,41,42,0
+block_hint,StringFromCharCode,17,18,1
+block_hint,StringFromCharCode,44,45,1
+block_hint,StringPrototypeReplace,36,37,1
+block_hint,StringPrototypeReplace,8,9,0
+block_hint,StringPrototypeReplace,55,56,1
+block_hint,StringPrototypeReplace,51,52,1
+block_hint,StringPrototypeReplace,38,39,1
+block_hint,StringPrototypeReplace,22,23,0
+block_hint,StringPrototypeReplace,3,4,1
+block_hint,StringPrototypeReplace,24,25,0
+block_hint,StringPrototypeReplace,5,6,1
+block_hint,StringPrototypeReplace,28,29,1
+block_hint,StringPrototypeReplace,10,11,1
+block_hint,StringPrototypeReplace,57,58,0
+block_hint,StringPrototypeReplace,30,31,1
+block_hint,StringPrototypeReplace,92,93,1 +block_hint,StringPrototypeReplace,87,88,1 +block_hint,StringPrototypeReplace,80,81,1 +block_hint,StringPrototypeReplace,73,74,1 +block_hint,StringPrototypeReplace,59,60,1 +block_hint,StringPrototypeReplace,61,62,0 +block_hint,StringPrototypeReplace,63,64,1 +block_hint,StringPrototypeReplace,53,54,1 +block_hint,StringPrototypeReplace,42,43,1 +block_hint,StringPrototypeReplace,14,15,1 +block_hint,StringPrototypeReplace,90,91,1 +block_hint,StringPrototypeReplace,82,83,1 +block_hint,StringPrototypeReplace,76,77,0 +block_hint,StringPrototypeReplace,78,79,1 +block_hint,StringPrototypeReplace,70,71,1 +block_hint,StringPrototypeReplace,49,50,1 +block_hint,StringPrototypeReplace,16,17,1 +block_hint,StringPrototypeReplace,18,19,0 +block_hint,StringPrototypeReplace,26,27,1 +block_hint,StringPrototypeSplit,125,126,1 +block_hint,StringPrototypeSplit,112,113,0 +block_hint,StringPrototypeSplit,92,93,1 +block_hint,StringPrototypeSplit,35,36,0 +block_hint,StringPrototypeSplit,114,115,1 +block_hint,StringPrototypeSplit,105,106,1 +block_hint,StringPrototypeSplit,94,95,1 +block_hint,StringPrototypeSplit,64,65,0 +block_hint,StringPrototypeSplit,8,9,1 +block_hint,StringPrototypeSplit,66,67,0 +block_hint,StringPrototypeSplit,10,11,1 +block_hint,StringPrototypeSplit,77,78,1 +block_hint,StringPrototypeSplit,37,38,1 +block_hint,StringPrototypeSplit,116,117,0 +block_hint,StringPrototypeSplit,79,80,1 +block_hint,StringPrototypeSplit,168,169,1 +block_hint,StringPrototypeSplit,152,153,1 +block_hint,StringPrototypeSplit,128,129,1 +block_hint,StringPrototypeSplit,122,123,1 +block_hint,StringPrototypeSplit,107,108,1 +block_hint,StringPrototypeSplit,83,84,0 +block_hint,StringPrototypeSplit,68,69,0 +block_hint,StringPrototypeSplit,85,86,1 +block_hint,StringPrototypeSplit,70,71,1 +block_hint,StringPrototypeSplit,88,89,1 +block_hint,StringPrototypeSplit,25,26,0 +block_hint,StringPrototypeSplit,72,73,1 +block_hint,StringPrototypeSplit,42,43,0 +block_hint,StringPrototypeSplit,110,111,1 +block_hint,StringPrototypeSplit,90,91,0 +block_hint,StringPrototypeSplit,27,28,1 +block_hint,StringPrototypeSplit,16,17,1 +block_hint,StringPrototypeSplit,18,19,1 +block_hint,StringPrototypeSplit,20,21,1 +block_hint,StringPrototypeSplit,50,51,1 +block_hint,TypedArrayConstructor,14,15,1 +block_hint,TypedArrayConstructor,11,12,0 +block_hint,TypedArrayConstructor,2,3,0 +block_hint,TypedArrayPrototypeByteLength,69,70,1 +block_hint,TypedArrayPrototypeByteLength,43,44,1 +block_hint,TypedArrayPrototypeByteLength,45,46,1 +block_hint,TypedArrayPrototypeByteLength,71,72,0 +block_hint,TypedArrayPrototypeByteLength,73,74,0 +block_hint,TypedArrayPrototypeByteLength,65,66,0 +block_hint,TypedArrayPrototypeByteLength,33,34,0 +block_hint,TypedArrayPrototypeLength,50,51,1 +block_hint,TypedArrayPrototypeLength,33,34,1 +block_hint,TypedArrayPrototypeLength,35,36,1 +block_hint,TypedArrayPrototypeLength,52,53,0 +block_hint,TypedArrayPrototypeLength,44,45,0 +block_hint,TypedArrayPrototypeLength,28,29,0 +block_hint,TypedArrayPrototypeLength,19,20,0 +block_hint,WeakMapConstructor,351,352,1 +block_hint,WeakMapConstructor,271,272,1 +block_hint,WeakMapConstructor,119,120,0 +block_hint,WeakMapConstructor,14,15,1 +block_hint,WeakMapConstructor,293,294,1 +block_hint,WeakMapConstructor,230,231,1 +block_hint,WeakMapConstructor,93,94,0 +block_hint,WeakMapConstructor,95,96,1 +block_hint,WeakMapConstructor,295,296,1 +block_hint,WeakMapConstructor,331,332,0 +block_hint,WeakMapConstructor,342,343,0 +block_hint,WeakMapConstructor,239,240,0 
+block_hint,WeakMapConstructor,123,124,0 +block_hint,WeakMapConstructor,241,242,0 +block_hint,WeakMapConstructor,109,110,0 +block_hint,WeakMapConstructor,243,244,1 +block_hint,WeakMapConstructor,211,212,1 +block_hint,WeakMapConstructor,28,29,1 +block_hint,WeakMapConstructor,30,31,1 +block_hint,WeakMapConstructor,32,33,1 +block_hint,WeakMapConstructor,98,99,0 +block_hint,WeakMapConstructor,117,118,1 +block_hint,WeakMapLookupHashIndex,9,10,1 +block_hint,WeakMapLookupHashIndex,31,32,1 +block_hint,WeakMapLookupHashIndex,11,12,0 +block_hint,WeakMapLookupHashIndex,13,14,0 +block_hint,WeakMapLookupHashIndex,25,26,1 +block_hint,WeakMapLookupHashIndex,33,34,1 +block_hint,WeakMapLookupHashIndex,27,28,0 +block_hint,WeakMapLookupHashIndex,23,24,0 +block_hint,WeakMapGet,12,13,1 +block_hint,WeakMapGet,7,8,1 +block_hint,WeakMapGet,9,10,1 +block_hint,WeakMapGet,3,4,1 +block_hint,WeakMapPrototypeHas,10,11,1 +block_hint,WeakMapPrototypeHas,5,6,1 +block_hint,WeakMapPrototypeHas,7,8,1 +block_hint,WeakMapPrototypeSet,24,25,1 +block_hint,WeakMapPrototypeSet,5,6,1 +block_hint,WeakMapPrototypeSet,7,8,1 +block_hint,WeakMapPrototypeSet,13,14,1 +block_hint,WeakMapPrototypeSet,22,23,1 +block_hint,WeakMapPrototypeSet,15,16,0 +block_hint,WeakMapPrototypeSet,9,10,0 +block_hint,WeakCollectionSet,17,18,1 +block_hint,WeakCollectionSet,20,21,0 +block_hint,WeakCollectionSet,7,8,1 +block_hint,WeakCollectionSet,13,14,0 +block_hint,AsyncGeneratorResolve,9,10,1 +block_hint,AsyncGeneratorResolve,3,4,1 +block_hint,AsyncGeneratorResolve,11,12,0 +block_hint,AsyncGeneratorResolve,7,8,0 +block_hint,AsyncGeneratorYieldWithAwait,24,25,1 +block_hint,AsyncGeneratorYieldWithAwait,19,20,0 +block_hint,AsyncGeneratorYieldWithAwait,6,7,1 +block_hint,AsyncGeneratorYieldWithAwait,42,43,1 +block_hint,AsyncGeneratorYieldWithAwait,37,38,0 +block_hint,AsyncGeneratorYieldWithAwait,28,29,1 +block_hint,AsyncGeneratorYieldWithAwait,8,9,1 +block_hint,AsyncGeneratorYieldWithAwait,10,11,1 +block_hint,AsyncGeneratorYieldWithAwait,12,13,1 +block_hint,AsyncGeneratorYieldWithAwait,14,15,1 +block_hint,AsyncGeneratorYieldWithAwait,22,23,0 +block_hint,AsyncGeneratorResumeNext,18,19,0 +block_hint,AsyncGeneratorResumeNext,14,15,0 +block_hint,AsyncGeneratorPrototypeNext,27,28,1 +block_hint,AsyncGeneratorPrototypeNext,16,17,1 +block_hint,AsyncGeneratorPrototypeNext,4,5,1 +block_hint,AsyncGeneratorPrototypeNext,34,35,1 +block_hint,AsyncGeneratorPrototypeNext,29,30,0 +block_hint,AsyncGeneratorPrototypeNext,18,19,1 +block_hint,AsyncGeneratorPrototypeNext,20,21,1 +block_hint,AsyncGeneratorPrototypeNext,22,23,1 +block_hint,AsyncGeneratorPrototypeNext,6,7,1 +block_hint,AsyncGeneratorPrototypeNext,11,12,0 +block_hint,AsyncGeneratorAwaitUncaught,24,25,1 +block_hint,AsyncGeneratorAwaitUncaught,19,20,1 +block_hint,AsyncGeneratorAwaitUncaught,2,3,1 +block_hint,AsyncGeneratorAwaitUncaught,30,31,1 +block_hint,AsyncGeneratorAwaitUncaught,32,33,0 +block_hint,AsyncGeneratorAwaitUncaught,28,29,1 +block_hint,AsyncGeneratorAwaitUncaught,8,9,1 +block_hint,AsyncGeneratorAwaitUncaught,10,11,1 +block_hint,AsyncGeneratorAwaitUncaught,12,13,1 +block_hint,AsyncGeneratorAwaitUncaught,14,15,1 +block_hint,AsyncGeneratorAwaitUncaught,22,23,0 +block_hint,AsyncGeneratorAwaitResolveClosure,8,9,1 +block_hint,AsyncGeneratorAwaitResolveClosure,2,3,1 +block_hint,AsyncGeneratorAwaitResolveClosure,6,7,0 +block_hint,AsyncGeneratorYieldWithAwaitResolveClosure,5,6,1 +block_hint,AsyncGeneratorYieldWithAwaitResolveClosure,2,3,1 +block_hint,StringAdd_CheckNone,19,20,1 +block_hint,StringAdd_CheckNone,58,59,0 
+block_hint,StringAdd_CheckNone,78,79,1 +block_hint,StringAdd_CheckNone,42,43,1 +block_hint,StringAdd_CheckNone,60,61,0 +block_hint,StringAdd_CheckNone,94,95,0 +block_hint,StringAdd_CheckNone,84,85,0 +block_hint,StringAdd_CheckNone,88,89,0 +block_hint,StringAdd_CheckNone,64,65,1 +block_hint,StringAdd_CheckNone,76,77,1 +block_hint,StringAdd_CheckNone,55,56,1 +block_hint,StringAdd_CheckNone,13,14,0 +block_hint,StringAdd_CheckNone,15,16,0 +block_hint,StringAdd_CheckNone,92,93,1 +block_hint,StringAdd_CheckNone,82,83,1 +block_hint,StringAdd_CheckNone,34,35,0 +block_hint,StringAdd_CheckNone,38,39,0 +block_hint,StringAdd_CheckNone,40,41,1 +block_hint,StringAdd_CheckNone,53,54,1 +block_hint,StringAdd_CheckNone,11,12,0 +block_hint,StringAdd_CheckNone,90,91,1 +block_hint,StringAdd_CheckNone,80,81,1 +block_hint,StringAdd_CheckNone,26,27,0 +block_hint,StringAdd_CheckNone,30,31,0 +block_hint,SubString,63,64,1 +block_hint,SubString,97,98,1 +block_hint,SubString,58,59,1 +block_hint,SubString,56,57,1 +block_hint,SubString,110,111,0 +block_hint,SubString,19,20,0 +block_hint,SubString,21,22,0 +block_hint,SubString,114,115,1 +block_hint,SubString,102,103,1 +block_hint,SubString,38,39,0 +block_hint,SubString,17,18,0 +block_hint,SubString,116,117,1 +block_hint,SubString,104,105,1 +block_hint,SubString,42,43,0 +block_hint,SubString,75,76,1 +block_hint,SubString,127,128,0 +block_hint,SubString,99,100,1 +block_hint,SubString,34,35,1 +block_hint,SubString,31,32,0 +block_hint,GetProperty,56,57,1 +block_hint,GetProperty,101,102,0 +block_hint,GetProperty,175,176,1 +block_hint,GetProperty,205,206,0 +block_hint,GetProperty,165,166,1 +block_hint,GetProperty,133,134,1 +block_hint,GetProperty,60,61,1 +block_hint,GetProperty,139,140,0 +block_hint,GetProperty,141,142,0 +block_hint,GetProperty,110,111,0 +block_hint,GetProperty,62,63,0 +block_hint,GetProperty,167,168,0 +block_hint,GetProperty,218,219,0 +block_hint,GetProperty,208,209,1 +block_hint,GetProperty,112,113,0 +block_hint,GetProperty,229,230,0 +block_hint,GetProperty,220,221,0 +block_hint,GetProperty,216,217,0 +block_hint,GetProperty,35,36,1 +block_hint,GetProperty,222,223,0 +block_hint,GetProperty,37,38,1 +block_hint,GetProperty,147,148,0 +block_hint,GetProperty,187,188,1 +block_hint,GetProperty,41,42,0 +block_hint,GetProperty,43,44,0 +block_hint,GetProperty,157,158,0 +block_hint,GetProperty,161,162,1 +block_hint,GetProperty,151,152,0 +block_hint,GetProperty,47,48,0 +block_hint,GetProperty,231,232,0 +block_hint,GetProperty,196,197,1 +block_hint,GetProperty,92,93,0 +block_hint,GetProperty,94,95,0 +block_hint,GetProperty,96,97,0 +block_hint,GetProperty,163,164,0 +block_hint,GetProperty,98,99,1 +block_hint,GetProperty,203,204,0 +block_hint,GetProperty,226,227,0 +block_hint,GetProperty,233,234,1 +block_hint,GetProperty,201,202,0 +block_hint,GetProperty,199,200,0 +block_hint,GetProperty,22,23,0 +block_hint,GetProperty,182,183,1 +block_hint,GetProperty,104,105,1 +block_hint,GetPropertyWithReceiver,58,59,1 +block_hint,GetPropertyWithReceiver,60,61,1 +block_hint,GetPropertyWithReceiver,203,204,0 +block_hint,GetPropertyWithReceiver,174,175,1 +block_hint,GetPropertyWithReceiver,211,212,0 +block_hint,GetPropertyWithReceiver,112,113,0 +block_hint,GetPropertyWithReceiver,162,163,1 +block_hint,GetPropertyWithReceiver,138,139,1 +block_hint,GetPropertyWithReceiver,62,63,1 +block_hint,GetPropertyWithReceiver,144,145,0 +block_hint,GetPropertyWithReceiver,146,147,0 +block_hint,GetPropertyWithReceiver,114,115,0 +block_hint,GetPropertyWithReceiver,64,65,0 
+block_hint,GetPropertyWithReceiver,164,165,0 +block_hint,GetPropertyWithReceiver,215,216,1 +block_hint,GetPropertyWithReceiver,117,118,0 +block_hint,GetPropertyWithReceiver,236,237,0 +block_hint,GetPropertyWithReceiver,232,233,0 +block_hint,GetPropertyWithReceiver,223,224,0 +block_hint,GetPropertyWithReceiver,148,149,1 +block_hint,GetPropertyWithReceiver,38,39,1 +block_hint,GetPropertyWithReceiver,234,235,0 +block_hint,GetPropertyWithReceiver,40,41,1 +block_hint,GetPropertyWithReceiver,183,184,0 +block_hint,GetPropertyWithReceiver,34,35,0 +block_hint,GetPropertyWithReceiver,229,230,1 +block_hint,GetPropertyWithReceiver,205,206,0 +block_hint,SetProperty,379,380,1 +block_hint,SetProperty,381,382,0 +block_hint,SetProperty,1213,1214,0 +block_hint,SetProperty,928,929,1 +block_hint,SetProperty,1031,1032,1 +block_hint,SetProperty,1033,1034,0 +block_hint,SetProperty,733,734,0 +block_hint,SetProperty,922,923,1 +block_hint,SetProperty,413,414,0 +block_hint,SetProperty,415,416,0 +block_hint,SetProperty,256,257,1 +block_hint,SetProperty,417,418,0 +block_hint,SetProperty,630,631,1 +block_hint,SetProperty,92,93,1 +block_hint,SetProperty,94,95,1 +block_hint,SetProperty,1098,1099,0 +block_hint,SetProperty,811,812,0 +block_hint,SetProperty,813,814,1 +block_hint,SetProperty,815,816,0 +block_hint,SetProperty,104,105,1 +block_hint,SetProperty,108,109,1 +block_hint,SetProperty,429,430,1 +block_hint,SetProperty,110,111,1 +block_hint,SetProperty,106,107,1 +block_hint,CreateDataProperty,319,320,1 +block_hint,CreateDataProperty,321,322,0 +block_hint,CreateDataProperty,990,991,0 +block_hint,CreateDataProperty,782,783,1 +block_hint,CreateDataProperty,865,866,1 +block_hint,CreateDataProperty,539,540,1 +block_hint,CreateDataProperty,648,649,1 +block_hint,CreateDataProperty,650,651,0 +block_hint,CreateDataProperty,912,913,1 +block_hint,CreateDataProperty,333,334,0 +block_hint,CreateDataProperty,55,56,1 +block_hint,CreateDataProperty,543,544,1 +block_hint,CreateDataProperty,57,58,1 +block_hint,ArrayPrototypeConcat,79,80,1 +block_hint,ArrayPrototypeConcat,54,55,1 +block_hint,ArrayPrototypeConcat,63,64,1 +block_hint,ArrayPrototypeConcat,74,75,0 +block_hint,ArrayPrototypeConcat,81,82,0 +block_hint,ArrayPrototypeConcat,70,71,1 +block_hint,ArrayPrototypeConcat,37,38,1 +block_hint,ArrayPrototypeConcat,16,17,1 +block_hint,ArrayPrototypeConcat,3,4,1 +block_hint,ArrayPrototypeConcat,25,26,1 +block_hint,ArrayPrototypeConcat,9,10,0 +block_hint,ArrayPrototypeConcat,20,21,1 +block_hint,ArrayPrototypeConcat,30,31,0 +block_hint,ArrayPrototypeConcat,42,43,0 +block_hint,ArrayPrototypeConcat,72,73,1 +block_hint,ArrayPrototypeConcat,39,40,1 +block_hint,ArrayPrototypeConcat,18,19,1 +block_hint,ArrayPrototypeConcat,5,6,1 +block_hint,ArrayPrototypeConcat,57,58,1 +block_hint,ArrayPrototypeConcat,59,60,0 +block_hint,ArrayPrototypeConcat,66,67,0 +block_hint,ArrayPrototypeConcat,33,34,1 +block_hint,ArrayPrototypeConcat,68,69,0 +block_hint,ArrayPrototypeConcat,35,36,1 +block_hint,ArrayPrototypeConcat,27,28,1 +block_hint,ArrayPrototypeConcat,11,12,1 +block_hint,ArrayEvery,73,74,1 +block_hint,ArrayEvery,31,32,0 +block_hint,ArrayEvery,125,126,1 +block_hint,ArrayEvery,117,118,1 +block_hint,ArrayEvery,91,92,1 +block_hint,ArrayEvery,93,94,1 +block_hint,ArrayEvery,99,100,1 +block_hint,ArrayEvery,121,122,0 +block_hint,ArrayEvery,105,106,1 +block_hint,ArrayEvery,107,108,1 +block_hint,ArrayEvery,97,98,1 +block_hint,ArrayEvery,49,50,0 +block_hint,ArrayEvery,102,103,1 +block_hint,ArrayEvery,66,67,1 +block_hint,ArrayEvery,45,46,1 
+block_hint,ArrayEvery,12,13,1 +block_hint,ArrayEvery,57,58,1 +block_hint,ArrayEvery,28,29,0 +block_hint,ArrayEvery,68,69,1 +block_hint,ArrayEvery,70,71,0 +block_hint,ArrayEvery,51,52,0 +block_hint,ArrayEvery,47,48,0 +block_hint,ArrayEvery,18,19,0 +block_hint,ArrayEvery,20,21,1 +block_hint,ArrayEvery,61,62,0 +block_hint,ArrayEvery,109,110,1 +block_hint,ArrayEvery,87,88,0 +block_hint,ArrayEvery,89,90,0 +block_hint,ArrayEvery,111,112,0 +block_hint,ArrayEvery,79,80,0 +block_hint,ArrayFilter,194,195,1 +block_hint,ArrayFilter,84,85,0 +block_hint,ArrayFilter,304,305,1 +block_hint,ArrayFilter,293,294,1 +block_hint,ArrayFilter,228,229,1 +block_hint,ArrayFilter,230,231,1 +block_hint,ArrayFilter,249,250,1 +block_hint,ArrayFilter,302,303,0 +block_hint,ArrayFilter,274,275,1 +block_hint,ArrayFilter,276,277,1 +block_hint,ArrayFilter,242,243,0 +block_hint,ArrayFilter,280,281,1 +block_hint,ArrayFilter,196,197,1 +block_hint,ArrayFilter,123,124,1 +block_hint,ArrayFilter,22,23,1 +block_hint,ArrayFilter,198,199,1 +block_hint,ArrayFilter,125,126,0 +block_hint,ArrayFilter,24,25,1 +block_hint,ArrayFilter,271,272,1 +block_hint,ArrayFilter,167,168,0 +block_hint,ArrayFilter,282,283,1 +block_hint,ArrayFilter,200,201,1 +block_hint,ArrayFilter,127,128,1 +block_hint,ArrayFilter,26,27,1 +block_hint,ArrayFilter,210,211,1 +block_hint,ArrayFilter,212,213,0 +block_hint,ArrayFilter,287,288,1 +block_hint,ArrayFilter,214,215,1 +block_hint,ArrayFilter,216,217,1 +block_hint,ArrayFilter,218,219,1 +block_hint,ArrayFilter,202,203,1 +block_hint,ArrayFilter,129,130,0 +block_hint,ArrayFilter,28,29,1 +block_hint,ArrayFilter,172,173,0 +block_hint,ArrayFilter,103,104,0 +block_hint,ArrayFilter,245,246,1 +block_hint,ArrayFilter,247,248,0 +block_hint,ArrayFilter,204,205,0 +block_hint,ArrayFilter,131,132,0 +block_hint,ArrayFilter,42,43,0 +block_hint,ArrayFilter,44,45,1 +block_hint,ArrayFilter,149,150,0 +block_hint,ArrayFilter,252,253,1 +block_hint,ArrayFilter,178,179,0 +block_hint,ArrayFilter,180,181,0 +block_hint,ArrayFilter,254,255,0 +block_hint,ArrayFilter,256,257,0 +block_hint,ArrayFilter,258,259,1 +block_hint,ArrayFilter,260,261,0 +block_hint,ArrayFilter,262,263,1 +block_hint,ArrayFilter,284,285,0 +block_hint,ArrayFilter,240,241,0 +block_hint,ArrayFilter,162,163,0 +block_hint,ArrayFilter,95,96,0 +block_hint,ArrayFilter,188,189,1 +block_hint,ArrayFilter,60,61,0 +block_hint,ArrayFilter,64,65,1 +block_hint,ArrayFilter,50,51,1 +block_hint,ArrayForEach,70,71,1 +block_hint,ArrayForEach,29,30,0 +block_hint,ArrayForEach,102,103,1 +block_hint,ArrayForEach,96,97,1 +block_hint,ArrayForEach,76,77,1 +block_hint,ArrayForEach,78,79,1 +block_hint,ArrayForEach,84,85,1 +block_hint,ArrayForEach,100,101,0 +block_hint,ArrayForEach,90,91,1 +block_hint,ArrayForEach,92,93,1 +block_hint,ArrayForEach,47,48,0 +block_hint,ArrayForEach,87,88,1 +block_hint,ArrayForEach,63,64,1 +block_hint,ArrayForEach,43,44,1 +block_hint,ArrayForEach,12,13,1 +block_hint,ArrayForEach,53,54,1 +block_hint,ArrayForEach,26,27,0 +block_hint,ArrayForEach,65,66,1 +block_hint,ArrayForEach,67,68,0 +block_hint,ArrayForEach,49,50,0 +block_hint,ArrayForEach,45,46,0 +block_hint,ArrayForEach,18,19,0 +block_hint,ArrayForEach,20,21,1 +block_hint,ArrayForEach,58,59,0 +block_hint,ArrayFrom,225,226,1 +block_hint,ArrayFrom,76,77,1 +block_hint,ArrayFrom,78,79,1 +block_hint,ArrayFrom,8,9,1 +block_hint,ArrayFrom,342,343,1 +block_hint,ArrayFrom,338,339,0 +block_hint,ArrayFrom,327,328,0 +block_hint,ArrayFrom,311,312,1 +block_hint,ArrayFrom,309,310,0 +block_hint,ArrayFrom,80,81,1 +block_hint,ArrayFrom,10,11,1 
+block_hint,ArrayFrom,322,323,1 +block_hint,ArrayFrom,305,306,0 +block_hint,ArrayFrom,245,246,1 +block_hint,ArrayFrom,266,267,0 +block_hint,ArrayFrom,82,83,1 +block_hint,ArrayFrom,12,13,1 +block_hint,ArrayFrom,268,269,1 +block_hint,ArrayFrom,213,214,0 +block_hint,ArrayFrom,290,291,1 +block_hint,ArrayFrom,248,249,0 +block_hint,ArrayFrom,285,286,1 +block_hint,ArrayFrom,281,282,0 +block_hint,ArrayFrom,188,189,1 +block_hint,ArrayFrom,88,89,1 +block_hint,ArrayFrom,18,19,1 +block_hint,ArrayFrom,215,216,1 +block_hint,ArrayFrom,72,73,1 +block_hint,ArrayIsArray,13,14,1 +block_hint,ArrayIsArray,9,10,1 +block_hint,ArrayIsArray,7,8,0 +block_hint,LoadJoinElement_FastSmiOrObjectElements_0,2,3,1 +block_hint,LoadJoinElement_FastSmiOrObjectElements_0,4,5,0 +block_hint,LoadJoinElement_FastDoubleElements_0,3,4,1 +block_hint,LoadJoinElement_FastDoubleElements_0,5,6,0 +block_hint,LoadJoinElement_FastDoubleElements_0,7,8,1 +block_hint,JoinStackPush,28,29,1 +block_hint,JoinStackPush,6,7,1 +block_hint,JoinStackPush,10,11,0 +block_hint,JoinStackPop,9,10,1 +block_hint,JoinStackPop,4,5,1 +block_hint,ArrayPrototypeJoin,512,513,1 +block_hint,ArrayPrototypeJoin,450,451,1 +block_hint,ArrayPrototypeJoin,417,418,1 +block_hint,ArrayPrototypeJoin,330,331,1 +block_hint,ArrayPrototypeJoin,332,333,1 +block_hint,ArrayPrototypeJoin,363,364,1 +block_hint,ArrayPrototypeJoin,336,337,0 +block_hint,ArrayPrototypeJoin,179,180,0 +block_hint,ArrayPrototypeJoin,468,469,1 +block_hint,ArrayPrototypeJoin,434,435,1 +block_hint,ArrayPrototypeJoin,324,325,0 +block_hint,ArrayPrototypeJoin,226,227,1 +block_hint,ArrayPrototypeJoin,30,31,1 +block_hint,ArrayPrototypeJoin,181,182,0 +block_hint,ArrayPrototypeJoin,32,33,1 +block_hint,ArrayPrototypeJoin,385,386,1 +block_hint,ArrayPrototypeJoin,321,322,0 +block_hint,ArrayPrototypeJoin,143,144,1 +block_hint,ArrayPrototypeJoin,487,488,1 +block_hint,ArrayPrototypeJoin,452,453,0 +block_hint,ArrayPrototypeJoin,421,422,0 +block_hint,ArrayPrototypeJoin,365,366,1 +block_hint,ArrayPrototypeJoin,183,184,1 +block_hint,ArrayPrototypeJoin,38,39,1 +block_hint,ArrayPrototypeJoin,454,455,1 +block_hint,ArrayPrototypeJoin,423,424,0 +block_hint,ArrayPrototypeJoin,293,294,1 +block_hint,ArrayPrototypeJoin,426,427,0 +block_hint,ArrayPrototypeJoin,341,342,0 +block_hint,ArrayPrototypeJoin,189,190,0 +block_hint,ArrayPrototypeJoin,230,231,1 +block_hint,ArrayPrototypeJoin,145,146,1 +block_hint,ArrayPrototypeJoin,477,478,0 +block_hint,ArrayPrototypeJoin,481,482,1 +block_hint,ArrayPrototypeJoin,519,520,0 +block_hint,ArrayPrototypeJoin,515,516,0 +block_hint,ArrayPrototypeJoin,508,509,1 +block_hint,ArrayPrototypeJoin,483,484,1 +block_hint,ArrayPrototypeJoin,479,480,0 +block_hint,ArrayPrototypeJoin,147,148,0 +block_hint,ArrayPrototypeJoin,149,150,0 +block_hint,ArrayPrototypeJoin,463,464,0 +block_hint,ArrayPrototypeJoin,465,466,0 +block_hint,ArrayPrototypeJoin,448,449,1 +block_hint,ArrayPrototypeJoin,405,406,1 +block_hint,ArrayPrototypeJoin,407,408,1 +block_hint,ArrayPrototypeJoin,409,410,1 +block_hint,ArrayPrototypeJoin,411,412,1 +block_hint,ArrayPrototypeJoin,197,198,1 +block_hint,ArrayPrototypeJoin,252,253,0 +block_hint,ArrayPrototypeJoin,254,255,0 +block_hint,ArrayPrototypeJoin,300,301,0 +block_hint,ArrayPrototypeJoin,260,261,0 +block_hint,ArrayPrototypeJoin,262,263,0 +block_hint,ArrayPrototypeJoin,203,204,1 +block_hint,ArrayPrototypeJoin,72,73,1 +block_hint,ArrayPrototypeJoin,377,378,0 +block_hint,ArrayPrototypeJoin,303,304,1 +block_hint,ArrayPrototypeJoin,207,208,1 +block_hint,ArrayPrototypeJoin,268,269,0 
+block_hint,ArrayPrototypeJoin,270,271,0 +block_hint,ArrayPrototypeJoin,209,210,1 +block_hint,ArrayPrototypeJoin,86,87,1 +block_hint,ArrayPrototypeJoin,305,306,1 +block_hint,ArrayPrototypeJoin,102,103,0 +block_hint,ArrayPrototypeJoin,104,105,0 +block_hint,ArrayPrototypeJoin,401,402,1 +block_hint,ArrayPrototypeJoin,383,384,1 +block_hint,ArrayPrototypeJoin,100,101,0 +block_hint,ArrayPrototypeJoin,399,400,1 +block_hint,ArrayPrototypeJoin,381,382,1 +block_hint,ArrayPrototypeJoin,96,97,1 +block_hint,ArrayPrototypeJoin,348,349,1 +block_hint,ArrayPrototypeJoin,307,308,0 +block_hint,ArrayPrototypeJoin,215,216,0 +block_hint,ArrayPrototypeJoin,106,107,1 +block_hint,ArrayPrototypeJoin,108,109,0 +block_hint,ArrayPrototypeJoin,110,111,1 +block_hint,ArrayPrototypeJoin,282,283,1 +block_hint,ArrayPrototypeJoin,139,140,1 +block_hint,ArrayPrototypeToString,14,15,1 +block_hint,ArrayPrototypeToString,11,12,1 +block_hint,ArrayPrototypeToString,8,9,1 +block_hint,ArrayPrototypeToString,5,6,1 +block_hint,ArrayPrototypeToString,3,4,1 +block_hint,ArrayPrototypeLastIndexOf,279,280,1 +block_hint,ArrayPrototypeLastIndexOf,261,262,1 +block_hint,ArrayPrototypeLastIndexOf,245,246,1 +block_hint,ArrayPrototypeLastIndexOf,175,176,1 +block_hint,ArrayPrototypeLastIndexOf,177,178,1 +block_hint,ArrayPrototypeLastIndexOf,91,92,0 +block_hint,ArrayPrototypeLastIndexOf,41,42,1 +block_hint,ArrayPrototypeLastIndexOf,375,376,0 +block_hint,ArrayPrototypeLastIndexOf,361,362,0 +block_hint,ArrayPrototypeLastIndexOf,367,368,0 +block_hint,ArrayPrototypeLastIndexOf,358,359,0 +block_hint,ArrayPrototypeLastIndexOf,335,336,0 +block_hint,ArrayPrototypeLastIndexOf,324,325,1 +block_hint,ArrayPrototypeLastIndexOf,338,339,0 +block_hint,ArrayPrototypeLastIndexOf,328,329,0 +block_hint,ArrayPrototypeLastIndexOf,315,316,0 +block_hint,ArrayPrototypeLastIndexOf,300,301,0 +block_hint,ArrayPrototypeLastIndexOf,313,314,0 +block_hint,ArrayPrototypeLastIndexOf,298,299,0 +block_hint,ArrayPrototypeLastIndexOf,281,282,1 +block_hint,ArrayPrototypeLastIndexOf,252,253,0 +block_hint,ArrayPrototypeLastIndexOf,194,195,1 +block_hint,ArrayPrototypeLastIndexOf,83,84,1 +block_hint,ArrayPrototypeLastIndexOf,73,74,1 +block_hint,ArrayPrototypeLastIndexOf,21,22,1 +block_hint,ArrayPrototypeLastIndexOf,85,86,1 +block_hint,ArrayPrototypeLastIndexOf,77,78,0 +block_hint,ArrayPrototypeLastIndexOf,29,30,1 +block_hint,ArrayPrototypeLastIndexOf,60,61,0 +block_hint,ArrayPrototypeLastIndexOf,98,99,1 +block_hint,ArrayPrototypeLastIndexOf,56,57,0 +block_hint,ArrayPrototypeLastIndexOf,23,24,1 +block_hint,ArrayPrototypeLastIndexOf,58,59,0 +block_hint,ArrayPrototypeLastIndexOf,214,215,0 +block_hint,ArrayPrototypeLastIndexOf,220,221,1 +block_hint,ArrayPrototypeLastIndexOf,239,240,0 +block_hint,ArrayPrototypeLastIndexOf,212,213,0 +block_hint,ArrayPrototypeLastIndexOf,145,146,0 +block_hint,ArrayPrototypeLastIndexOf,129,130,1 +block_hint,ArrayPrototypeLastIndexOf,31,32,0 +block_hint,ArrayMap,163,164,1 +block_hint,ArrayMap,72,73,0 +block_hint,ArrayMap,270,271,1 +block_hint,ArrayMap,249,250,1 +block_hint,ArrayMap,194,195,1 +block_hint,ArrayMap,196,197,1 +block_hint,ArrayMap,215,216,1 +block_hint,ArrayMap,268,269,0 +block_hint,ArrayMap,229,230,1 +block_hint,ArrayMap,231,232,1 +block_hint,ArrayMap,258,259,1 +block_hint,ArrayMap,212,213,0 +block_hint,ArrayMap,226,227,1 +block_hint,ArrayMap,233,234,1 +block_hint,ArrayMap,165,166,1 +block_hint,ArrayMap,114,115,1 +block_hint,ArrayMap,23,24,1 +block_hint,ArrayMap,241,242,1 +block_hint,ArrayMap,217,218,0 +block_hint,ArrayMap,161,162,0 
+block_hint,ArrayMap,74,75,0 +block_hint,ArrayMap,180,181,1 +block_hint,ArrayMap,159,160,1 +block_hint,ArrayMap,55,56,0 +block_hint,ArrayMap,283,284,1 +block_hint,ArrayMap,280,281,0 +block_hint,ArrayMap,261,262,0 +block_hint,ArrayMap,235,236,0 +block_hint,ArrayMap,201,202,0 +block_hint,ArrayMap,116,117,0 +block_hint,ArrayMap,29,30,0 +block_hint,ArrayMap,31,32,1 +block_hint,ArrayMap,132,133,0 +block_hint,ArrayMap,33,34,1 +block_hint,ArrayMap,120,121,0 +block_hint,ArrayMap,37,38,1 +block_hint,ArrayMap,35,36,1 +block_hint,ArrayMap,266,267,0 +block_hint,ArrayMap,209,210,0 +block_hint,ArrayMap,151,152,0 +block_hint,ArrayMap,45,46,1 +block_hint,ArrayMap,153,154,0 +block_hint,ArrayMap,89,90,1 +block_hint,ArrayMap,83,84,0 +block_hint,ArrayMap,85,86,1 +block_hint,ArrayMap,182,183,1 +block_hint,ArrayMap,184,185,0 +block_hint,ArrayMap,62,63,0 +block_hint,ArrayMap,64,65,1 +block_hint,ArrayMap,96,97,1 +block_hint,ArrayMap,47,48,1 +block_hint,ArrayMap,155,156,1 +block_hint,ArrayMap,98,99,1 +block_hint,ArrayMap,49,50,1 +block_hint,ArrayMap,136,137,1 +block_hint,ArrayReduce,81,82,1 +block_hint,ArrayReduce,30,31,0 +block_hint,ArrayReduce,127,128,1 +block_hint,ArrayReduce,121,122,1 +block_hint,ArrayReduce,89,90,1 +block_hint,ArrayReduce,91,92,1 +block_hint,ArrayReduce,101,102,1 +block_hint,ArrayReduce,125,126,0 +block_hint,ArrayReduce,111,112,1 +block_hint,ArrayReduce,113,114,1 +block_hint,ArrayReduce,95,96,1 +block_hint,ArrayReduce,104,105,0 +block_hint,ArrayReduce,49,50,0 +block_hint,ArrayReduce,106,107,1 +block_hint,ArrayReduce,65,66,1 +block_hint,ArrayReduce,45,46,1 +block_hint,ArrayReduce,12,13,1 +block_hint,ArrayReduce,53,54,1 +block_hint,ArrayReduce,26,27,0 +block_hint,ArrayReduce,99,100,0 +block_hint,ArrayReduce,67,68,1 +block_hint,ArrayReduce,69,70,0 +block_hint,ArrayReduce,117,118,0 +block_hint,ArrayReduce,97,98,0 +block_hint,ArrayReduce,71,72,0 +block_hint,ArrayReduce,47,48,0 +block_hint,ArrayReduce,18,19,0 +block_hint,ArrayReduce,20,21,1 +block_hint,ArrayReduce,57,58,0 +block_hint,ArrayReduce,59,60,0 +block_hint,ArrayReduce,23,24,0 +block_hint,ArrayPrototypeReverse,236,237,1 +block_hint,ArrayPrototypeReverse,210,211,1 +block_hint,ArrayPrototypeReverse,190,191,1 +block_hint,ArrayPrototypeReverse,152,153,1 +block_hint,ArrayPrototypeReverse,103,104,1 +block_hint,ArrayPrototypeReverse,18,19,1 +block_hint,ArrayPrototypeReverse,192,193,1 +block_hint,ArrayPrototypeReverse,169,170,0 +block_hint,ArrayPrototypeReverse,140,141,1 +block_hint,ArrayPrototypeReverse,118,119,1 +block_hint,ArrayPrototypeReverse,89,90,0 +block_hint,ArrayPrototypeShift,237,238,1 +block_hint,ArrayPrototypeShift,205,206,1 +block_hint,ArrayPrototypeShift,185,186,1 +block_hint,ArrayPrototypeShift,132,133,1 +block_hint,ArrayPrototypeShift,81,82,1 +block_hint,ArrayPrototypeShift,11,12,1 +block_hint,ArrayPrototypeShift,196,197,1 +block_hint,ArrayPrototypeShift,168,169,0 +block_hint,ArrayPrototypeShift,134,135,1 +block_hint,ArrayPrototypeShift,83,84,0 +block_hint,ArrayPrototypeShift,13,14,1 +block_hint,ArrayPrototypeShift,136,137,0 +block_hint,ArrayPrototypeShift,85,86,0 +block_hint,ArrayPrototypeShift,68,69,0 +block_hint,ArrayPrototypeShift,87,88,0 +block_hint,ArrayPrototypeShift,27,28,0 +block_hint,ArrayPrototypeShift,29,30,1 +block_hint,ArrayPrototypeShift,170,171,0 +block_hint,ArrayPrototypeShift,89,90,0 +block_hint,ArrayPrototypeShift,33,34,0 +block_hint,ArrayPrototypeShift,148,149,0 +block_hint,ArrayPrototypeShift,111,112,0 +block_hint,ArrayPrototypeShift,91,92,0 +block_hint,ArrayPrototypeShift,39,40,0 
+block_hint,ArrayPrototypeShift,41,42,1 +block_hint,ArrayPrototypeSlice,288,289,1 +block_hint,ArrayPrototypeSlice,267,268,1 +block_hint,ArrayPrototypeSlice,245,246,1 +block_hint,ArrayPrototypeSlice,182,183,1 +block_hint,ArrayPrototypeSlice,81,82,1 +block_hint,ArrayPrototypeSlice,12,13,1 +block_hint,ArrayPrototypeSlice,83,84,1 +block_hint,ArrayPrototypeSlice,14,15,1 +block_hint,ArrayPrototypeSlice,16,17,1 +block_hint,ArrayPrototypeSlice,87,88,1 +block_hint,ArrayPrototypeSlice,511,512,0 +block_hint,ArrayPrototypeSlice,509,510,0 +block_hint,ArrayPrototypeSlice,485,486,0 +block_hint,ArrayPrototypeSlice,448,449,0 +block_hint,ArrayPrototypeSlice,428,429,0 +block_hint,ArrayPrototypeSlice,405,406,0 +block_hint,ArrayPrototypeSlice,446,447,0 +block_hint,ArrayPrototypeSlice,426,427,0 +block_hint,ArrayPrototypeSlice,401,402,1 +block_hint,ArrayPrototypeSlice,479,480,0 +block_hint,ArrayPrototypeSlice,465,466,0 +block_hint,ArrayPrototypeSlice,454,455,0 +block_hint,ArrayPrototypeSlice,424,425,0 +block_hint,ArrayPrototypeSlice,422,423,0 +block_hint,ArrayPrototypeSlice,393,394,1 +block_hint,ArrayPrototypeSlice,332,333,0 +block_hint,ArrayPrototypeSlice,277,278,1 +block_hint,ArrayPrototypeSlice,257,258,0 +block_hint,ArrayPrototypeSlice,89,90,1 +block_hint,ArrayPrototypeSlice,20,21,1 +block_hint,ArrayPrototypeSlice,128,129,1 +block_hint,ArrayPrototypeSlice,66,67,0 +block_hint,ArrayPrototypeSlice,443,444,0 +block_hint,ArrayPrototypeSlice,386,387,0 +block_hint,ArrayPrototypeSlice,364,365,0 +block_hint,ArrayPrototypeSlice,384,385,0 +block_hint,ArrayPrototypeSlice,362,363,0 +block_hint,ArrayPrototypeSlice,344,345,1 +block_hint,ArrayPrototypeSlice,437,438,0 +block_hint,ArrayPrototypeSlice,413,414,0 +block_hint,ArrayPrototypeSlice,388,389,0 +block_hint,ArrayPrototypeSlice,360,361,0 +block_hint,ArrayPrototypeSlice,340,341,1 +block_hint,ArrayPrototypeSlice,309,310,0 +block_hint,ArrayPrototypeSlice,296,297,0 +block_hint,ArrayPrototypeSlice,284,285,0 +block_hint,ArrayPrototypeSlice,261,262,0 +block_hint,ArrayPrototypeSlice,238,239,1 +block_hint,ArrayPrototypeSlice,141,142,0 +block_hint,ArrayPrototypeSlice,143,144,0 +block_hint,ArrayPrototypeSlice,190,191,1 +block_hint,ArrayPrototypeSlice,211,212,0 +block_hint,ArrayPrototypeSlice,91,92,1 +block_hint,ArrayPrototypeSlice,22,23,1 +block_hint,ArrayPrototypeSlice,197,198,1 +block_hint,ArrayPrototypeSlice,134,135,0 +block_hint,ArrayPrototypeSlice,69,70,0 +block_hint,ArrayPrototypeSlice,93,94,1 +block_hint,ArrayPrototypeSlice,24,25,1 +block_hint,ArrayPrototypeSlice,95,96,1 +block_hint,ArrayPrototypeSlice,26,27,1 +block_hint,ArrayPrototypeSlice,28,29,1 +block_hint,ArrayPrototypeSlice,99,100,1 +block_hint,ArrayPrototypeSlice,42,43,1 +block_hint,ArrayPrototypeSlice,145,146,1 +block_hint,ArrayPrototypeSlice,174,175,0 +block_hint,ArrayPrototypeSlice,176,177,1 +block_hint,ArrayPrototypeSlice,157,158,0 +block_hint,ArrayPrototypeSlice,101,102,0 +block_hint,ArrayPrototypeSlice,32,33,1 +block_hint,ArrayPrototypeSlice,250,251,1 +block_hint,ArrayPrototypeSlice,221,222,1 +block_hint,ArrayPrototypeSlice,118,119,1 +block_hint,ArrayPrototypeSlice,57,58,0 +block_hint,ArrayPrototypeSlice,59,60,1 +block_hint,ArrayPrototypeSlice,75,76,1 +block_hint,ArrayPrototypeSlice,103,104,0 +block_hint,ArrayPrototypeSlice,232,233,0 +block_hint,ArrayPrototypeSlice,164,165,1 +block_hint,ArrayPrototypeSlice,71,72,0 +block_hint,ArrayPrototypeSlice,178,179,0 +block_hint,ArrayPrototypeSlice,160,161,0 +block_hint,ArrayPrototypeSlice,109,110,0 +block_hint,ArrayPrototypeSlice,44,45,1 
+block_hint,ArrayPrototypeSlice,248,249,1 +block_hint,ArrayPrototypeSlice,217,218,1 +block_hint,ArrayPrototypeSlice,116,117,1 +block_hint,ArrayPrototypeSlice,49,50,0 +block_hint,ArrayPrototypeSlice,136,137,0 +block_hint,ArrayPrototypeSlice,73,74,0 +block_hint,ArraySome,88,89,1 +block_hint,ArraySome,31,32,0 +block_hint,ArraySome,122,123,1 +block_hint,ArraySome,116,117,1 +block_hint,ArraySome,93,94,1 +block_hint,ArraySome,95,96,1 +block_hint,ArraySome,101,102,1 +block_hint,ArraySome,120,121,0 +block_hint,ArraySome,108,109,1 +block_hint,ArraySome,110,111,1 +block_hint,ArraySome,99,100,1 +block_hint,ArraySome,56,57,0 +block_hint,ArraySome,105,106,1 +block_hint,ArraySome,77,78,1 +block_hint,ArraySome,52,53,1 +block_hint,ArraySome,13,14,1 +block_hint,ArraySome,62,63,1 +block_hint,ArraySome,27,28,0 +block_hint,ArraySome,79,80,1 +block_hint,ArraySome,81,82,0 +block_hint,ArraySome,58,59,0 +block_hint,ArraySome,54,55,0 +block_hint,ArraySome,19,20,0 +block_hint,ArraySome,21,22,1 +block_hint,ArraySome,66,67,0 +block_hint,ArrayPrototypeSplice,605,606,1 +block_hint,ArrayPrototypeSplice,450,451,1 +block_hint,ArrayPrototypeSplice,452,453,1 +block_hint,ArrayPrototypeSplice,1201,1202,0 +block_hint,ArrayPrototypeSplice,1183,1184,0 +block_hint,ArrayPrototypeSplice,1159,1160,0 +block_hint,ArrayPrototypeSplice,1138,1139,0 +block_hint,ArrayPrototypeSplice,1104,1105,0 +block_hint,ArrayPrototypeSplice,1072,1073,0 +block_hint,ArrayPrototypeSplice,1031,1032,0 +block_hint,ArrayPrototypeSplice,1102,1103,0 +block_hint,ArrayPrototypeSplice,1070,1071,0 +block_hint,ArrayPrototypeSplice,1027,1028,1 +block_hint,ArrayPrototypeSplice,940,941,1 +block_hint,ArrayPrototypeSplice,1093,1094,0 +block_hint,ArrayPrototypeSplice,1226,1227,0 +block_hint,ArrayPrototypeSplice,1218,1219,0 +block_hint,ArrayPrototypeSplice,1205,1206,0 +block_hint,ArrayPrototypeSplice,1193,1194,1 +block_hint,ArrayPrototypeSplice,1162,1163,0 +block_hint,ArrayPrototypeSplice,1142,1143,0 +block_hint,ArrayPrototypeSplice,1121,1122,0 +block_hint,ArrayPrototypeSplice,1078,1079,0 +block_hint,ArrayPrototypeSplice,1038,1039,0 +block_hint,ArrayPrototypeSplice,1076,1077,0 +block_hint,ArrayPrototypeSplice,1036,1037,0 +block_hint,ArrayPrototypeSplice,987,988,1 +block_hint,ArrayPrototypeSplice,877,878,0 +block_hint,ArrayPrototypeSplice,842,843,0 +block_hint,ArrayPrototypeSplice,810,811,0 +block_hint,ArrayPrototypeSplice,731,732,0 +block_hint,ArrayPrototypeSplice,677,678,0 +block_hint,ArrayPrototypeSplice,607,608,0 +block_hint,ArrayPrototypeSplice,511,512,1 +block_hint,ArrayPrototypeSplice,456,457,0 +block_hint,ArrayPrototypeSplice,223,224,0 +block_hint,ArrayPrototypeSplice,332,333,0 +block_hint,ArrayPrototypeSplice,334,335,0 +block_hint,ArrayPrototypeSplice,336,337,0 +block_hint,ArrayPrototypeSplice,225,226,1 +block_hint,ArrayPrototypeSplice,51,52,1 +block_hint,ArrayPrototypeSplice,338,339,1 +block_hint,ArrayPrototypeSplice,340,341,0 +block_hint,ArrayPrototypeSplice,342,343,0 +block_hint,ArrayPrototypeSplice,387,388,1 +block_hint,ArrayPrototypeSplice,227,228,0 +block_hint,ArrayPrototypeSplice,53,54,1 +block_hint,ArrayPrototypeSplice,244,245,0 +block_hint,ArrayPrototypeSplice,93,94,1 +block_hint,ArrayPrototypeSplice,556,557,0 +block_hint,ArrayPrototypeSplice,400,401,0 +block_hint,ArrayPrototypeSplice,590,591,0 +block_hint,ArrayPrototypeSplice,528,529,1 +block_hint,ArrayPrototypeSplice,345,346,0 +block_hint,ArrayPrototypeSplice,347,348,1 +block_hint,ArrayPrototypeSplice,235,236,0 +block_hint,ArrayPrototypeSplice,258,259,1 +block_hint,ArrayPrototypeSplice,105,106,0 
+block_hint,ArrayPrototypeSplice,229,230,0 +block_hint,ArrayPrototypeSplice,329,330,0 +block_hint,ArrayPrototypeSplice,327,328,0 +block_hint,ArrayPrototypeSplice,391,392,1 +block_hint,ArrayPrototypeSplice,65,66,1 +block_hint,ArrayPrototypeSplice,294,295,1 +block_hint,ArrayPrototypeSplice,143,144,0 +block_hint,ArrayPrototypeSplice,67,68,0 +block_hint,ArrayPrototypeSplice,69,70,0 +block_hint,ArrayPrototypeSplice,263,264,1 +block_hint,ArrayPrototypeSplice,178,179,1 +block_hint,ArrayPrototypeSplice,325,326,0 +block_hint,ArrayPrototypeSplice,425,426,1 +block_hint,ArrayPrototypeSplice,265,266,0 +block_hint,ArrayPrototypeSplice,111,112,0 +block_hint,ArrayPrototypeSplice,427,428,0 +block_hint,ArrayPrototypeSplice,267,268,0 +block_hint,ArrayPrototypeSplice,113,114,0 +block_hint,ArrayPrototypeSplice,115,116,0 +block_hint,ArrayPrototypeSplice,182,183,0 +block_hint,ArrayPrototypeSplice,63,64,1 +block_hint,ArrayPrototypeSplice,131,132,1 +block_hint,ArrayPrototypeSplice,296,297,0 +block_hint,ArrayPrototypeSplice,71,72,1 +block_hint,ArrayPrototypeUnshift,186,187,1 +block_hint,ArrayPrototypeUnshift,157,158,1 +block_hint,ArrayPrototypeUnshift,141,142,1 +block_hint,ArrayPrototypeUnshift,96,97,1 +block_hint,ArrayPrototypeUnshift,55,56,1 +block_hint,ArrayPrototypeUnshift,10,11,1 +block_hint,ArrayPrototypeUnshift,128,129,1 +block_hint,ArrayPrototypeUnshift,98,99,0 +block_hint,ArrayPrototypeUnshift,57,58,0 +block_hint,ArrayPrototypeUnshift,100,101,1 +block_hint,ArrayPrototypeUnshift,59,60,0 +block_hint,ArrayPrototypeUnshift,20,21,1 +block_hint,ArrayPrototypeUnshift,22,23,0 +block_hint,ArrayBufferPrototypeGetByteLength,15,16,1 +block_hint,ArrayBufferPrototypeGetByteLength,10,11,1 +block_hint,ArrayBufferPrototypeGetByteLength,12,13,1 +block_hint,ArrayBufferPrototypeGetByteLength,6,7,0 +block_hint,ArrayBufferPrototypeGetByteLength,4,5,0 +block_hint,ArrayBufferIsView,8,9,1 +block_hint,ArrayBufferIsView,5,6,1 +block_hint,ArrayBufferIsView,3,4,1 +block_hint,ToInteger,4,5,1 +block_hint,ToInteger,6,7,0 +block_hint,BooleanConstructor,81,82,1 +block_hint,BooleanConstructor,74,75,0 +block_hint,BooleanConstructor,57,58,0 +block_hint,BooleanConstructor,68,69,1 +block_hint,BooleanConstructor,59,60,0 +block_hint,BooleanConstructor,70,71,0 +block_hint,BooleanConstructor,51,52,0 +block_hint,BooleanConstructor,7,8,1 +block_hint,ToString,20,21,0 +block_hint,ToString,34,35,0 +block_hint,ToString,67,68,0 +block_hint,ToString,83,84,0 +block_hint,ToString,25,26,1 +block_hint,ToString,50,51,1 +block_hint,ToString,54,55,0 +block_hint,StringPrototypeToString,9,10,1 +block_hint,StringPrototypeToString,11,12,1 +block_hint,StringPrototypeToString,7,8,0 +block_hint,StringPrototypeToString,5,6,1 +block_hint,StringPrototypeValueOf,9,10,1 +block_hint,StringPrototypeValueOf,11,12,1 +block_hint,StringPrototypeValueOf,5,6,1 +block_hint,StringPrototypeCharAt,51,52,1 +block_hint,StringPrototypeCharAt,37,38,1 +block_hint,StringPrototypeCharAt,28,29,1 +block_hint,StringPrototypeCharAt,33,34,0 +block_hint,StringPrototypeCharAt,12,13,0 +block_hint,StringPrototypeCharAt,14,15,0 +block_hint,StringPrototypeCharAt,19,20,1 +block_hint,StringPrototypeCharAt,43,44,0 +block_hint,StringPrototypeCharAt,6,7,1 +block_hint,StringPrototypeCharCodeAt,46,47,1 +block_hint,StringPrototypeCharCodeAt,41,42,1 +block_hint,StringPrototypeCharCodeAt,28,29,1 +block_hint,StringPrototypeCharCodeAt,39,40,0 +block_hint,StringPrototypeCharCodeAt,13,14,0 +block_hint,StringPrototypeCharCodeAt,15,16,0 +block_hint,StringPrototypeCharCodeAt,17,18,1 
+block_hint,StringPrototypeCharCodeAt,32,33,0 +block_hint,StringPrototypeCodePointAt,79,80,1 +block_hint,StringPrototypeCodePointAt,53,54,1 +block_hint,StringPrototypeCodePointAt,43,44,1 +block_hint,StringPrototypeCodePointAt,51,52,0 +block_hint,StringPrototypeCodePointAt,20,21,0 +block_hint,StringPrototypeCodePointAt,22,23,0 +block_hint,StringPrototypeCodePointAt,8,9,0 +block_hint,StringPrototypeCodePointAt,65,66,0 +block_hint,StringPrototypeCodePointAt,45,46,0 +block_hint,StringPrototypeCodePointAt,14,15,1 +block_hint,StringPrototypeCodePointAt,16,17,1 +block_hint,StringPrototypeCodePointAt,10,11,0 +block_hint,StringPrototypeCodePointAt,72,73,0 +block_hint,StringPrototypeCodePointAt,48,49,0 +block_hint,StringPrototypeCodePointAt,18,19,1 +block_hint,StringConstructor,65,66,1 +block_hint,StringConstructor,49,50,1 +block_hint,StringConstructor,63,64,0 +block_hint,StringConstructor,36,37,0 +block_hint,StringConstructor,78,79,0 +block_hint,StringConstructor,83,84,1 +block_hint,StringConstructor,81,82,1 +block_hint,StringConstructor,75,76,1 +block_hint,StringConstructor,59,60,0 +block_hint,StringConstructor,61,62,1 +block_hint,StringConstructor,45,46,0 +block_hint,StringConstructor,24,25,0 +block_hint,StringConstructor,26,27,1 +block_hint,StringAddConvertLeft,47,48,1 +block_hint,StringAddConvertLeft,49,50,0 +block_hint,StringAddConvertLeft,82,83,1 +block_hint,StringAddConvertLeft,64,65,0 +block_hint,StringAddConvertLeft,43,44,0 +block_hint,StringAddConvertLeft,62,63,1 +block_hint,StringAddConvertRight,47,48,1 +block_hint,StringAddConvertRight,82,83,1 +block_hint,StringAddConvertRight,64,65,0 +block_hint,StringAddConvertRight,43,44,0 +block_hint,StringAddConvertRight,86,87,0 +block_hint,StringAddConvertRight,79,80,1 +block_hint,StringCharAt,27,28,0 +block_hint,StringCharAt,20,21,1 +block_hint,StringCharAt,5,6,1 +block_hint,FastNewFunctionContextFunction,11,12,1 +block_hint,FastNewFunctionContextFunction,4,5,1 +block_hint,FastNewFunctionContextFunction,6,7,0 +block_hint,CreateRegExpLiteral,6,7,0 +block_hint,CreateRegExpLiteral,8,9,1 +block_hint,CreateRegExpLiteral,10,11,1 +block_hint,CreateRegExpLiteral,2,3,1 +block_hint,CreateShallowArrayLiteral,20,21,1 +block_hint,CreateShallowArrayLiteral,22,23,1 +block_hint,CreateShallowArrayLiteral,35,36,1 +block_hint,CreateShallowArrayLiteral,11,12,0 +block_hint,CreateShallowArrayLiteral,43,44,1 +block_hint,CreateShallowArrayLiteral,39,40,1 +block_hint,CreateShallowArrayLiteral,24,25,0 +block_hint,CreateShallowArrayLiteral,13,14,0 +block_hint,CreateShallowArrayLiteral,15,16,1 +block_hint,CreateShallowArrayLiteral,46,47,1 +block_hint,CreateShallowArrayLiteral,48,49,0 +block_hint,CreateShallowArrayLiteral,30,31,1 +block_hint,CreateShallowArrayLiteral,5,6,1 +block_hint,CreateShallowArrayLiteral,18,19,1 +block_hint,CreateEmptyArrayLiteral,9,10,1 +block_hint,CreateEmptyArrayLiteral,3,4,1 +block_hint,CreateEmptyArrayLiteral,6,7,1 +block_hint,CreateShallowObjectLiteral,53,54,1 +block_hint,CreateShallowObjectLiteral,61,62,1 +block_hint,CreateShallowObjectLiteral,63,64,0 +block_hint,CreateShallowObjectLiteral,110,111,0 +block_hint,CreateShallowObjectLiteral,99,100,1 +block_hint,CreateShallowObjectLiteral,67,68,1 +block_hint,CreateShallowObjectLiteral,106,107,1 +block_hint,CreateShallowObjectLiteral,81,82,1 +block_hint,CreateShallowObjectLiteral,34,35,0 +block_hint,CreateShallowObjectLiteral,71,72,0 +block_hint,CreateShallowObjectLiteral,38,39,0 +block_hint,CreateShallowObjectLiteral,42,43,0 +block_hint,CreateShallowObjectLiteral,85,86,1 
+block_hint,CreateShallowObjectLiteral,93,94,1 +block_hint,ObjectConstructor,27,28,1 +block_hint,ObjectConstructor,19,20,1 +block_hint,ObjectConstructor,29,30,0 +block_hint,ObjectConstructor,23,24,0 +block_hint,ObjectConstructor,17,18,0 +block_hint,ObjectConstructor,11,12,0 +block_hint,ObjectConstructor,4,5,1 +block_hint,ObjectConstructor,21,22,1 +block_hint,ObjectConstructor,6,7,0 +block_hint,CreateEmptyLiteralObject,4,5,1 +block_hint,CreateEmptyLiteralObject,11,12,1 +block_hint,CreateEmptyLiteralObject,6,7,0 +block_hint,NumberConstructor,18,19,1 +block_hint,NumberConstructor,6,7,1 +block_hint,NumberConstructor,28,29,0 +block_hint,NumberConstructor,12,13,0 +block_hint,NumberConstructor,34,35,0 +block_hint,NumberConstructor,32,33,1 +block_hint,NumberConstructor,30,31,1 +block_hint,NumberConstructor,2,3,1 +block_hint,NonNumberToNumber,14,15,0 +block_hint,NonNumberToNumber,3,4,1 +block_hint,NonNumberToNumeric,17,18,0 +block_hint,NonNumberToNumeric,14,15,0 +block_hint,NonNumberToNumeric,5,6,1 +block_hint,ToNumeric,5,6,1 +block_hint,ToNumeric,3,4,1 +block_hint,NumberToString,69,70,0 +block_hint,NumberToString,20,21,1 +block_hint,NumberToString,45,46,1 +block_hint,NumberToString,41,42,1 +block_hint,ToBoolean,18,19,1 +block_hint,ToBoolean,14,15,0 +block_hint,ToBoolean,20,21,0 +block_hint,ToBoolean,6,7,0 +block_hint,ToBooleanForBaselineJump,14,15,0 +block_hint,ToBooleanForBaselineJump,20,21,0 +block_hint,ToBooleanForBaselineJump,6,7,0 +block_hint,ToLength,19,20,0 +block_hint,ToLength,5,6,0 +block_hint,ToName,40,41,1 +block_hint,ToName,48,49,0 +block_hint,ToName,20,21,0 +block_hint,ToName,22,23,0 +block_hint,ToName,67,68,0 +block_hint,ToName,27,28,1 +block_hint,ToObject,45,46,1 +block_hint,ToObject,7,8,0 +block_hint,ToObject,38,39,1 +block_hint,ToObject,9,10,1 +block_hint,ToObject,53,54,0 +block_hint,ToObject,55,56,1 +block_hint,ToObject,48,49,0 +block_hint,ToObject,26,27,0 +block_hint,ToObject,28,29,1 +block_hint,NonPrimitiveToPrimitive_Default,5,6,1 +block_hint,NonPrimitiveToPrimitive_Number,5,6,1 +block_hint,NonPrimitiveToPrimitive_String,5,6,1 +block_hint,OrdinaryToPrimitive_Number,56,57,1 +block_hint,OrdinaryToPrimitive_Number,53,54,1 +block_hint,OrdinaryToPrimitive_Number,40,41,1 +block_hint,OrdinaryToPrimitive_Number,42,43,0 +block_hint,OrdinaryToPrimitive_Number,28,29,0 +block_hint,OrdinaryToPrimitive_Number,12,13,0 +block_hint,OrdinaryToPrimitive_Number,30,31,0 +block_hint,OrdinaryToPrimitive_Number,32,33,0 +block_hint,OrdinaryToPrimitive_Number,14,15,0 +block_hint,OrdinaryToPrimitive_Number,16,17,0 +block_hint,OrdinaryToPrimitive_Number,44,45,1 +block_hint,OrdinaryToPrimitive_Number,46,47,1 +block_hint,OrdinaryToPrimitive_Number,48,49,1 +block_hint,OrdinaryToPrimitive_Number,50,51,0 +block_hint,OrdinaryToPrimitive_Number,34,35,0 +block_hint,OrdinaryToPrimitive_Number,20,21,0 +block_hint,OrdinaryToPrimitive_String,56,57,1 +block_hint,OrdinaryToPrimitive_String,53,54,1 +block_hint,OrdinaryToPrimitive_String,40,41,1 +block_hint,OrdinaryToPrimitive_String,42,43,0 +block_hint,OrdinaryToPrimitive_String,28,29,0 +block_hint,OrdinaryToPrimitive_String,10,11,0 +block_hint,DataViewPrototypeGetByteLength,37,38,1 +block_hint,DataViewPrototypeGetByteLength,19,20,1 +block_hint,DataViewPrototypeGetByteLength,21,22,1 +block_hint,DataViewPrototypeGetByteLength,39,40,0 +block_hint,DataViewPrototypeGetByteLength,33,34,0 +block_hint,DataViewPrototypeGetByteLength,12,13,0 +block_hint,DataViewPrototypeGetByteLength,10,11,0 +block_hint,DataViewPrototypeGetFloat64,101,102,1 
+block_hint,DataViewPrototypeGetFloat64,87,88,0
+block_hint,DataViewPrototypeGetFloat64,56,57,0
+block_hint,DataViewPrototypeGetFloat64,17,18,1
+block_hint,DataViewPrototypeGetFloat64,19,20,1
+block_hint,DataViewPrototypeGetFloat64,95,96,0
+block_hint,DataViewPrototypeGetFloat64,99,100,0
+block_hint,DataViewPrototypeGetFloat64,78,79,0
+block_hint,DataViewPrototypeGetFloat64,49,50,0
+block_hint,DataViewPrototypeGetFloat64,70,71,0
+block_hint,DataViewPrototypeGetFloat64,89,90,1
+block_hint,DataViewPrototypeGetFloat64,72,73,0
+block_hint,DataViewPrototypeGetFloat64,74,75,0
+block_hint,DataViewPrototypeGetFloat64,91,92,0
+block_hint,DataViewPrototypeGetFloat64,64,65,0
+block_hint,DataViewPrototypeGetFloat64,21,22,0
+block_hint,DataViewPrototypeGetFloat64,97,98,0
+block_hint,DataViewPrototypeGetFloat64,82,83,0
+block_hint,DataViewPrototypeGetFloat64,47,48,0
+block_hint,DataViewPrototypeGetFloat64,35,36,0
+block_hint,DataViewPrototypeGetFloat64,37,38,0
+block_hint,DataViewPrototypeGetFloat64,85,86,1
+block_hint,DataViewPrototypeGetFloat64,54,55,0
+block_hint,DataViewPrototypeGetFloat64,14,15,1
+block_hint,DataViewPrototypeSetFloat64,116,117,1
+block_hint,DataViewPrototypeSetFloat64,104,105,0
+block_hint,DataViewPrototypeSetFloat64,82,83,0
+block_hint,DataViewPrototypeSetFloat64,49,50,0
+block_hint,DataViewPrototypeSetFloat64,16,17,1
+block_hint,DataViewPrototypeSetFloat64,18,19,1
+block_hint,DataViewPrototypeSetFloat64,106,107,0
+block_hint,DataViewPrototypeSetFloat64,95,96,0
+block_hint,DataViewPrototypeSetFloat64,71,72,0
+block_hint,DataViewPrototypeSetFloat64,42,43,0
+block_hint,DataViewPrototypeSetFloat64,84,85,1
+block_hint,DataViewPrototypeSetFloat64,86,87,1
+block_hint,DataViewPrototypeSetFloat64,59,60,1
+block_hint,DataViewPrototypeSetFloat64,10,11,0
+block_hint,DataViewPrototypeSetFloat64,93,94,0
+block_hint,DataViewPrototypeSetFloat64,79,80,0
+block_hint,DataViewPrototypeSetFloat64,40,41,0
+block_hint,DataViewPrototypeSetFloat64,34,35,0
+block_hint,DataViewPrototypeSetFloat64,36,37,0
+block_hint,DataViewPrototypeSetFloat64,47,48,1
+block_hint,DataViewPrototypeSetFloat64,14,15,0
+block_hint,FunctionPrototypeHasInstance,35,36,1
+block_hint,FunctionPrototypeHasInstance,15,16,1
+block_hint,FunctionPrototypeHasInstance,17,18,1
+block_hint,FunctionPrototypeHasInstance,19,20,1
+block_hint,FunctionPrototypeHasInstance,33,34,1
+block_hint,FunctionPrototypeHasInstance,23,24,0
+block_hint,FunctionPrototypeHasInstance,13,14,0
+block_hint,FunctionPrototypeHasInstance,31,32,0
+block_hint,FunctionPrototypeHasInstance,25,26,0
+block_hint,FunctionPrototypeHasInstance,27,28,0
+block_hint,FastFunctionPrototypeBind,91,92,1
+block_hint,FastFunctionPrototypeBind,88,89,1
+block_hint,FastFunctionPrototypeBind,75,76,0
+block_hint,FastFunctionPrototypeBind,29,30,0
+block_hint,FastFunctionPrototypeBind,31,32,0
+block_hint,FastFunctionPrototypeBind,7,8,1
+block_hint,FastFunctionPrototypeBind,53,54,1
+block_hint,FastFunctionPrototypeBind,65,66,0
+block_hint,FastFunctionPrototypeBind,69,70,1
+block_hint,FastFunctionPrototypeBind,41,42,1
+block_hint,FastFunctionPrototypeBind,9,10,1
+block_hint,FastFunctionPrototypeBind,56,57,1
+block_hint,FastFunctionPrototypeBind,67,68,0
+block_hint,FastFunctionPrototypeBind,79,80,1
+block_hint,FastFunctionPrototypeBind,71,72,1
+block_hint,FastFunctionPrototypeBind,43,44,1
+block_hint,FastFunctionPrototypeBind,11,12,1
+block_hint,FastFunctionPrototypeBind,35,36,1
+block_hint,FastFunctionPrototypeBind,81,82,1
+block_hint,FastFunctionPrototypeBind,73,74,0
+block_hint,FastFunctionPrototypeBind,27,28,1
+block_hint,ForInNext,2,3,1
+block_hint,ForInNext,7,8,1
+block_hint,CallIteratorWithFeedback,56,57,1
+block_hint,CallIteratorWithFeedback,58,59,1
+block_hint,CallIteratorWithFeedback,26,27,1
+block_hint,CallIteratorWithFeedback,28,29,1
+block_hint,CallIteratorWithFeedback,30,31,1
+block_hint,CallIteratorWithFeedback,10,11,1
+block_hint,MathAbs,14,15,1
+block_hint,MathAbs,16,17,1
+block_hint,MathAbs,23,24,0
+block_hint,MathAbs,9,10,0
+block_hint,MathAbs,11,12,1
+block_hint,MathCeil,12,13,1
+block_hint,MathFloor,12,13,1
+block_hint,MathFloor,14,15,1
+block_hint,MathFloor,35,36,1
+block_hint,MathFloor,25,26,0
+block_hint,MathFloor,21,22,1
+block_hint,MathFloor,19,20,0
+block_hint,MathFloor,7,8,0
+block_hint,MathRound,12,13,1
+block_hint,MathRound,14,15,1
+block_hint,MathRound,32,33,0
+block_hint,MathRound,36,37,0
+block_hint,MathRound,28,29,0
+block_hint,MathRound,21,22,1
+block_hint,MathRound,7,8,0
+block_hint,MathRound,9,10,1
+block_hint,MathPow,12,13,1
+block_hint,MathPow,14,15,1
+block_hint,MathPow,18,19,1
+block_hint,MathPow,23,24,0
+block_hint,MathPow,7,8,0
+block_hint,MathPow,9,10,1
+block_hint,MathMax,13,14,1
+block_hint,MathMax,19,20,0
+block_hint,MathMax,17,18,1
+block_hint,MathMax,24,25,0
+block_hint,MathMax,8,9,0
+block_hint,MathMax,10,11,1
+block_hint,MathMin,13,14,1
+block_hint,MathMin,19,20,0
+block_hint,MathMin,17,18,1
+block_hint,MathMin,24,25,0
+block_hint,MathMin,8,9,0
+block_hint,MathMin,10,11,1
+block_hint,MathAtan2,34,35,1
+block_hint,MathAtan2,32,33,1
+block_hint,MathAtan2,23,24,1
+block_hint,MathAtan2,5,6,1
+block_hint,MathCos,25,26,1
+block_hint,MathCos,23,24,1
+block_hint,MathCos,9,10,1
+block_hint,MathCos,3,4,0
+block_hint,MathCos,5,6,1
+block_hint,MathExp,25,26,1
+block_hint,MathExp,20,21,1
+block_hint,MathExp,23,24,1
+block_hint,MathExp,16,17,1
+block_hint,MathExp,13,14,0
+block_hint,MathExp,5,6,1
+block_hint,MathFround,25,26,1
+block_hint,MathFround,23,24,1
+block_hint,MathFround,5,6,1
+block_hint,MathLog,25,26,1
+block_hint,MathLog,23,24,1
+block_hint,MathLog,13,14,0
+block_hint,MathLog,5,6,1
+block_hint,MathSin,25,26,1
+block_hint,MathSin,23,24,1
+block_hint,MathSin,9,10,0
+block_hint,MathSin,11,12,0
+block_hint,MathSin,3,4,0
+block_hint,MathSin,5,6,1
+block_hint,MathSign,16,17,1
+block_hint,MathSign,11,12,0
+block_hint,MathSign,7,8,0
+block_hint,MathSign,2,3,0
+block_hint,MathSign,4,5,1
+block_hint,MathSqrt,25,26,1
+block_hint,MathSqrt,23,24,1
+block_hint,MathSqrt,11,12,0
+block_hint,MathSqrt,3,4,0
+block_hint,MathSqrt,5,6,1
+block_hint,MathTan,25,26,1
+block_hint,MathTan,20,21,0
+block_hint,MathTan,16,17,0
+block_hint,MathTanh,25,26,1
+block_hint,MathTanh,20,21,1
+block_hint,MathTanh,23,24,1
+block_hint,MathTanh,16,17,1
+block_hint,MathTanh,13,14,0
+block_hint,MathTanh,5,6,1
+block_hint,MathRandom,15,16,1
+block_hint,MathRandom,3,4,1
+block_hint,MathRandom,17,18,1
+block_hint,MathRandom,5,6,1
+block_hint,MathRandom,7,8,1
+block_hint,MathRandom,9,10,1
+block_hint,MathRandom,13,14,1
+block_hint,NumberPrototypeToString,71,72,1
+block_hint,NumberPrototypeToString,113,114,0
+block_hint,NumberPrototypeToString,51,52,0
+block_hint,NumberPrototypeToString,59,60,1
+block_hint,NumberPrototypeToString,183,184,0
+block_hint,NumberPrototypeToString,154,155,0
+block_hint,NumberPrototypeToString,121,122,0
+block_hint,NumberPrototypeToString,180,181,0
+block_hint,NumberPrototypeToString,167,168,0
+block_hint,NumberPrototypeToString,85,86,0
+block_hint,NumberPrototypeToString,176,177,0
+block_hint,NumberPrototypeToString,97,98,0
+block_hint,NumberPrototypeToString,171,172,0
+block_hint,NumberPrototypeToString,129,130,0
+block_hint,NumberPrototypeToString,109,110,1
+block_hint,NumberPrototypeToString,42,43,1
+block_hint,NumberPrototypeToString,49,50,1
+block_hint,NumberPrototypeToString,73,74,0
+block_hint,NumberPrototypeToString,27,28,0
+block_hint,NumberPrototypeToString,116,117,1
+block_hint,NumberPrototypeToString,75,76,1
+block_hint,NumberPrototypeToString,29,30,1
+block_hint,NumberPrototypeToString,95,96,0
+block_hint,NumberPrototypeToString,111,112,0
+block_hint,NumberPrototypeToString,35,36,1
+block_hint,NumberPrototypeToString,132,133,1
+block_hint,NumberPrototypeToString,37,38,0
+block_hint,NumberPrototypeToString,134,135,1
+block_hint,NumberPrototypeToString,39,40,0
+block_hint,NumberPrototypeToString,162,163,1
+block_hint,NumberPrototypeToString,164,165,0
+block_hint,NumberPrototypeToString,139,140,0
+block_hint,NumberPrototypeToString,105,106,1
+block_hint,NumberIsInteger,13,14,1
+block_hint,NumberParseFloat,14,15,1
+block_hint,NumberParseFloat,2,3,1
+block_hint,NumberParseFloat,12,13,0
+block_hint,NumberParseFloat,17,18,0
+block_hint,NumberParseFloat,4,5,1
+block_hint,ParseInt,27,28,1
+block_hint,ParseInt,13,14,0
+block_hint,ParseInt,6,7,1
+block_hint,ParseInt,31,32,0
+block_hint,ParseInt,25,26,1
+block_hint,ParseInt,23,24,1
+block_hint,ParseInt,10,11,0
+block_hint,NumberParseInt,3,4,1
+block_hint,Add,66,67,1
+block_hint,Add,24,25,0
+block_hint,Add,52,53,1
+block_hint,Add,68,69,0
+block_hint,Add,35,36,0
+block_hint,Add,40,41,0
+block_hint,Add,70,71,1
+block_hint,Add,26,27,0
+block_hint,Add,29,30,1
+block_hint,Subtract,24,25,0
+block_hint,Subtract,9,10,0
+block_hint,Subtract,22,23,0
+block_hint,Subtract,7,8,0
+block_hint,Divide,50,51,0
+block_hint,Divide,23,24,0
+block_hint,Divide,9,10,0
+block_hint,Divide,44,45,1
+block_hint,Divide,48,49,1
+block_hint,Divide,33,34,0
+block_hint,Divide,7,8,1
+block_hint,CreateObjectWithoutProperties,52,53,1
+block_hint,CreateObjectWithoutProperties,42,43,1
+block_hint,CreateObjectWithoutProperties,34,35,0
+block_hint,CreateObjectWithoutProperties,17,18,1
+block_hint,CreateObjectWithoutProperties,56,57,0
+block_hint,CreateObjectWithoutProperties,44,45,0
+block_hint,CreateObjectWithoutProperties,48,49,1
+block_hint,CreateObjectWithoutProperties,36,37,0
+block_hint,CreateObjectWithoutProperties,38,39,0
+block_hint,CreateObjectWithoutProperties,5,6,1
+block_hint,CreateObjectWithoutProperties,40,41,1
+block_hint,CreateObjectWithoutProperties,7,8,1
+block_hint,CreateObjectWithoutProperties,9,10,1
+block_hint,CreateObjectWithoutProperties,11,12,1
+block_hint,CreateObjectWithoutProperties,13,14,1
+block_hint,CreateObjectWithoutProperties,15,16,1
+block_hint,CreateObjectWithoutProperties,20,21,0
+block_hint,CreateObjectWithoutProperties,50,51,1
+block_hint,ObjectGetPrototypeOf,11,12,1
+block_hint,ObjectGetPrototypeOf,8,9,1
+block_hint,ObjectGetPrototypeOf,5,6,1
+block_hint,ObjectGetPrototypeOf,2,3,0
+block_hint,ObjectSetPrototypeOf,18,19,1
+block_hint,ObjectSetPrototypeOf,4,5,0
+block_hint,ObjectSetPrototypeOf,13,14,1
+block_hint,ObjectSetPrototypeOf,20,21,0
+block_hint,ObjectSetPrototypeOf,15,16,0
+block_hint,ObjectSetPrototypeOf,6,7,1
+block_hint,ObjectSetPrototypeOf,8,9,0
+block_hint,ObjectSetPrototypeOf,10,11,0
+block_hint,ObjectPrototypeToString,3,4,1
+block_hint,ObjectPrototypeValueOf,8,9,1
+block_hint,ObjectPrototypeValueOf,5,6,1
+block_hint,ObjectPrototypeValueOf,2,3,1
+block_hint,FulfillPromise,32,33,1
+block_hint,FulfillPromise,15,16,0
+block_hint,FulfillPromise,34,35,1
+block_hint,FulfillPromise,17,18,0
+block_hint,FulfillPromise,19,20,1
+block_hint,FulfillPromise,21,22,0
+block_hint,PerformPromiseThen,101,102,1
+block_hint,PerformPromiseThen,57,58,0
+block_hint,PerformPromiseThen,103,104,1
+block_hint,PerformPromiseThen,59,60,0
+block_hint,PerformPromiseThen,61,62,1
+block_hint,PerformPromiseThen,63,64,0
+block_hint,PerformPromiseThen,18,19,1
+block_hint,PerformPromiseThen,72,73,1
+block_hint,PerformPromiseThen,25,26,1
+block_hint,PerformPromiseThen,93,94,1
+block_hint,PerformPromiseThen,45,46,0
+block_hint,PerformPromiseThen,95,96,1
+block_hint,PerformPromiseThen,47,48,0
+block_hint,PerformPromiseThen,49,50,1
+block_hint,PerformPromiseThen,51,52,0
+block_hint,PerformPromiseThen,20,21,1
+block_hint,PerformPromiseThen,115,116,1
+block_hint,PromiseFulfillReactionJob,22,23,0
+block_hint,PromiseFulfillReactionJob,2,3,1
+block_hint,ResolvePromise,29,30,0
+block_hint,ResolvePromise,31,32,0
+block_hint,ResolvePromise,15,16,1
+block_hint,ResolvePromise,47,48,0
+block_hint,ResolvePromise,33,34,0
+block_hint,ResolvePromise,6,7,1
+block_hint,ResolvePromise,17,18,0
+block_hint,ResolvePromise,19,20,1
+block_hint,ResolvePromise,53,54,1
+block_hint,ResolvePromise,49,50,0
+block_hint,ResolvePromise,23,24,0
+block_hint,ProxyConstructor,30,31,1
+block_hint,ProxyConstructor,10,11,0
+block_hint,ProxyConstructor,22,23,1
+block_hint,ProxyConstructor,24,25,0
+block_hint,ProxyConstructor,26,27,1
+block_hint,ProxyConstructor,28,29,0
+block_hint,ProxyConstructor,7,8,1
+block_hint,ProxyConstructor,17,18,1
+block_hint,ProxyConstructor,5,6,1
+block_hint,ProxyConstructor,12,13,1
+block_hint,ProxyGetProperty,153,154,1
+block_hint,ProxyGetProperty,34,35,0
+block_hint,ProxyGetProperty,10,11,0
+block_hint,ProxyGetProperty,89,90,0
+block_hint,ProxyGetProperty,91,92,0
+block_hint,ProxyGetProperty,85,86,1
+block_hint,ProxyGetProperty,87,88,1
+block_hint,ProxyGetProperty,176,177,1
+block_hint,ProxyGetProperty,180,181,0
+block_hint,ProxyGetProperty,118,119,0
+block_hint,ProxyGetProperty,40,41,1
+block_hint,ProxyGetProperty,114,115,1
+block_hint,ProxyGetProperty,24,25,1
+block_hint,ProxyGetProperty,26,27,0
+block_hint,ProxyGetProperty,208,209,1
+block_hint,ProxyGetProperty,198,199,0
+block_hint,ProxyGetProperty,149,150,1
+block_hint,ProxyGetProperty,28,29,0
+block_hint,ProxyGetProperty,167,168,0
+block_hint,ProxyGetProperty,187,188,1
+block_hint,ProxyGetProperty,131,132,1
+block_hint,ProxyGetProperty,169,170,1
+block_hint,ProxyGetProperty,171,172,0
+block_hint,ProxyGetProperty,60,61,0
+block_hint,ReflectGet,20,21,1
+block_hint,ReflectGet,15,16,0
+block_hint,ReflectGet,5,6,1
+block_hint,ReflectGet,7,8,0
+block_hint,ReflectGet,18,19,0
+block_hint,ReflectGet,9,10,0
+block_hint,ReflectHas,8,9,1
+block_hint,ReflectHas,5,6,1
+block_hint,ReflectHas,3,4,0
+block_hint,RegExpPrototypeExec,204,205,1
+block_hint,RegExpPrototypeExec,130,131,1
+block_hint,RegExpPrototypeExec,132,133,1
+block_hint,RegExpPrototypeExec,206,207,1
+block_hint,RegExpPrototypeExec,166,167,1
+block_hint,RegExpPrototypeExec,16,17,1
+block_hint,RegExpPrototypeExec,148,149,1
+block_hint,RegExpPrototypeExec,150,151,0
+block_hint,RegExpPrototypeExec,152,153,0
+block_hint,RegExpPrototypeExec,227,228,0
+block_hint,RegExpPrototypeExec,213,214,0
+block_hint,RegExpPrototypeExec,154,155,0
+block_hint,RegExpPrototypeExec,18,19,1
+block_hint,RegExpPrototypeExec,185,186,0
+block_hint,RegExpPrototypeExec,134,135,0
+block_hint,RegExpPrototypeExec,159,160,0
+block_hint,RegExpPrototypeExec,118,119,1
+block_hint,RegExpPrototypeExec,242,243,0
+block_hint,RegExpPrototypeExec,257,258,1
+block_hint,RegExpPrototypeExec,233,234,1
+block_hint,RegExpPrototypeExec,222,223,1
+block_hint,RegExpPrototypeExec,171,172,0
+block_hint,RegExpPrototypeExec,161,162,0
+block_hint,RegExpPrototypeExec,73,74,1
+block_hint,RegExpPrototypeExec,24,25,1
+block_hint,RegExpPrototypeExec,138,139,0
+block_hint,RegExpPrototypeExec,26,27,1
+block_hint,RegExpPrototypeExec,190,191,1
+block_hint,RegExpPrototypeExec,140,141,1
+block_hint,RegExpPrototypeExec,248,249,1
+block_hint,RegExpPrototypeExec,217,218,0
+block_hint,RegExpPrototypeExec,179,180,0
+block_hint,RegExpPrototypeExec,77,78,0
+block_hint,RegExpPrototypeExec,34,35,1
+block_hint,RegExpPrototypeExec,144,145,1
+block_hint,RegExpPrototypeExec,116,117,0
+block_hint,RegExpPrototypeExec,156,157,0
+block_hint,RegExpMatchFast,359,360,0
+block_hint,RegExpMatchFast,289,290,0
+block_hint,RegExpMatchFast,32,33,1
+block_hint,RegExpMatchFast,326,327,0
+block_hint,RegExpMatchFast,234,235,0
+block_hint,RegExpMatchFast,283,284,0
+block_hint,RegExpMatchFast,460,461,0
+block_hint,RegExpMatchFast,442,443,1
+block_hint,RegExpMatchFast,415,416,1
+block_hint,RegExpMatchFast,291,292,0
+block_hint,RegExpMatchFast,285,286,0
+block_hint,RegExpMatchFast,129,130,1
+block_hint,RegExpMatchFast,236,237,0
+block_hint,RegExpMatchFast,238,239,0
+block_hint,RegExpMatchFast,40,41,1
+block_hint,RegExpMatchFast,331,332,1
+block_hint,RegExpMatchFast,240,241,1
+block_hint,RegExpMatchFast,468,469,1
+block_hint,RegExpMatchFast,396,397,0
+block_hint,RegExpMatchFast,320,321,0
+block_hint,RegExpMatchFast,133,134,0
+block_hint,RegExpMatchFast,48,49,1
+block_hint,RegExpMatchFast,244,245,1
+block_hint,RegExpMatchFast,180,181,0
+block_hint,RegExpMatchFast,259,260,0
+block_hint,RegExpMatchFast,297,298,0
+block_hint,RegExpMatchFast,82,83,1
+block_hint,RegExpMatchFast,84,85,1
+block_hint,RegExpMatchFast,301,302,1
+block_hint,RegExpMatchFast,346,347,0
+block_hint,RegExpMatchFast,444,445,0
+block_hint,RegExpMatchFast,401,402,0
+block_hint,RegExpMatchFast,299,300,0
+block_hint,RegExpMatchFast,86,87,1
+block_hint,RegExpMatchFast,340,341,0
+block_hint,RegExpMatchFast,248,249,0
+block_hint,RegExpMatchFast,275,276,0
+block_hint,RegExpMatchFast,190,191,1
+block_hint,RegExpMatchFast,462,463,0
+block_hint,RegExpMatchFast,480,481,1
+block_hint,RegExpMatchFast,448,449,1
+block_hint,RegExpMatchFast,413,414,1
+block_hint,RegExpMatchFast,303,304,0
+block_hint,RegExpMatchFast,277,278,0
+block_hint,RegExpMatchFast,117,118,1
+block_hint,RegExpMatchFast,342,343,0
+block_hint,RegExpMatchFast,250,251,1
+block_hint,RegExpMatchFast,92,93,1
+block_hint,RegExpMatchFast,366,367,1
+block_hint,RegExpMatchFast,252,253,0
+block_hint,RegExpMatchFast,102,103,1
+block_hint,RegExpMatchFast,306,307,1
+block_hint,RegExpMatchFast,177,178,0
+block_hint,RegExpMatchFast,104,105,0
+block_hint,RegExpMatchFast,106,107,0
+block_hint,RegExpMatchFast,198,199,1
+block_hint,RegExpMatchFast,317,318,0
+block_hint,RegExpMatchFast,108,109,1
+block_hint,RegExpMatchFast,187,188,1
+block_hint,RegExpMatchFast,348,349,1
+block_hint,RegExpMatchFast,94,95,1
+block_hint,RegExpMatchFast,96,97,1
+block_hint,RegExpMatchFast,175,176,0
+block_hint,RegExpMatchFast,98,99,0
+block_hint,RegExpMatchFast,100,101,0
+block_hint,RegExpMatchFast,218,219,1
+block_hint,RegExpMatchFast,309,310,0
+block_hint,RegExpMatchFast,220,221,0
+block_hint,RegExpReplace,263,264,1
+block_hint,RegExpReplace,298,299,1
+block_hint,RegExpReplace,251,252,1
+block_hint,RegExpReplace,149,150,0
+block_hint,RegExpReplace,22,23,1
+block_hint,RegExpReplace,209,210,1
+block_hint,RegExpReplace,151,152,0
+block_hint,RegExpReplace,24,25,1
+block_hint,RegExpReplace,211,212,1
+block_hint,RegExpReplace,213,214,1
+block_hint,RegExpReplace,172,173,1
+block_hint,RegExpReplace,179,180,0
+block_hint,RegExpReplace,259,260,0
+block_hint,RegExpReplace,349,350,0
+block_hint,RegExpReplace,50,51,1
+block_hint,RegExpReplace,229,230,0
+block_hint,RegExpReplace,163,164,0
+block_hint,RegExpReplace,183,184,0
+block_hint,RegExpReplace,109,110,1
+block_hint,RegExpReplace,381,382,0
+block_hint,RegExpReplace,386,387,1
+block_hint,RegExpReplace,353,354,1
+block_hint,RegExpReplace,317,318,1
+block_hint,RegExpReplace,203,204,0
+block_hint,RegExpReplace,185,186,0
+block_hint,RegExpReplace,81,82,1
+block_hint,RegExpReplace,56,57,1
+block_hint,RegExpReplace,58,59,1
+block_hint,RegExpReplace,60,61,1
+block_hint,RegExpReplace,167,168,0
+block_hint,RegExpReplace,62,63,1
+block_hint,RegExpReplace,233,234,1
+block_hint,RegExpReplace,169,170,0
+block_hint,RegExpReplace,64,65,1
+block_hint,RegExpReplace,388,389,1
+block_hint,RegExpReplace,378,379,1
+block_hint,RegExpReplace,330,331,0
+block_hint,RegExpReplace,286,287,0
+block_hint,RegExpReplace,218,219,0
+block_hint,RegExpReplace,100,101,1
+block_hint,RegExpReplace,26,27,1
+block_hint,RegExpReplace,28,29,1
+block_hint,RegExpReplace,102,103,1
+block_hint,RegExpReplace,30,31,0
+block_hint,RegExpReplace,32,33,1
+block_hint,RegExpReplace,34,35,1
+block_hint,RegExpReplace,72,73,1
+block_hint,RegExpReplace,44,45,1
+block_hint,RegExpReplace,161,162,1
+block_hint,RegExpReplace,46,47,1
+block_hint,RegExpReplace,48,49,1
+block_hint,RegExpReplace,236,237,1
+block_hint,RegExpReplace,176,177,1
+block_hint,RegExpReplace,153,154,1
+block_hint,RegExpReplace,36,37,1
+block_hint,RegExpReplace,155,156,1
+block_hint,RegExpReplace,40,41,0
+block_hint,RegExpReplace,254,255,1
+block_hint,RegExpReplace,196,197,1
+block_hint,RegExpReplace,42,43,1
+block_hint,RegExpSearchFast,50,51,0
+block_hint,RegExpSearchFast,6,7,1
+block_hint,RegExpSearchFast,56,57,0
+block_hint,RegExpSearchFast,36,37,0
+block_hint,RegExpSearchFast,46,47,0
+block_hint,RegExpSearchFast,84,85,0
+block_hint,RegExpSearchFast,81,82,1
+block_hint,RegExpSearchFast,73,74,1
+block_hint,RegExpSearchFast,62,63,0
+block_hint,RegExpSearchFast,52,53,0
+block_hint,RegExpSearchFast,58,59,1
+block_hint,RegExpSearchFast,44,45,0
+block_hint,RegExpPrototypeSourceGetter,12,13,1
+block_hint,RegExpPrototypeSourceGetter,9,10,1
+block_hint,RegExpPrototypeSourceGetter,4,5,1
+block_hint,RegExpSplit,179,180,1
+block_hint,RegExpSplit,88,89,0
+block_hint,RegExpSplit,22,23,1
+block_hint,RegExpSplit,149,150,1
+block_hint,RegExpSplit,40,41,1
+block_hint,RegExpSplit,24,25,1
+block_hint,RegExpSplit,185,186,1
+block_hint,RegExpSplit,101,102,1
+block_hint,RegExpSplit,136,137,0
+block_hint,RegExpSplit,26,27,1
+block_hint,RegExpSplit,205,206,0
+block_hint,RegExpSplit,138,139,0
+block_hint,RegExpSplit,162,163,0
+block_hint,RegExpSplit,108,109,1
+block_hint,RegExpSplit,327,328,0
+block_hint,RegExpSplit,322,323,1
+block_hint,RegExpSplit,314,315,1
+block_hint,RegExpSplit,287,288,1
+block_hint,RegExpSplit,181,182,0
+block_hint,RegExpSplit,225,226,0
+block_hint,RegExpSplit,164,165,0
+block_hint,RegExpSplit,46,47,1
+block_hint,RegExpSplit,307,308,0
+block_hint,RegExpSplit,263,264,1
+block_hint,RegExpSplit,207,208,0
+block_hint,RegExpSplit,92,93,0
+block_hint,RegExpSplit,227,228,1
+block_hint,RegExpSplit,194,195,1
+block_hint,RegExpSplit,50,51,0
+block_hint,RegExpSplit,167,168,0
+block_hint,RegExpSplit,141,142,0
+block_hint,RegExpSplit,32,33,1
+block_hint,RegExpSplit,58,59,0
+block_hint,RegExpSplit,280,281,0
+block_hint,RegExpSplit,246,247,0
+block_hint,RegExpSplit,151,152,0
+block_hint,RegExpSplit,241,242,1
+block_hint,RegExpSplit,212,213,0
+block_hint,RegExpSplit,96,97,0
+block_hint,RegExpSplit,232,233,1
+block_hint,RegExpSplit,201,202,1
+block_hint,RegExpSplit,74,75,0
+block_hint,RegExpSplit,175,176,0
+block_hint,RegExpSplit,38,39,1
+block_hint,RegExpSplit,219,220,0
+block_hint,RegExpSplit,244,245,1
+block_hint,RegExpSplit,217,218,0
+block_hint,RegExpSplit,99,100,0
+block_hint,RegExpSplit,277,278,1
+block_hint,RegExpSplit,260,261,1
+block_hint,RegExpSplit,177,178,0
+block_hint,RegExpSplit,103,104,1
+block_hint,RegExpPrototypeTest,112,113,1
+block_hint,RegExpPrototypeTest,50,51,1
+block_hint,RegExpPrototypeTest,52,53,0
+block_hint,RegExpPrototypeTest,137,138,1
+block_hint,RegExpPrototypeTest,54,55,0
+block_hint,RegExpPrototypeTest,8,9,1
+block_hint,RegExpPrototypeTest,93,94,1
+block_hint,RegExpPrototypeTest,56,57,0
+block_hint,RegExpPrototypeTest,10,11,1
+block_hint,RegExpPrototypeTest,145,146,1
+block_hint,RegExpPrototypeTest,127,128,1
+block_hint,RegExpPrototypeTest,85,86,0
+block_hint,RegExpPrototypeTest,14,15,1
+block_hint,RegExpPrototypeTest,99,100,0
+block_hint,RegExpPrototypeTest,59,60,0
+block_hint,RegExpPrototypeTest,73,74,0
+block_hint,RegExpPrototypeTest,42,43,0
+block_hint,RegExpPrototypeTest,161,162,0
+block_hint,RegExpPrototypeTest,164,165,1
+block_hint,RegExpPrototypeTest,151,152,1
+block_hint,RegExpPrototypeTest,143,144,1
+block_hint,RegExpPrototypeTest,87,88,0
+block_hint,RegExpPrototypeTest,75,76,0
+block_hint,RegExpPrototypeTest,29,30,1
+block_hint,RegExpPrototypeTest,37,38,0
+block_hint,RegExpPrototypeTest,65,66,0
+block_hint,RegExpPrototypeTestFast,48,49,0
+block_hint,RegExpPrototypeTestFast,7,8,1
+block_hint,RegExpPrototypeTestFast,56,57,0
+block_hint,RegExpPrototypeTestFast,36,37,0
+block_hint,RegExpPrototypeTestFast,44,45,0
+block_hint,RegExpPrototypeTestFast,82,83,0
+block_hint,RegExpPrototypeTestFast,85,86,1
+block_hint,RegExpPrototypeTestFast,79,80,1
+block_hint,RegExpPrototypeTestFast,73,74,1
+block_hint,RegExpPrototypeTestFast,50,51,0
+block_hint,RegExpPrototypeTestFast,46,47,0
+block_hint,RegExpPrototypeTestFast,19,20,1
+block_hint,RegExpPrototypeTestFast,26,27,0
+block_hint,RegExpPrototypeTestFast,42,43,0
+block_hint,StringPrototypeEndsWith,288,289,1
+block_hint,StringPrototypeEndsWith,271,272,0
+block_hint,StringPrototypeEndsWith,251,252,1
+block_hint,StringPrototypeEndsWith,235,236,1
+block_hint,StringPrototypeEndsWith,174,175,1
+block_hint,StringPrototypeEndsWith,278,279,1
+block_hint,StringPrototypeEndsWith,267,268,1
+block_hint,StringPrototypeEndsWith,253,254,1
+block_hint,StringPrototypeEndsWith,244,245,1
+block_hint,StringPrototypeEndsWith,179,180,1
+block_hint,StringPrototypeEndsWith,29,30,0
+block_hint,StringPrototypeEndsWith,68,69,0
+block_hint,StringPrototypeEndsWith,70,71,0
+block_hint,StringPrototypeEndsWith,185,186,1
+block_hint,StringPrototypeEndsWith,84,85,0
+block_hint,StringPrototypeEndsWith,86,87,0
+block_hint,StringPrototypeEndsWith,164,165,0
+block_hint,StringPrototypeEndsWith,47,48,0
+block_hint,StringPrototypeEndsWith,144,145,0
+block_hint,StringPrototypeEndsWith,35,36,0
+block_hint,StringPrototypeEndsWith,49,50,0
+block_hint,StringPrototypeEndsWith,116,117,0
+block_hint,StringPrototypeIndexOf,39,40,1
+block_hint,StringPrototypeIndexOf,36,37,0
+block_hint,StringPrototypeIndexOf,19,20,1
+block_hint,StringPrototypeIndexOf,8,9,1
+block_hint,StringPrototypeIndexOf,28,29,1
+block_hint,StringPrototypeIndexOf,21,22,1
+block_hint,StringPrototypeIndexOf,33,34,0
+block_hint,StringPrototypeIndexOf,24,25,0
+block_hint,StringPrototypeIndexOf,11,12,0
+block_hint,StringPrototypeIterator,15,16,1
+block_hint,StringPrototypeIterator,12,13,1
+block_hint,StringPrototypeIterator,10,11,1
+block_hint,StringPrototypeIterator,3,4,1
+block_hint,StringPrototypeIterator,8,9,1
+block_hint,StringIteratorPrototypeNext,56,57,1
+block_hint,StringIteratorPrototypeNext,38,39,1
+block_hint,StringIteratorPrototypeNext,40,41,1
+block_hint,StringIteratorPrototypeNext,13,14,0
+block_hint,StringIteratorPrototypeNext,74,75,0
+block_hint,StringIteratorPrototypeNext,64,65,1
+block_hint,StringIteratorPrototypeNext,54,55,0
+block_hint,StringIteratorPrototypeNext,61,62,1
+block_hint,StringIteratorPrototypeNext,50,51,1
+block_hint,StringIteratorPrototypeNext,11,12,1
+block_hint,StringIteratorPrototypeNext,20,21,1
+block_hint,StringIteratorPrototypeNext,9,10,1
+block_hint,StringIteratorPrototypeNext,17,18,1
+block_hint,StringPrototypeMatch,67,68,1
+block_hint,StringPrototypeMatch,39,40,0
+block_hint,StringPrototypeMatch,99,100,1
+block_hint,StringPrototypeMatch,88,89,0
+block_hint,StringPrototypeMatch,69,70,1
+block_hint,StringPrototypeMatch,49,50,0
+block_hint,StringPrototypeMatch,6,7,1
+block_hint,StringPrototypeMatch,71,72,1
+block_hint,StringPrototypeMatch,51,52,0
+block_hint,StringPrototypeMatch,8,9,1
+block_hint,StringPrototypeMatch,83,84,1
+block_hint,StringPrototypeMatch,75,76,1
+block_hint,StringPrototypeMatch,43,44,1
+block_hint,StringPrototypeSearch,67,68,1
+block_hint,StringPrototypeSearch,39,40,0
+block_hint,StringPrototypeSearch,99,100,1
+block_hint,StringPrototypeSearch,88,89,0
+block_hint,StringPrototypeSearch,69,70,1
+block_hint,StringPrototypeSearch,49,50,0
+block_hint,StringPrototypeSearch,6,7,1
+block_hint,StringPrototypeSearch,71,72,1
+block_hint,StringPrototypeSearch,51,52,0
+block_hint,StringPrototypeSearch,8,9,1
+block_hint,StringPrototypeSearch,83,84,1
+block_hint,StringPrototypeSearch,75,76,1
+block_hint,StringPrototypeSearch,43,44,1
+block_hint,StringPrototypeSlice,167,168,1
+block_hint,StringPrototypeSlice,136,137,1
+block_hint,StringPrototypeSlice,103,104,1
+block_hint,StringPrototypeSlice,189,190,0
+block_hint,StringPrototypeSlice,175,176,0
+block_hint,StringPrototypeSlice,199,200,0
+block_hint,StringPrototypeSlice,196,197,0
+block_hint,StringPrototypeSlice,183,184,1
+block_hint,StringPrototypeSlice,179,180,1
+block_hint,StringPrototypeSlice,187,188,0
+block_hint,StringPrototypeSlice,170,171,0
+block_hint,StringPrototypeSlice,138,139,1
+block_hint,StringPrototypeSlice,31,32,0
+block_hint,StringPrototypeSlice,68,69,1
+block_hint,StringPrototypeSlice,63,64,1
+block_hint,StringPrototypeSlice,61,62,1
+block_hint,StringPrototypeSlice,124,125,0
+block_hint,StringPrototypeSlice,21,22,0
+block_hint,StringPrototypeSlice,23,24,0
+block_hint,StringPrototypeSlice,128,129,1
+block_hint,StringPrototypeSlice,115,116,1
+block_hint,StringPrototypeSlice,40,41,0
+block_hint,StringPrototypeSlice,19,20,0
+block_hint,StringPrototypeSlice,130,131,1
+block_hint,StringPrototypeSlice,117,118,1
+block_hint,StringPrototypeSlice,44,45,0
+block_hint,StringPrototypeSlice,154,155,0
+block_hint,StringPrototypeSlice,148,149,0
+block_hint,StringPrototypeSlice,36,37,1
+block_hint,StringPrototypeSlice,33,34,0
+block_hint,StringPrototypeStartsWith,288,289,1
+block_hint,StringPrototypeStartsWith,271,272,0
+block_hint,StringPrototypeStartsWith,251,252,1
+block_hint,StringPrototypeStartsWith,235,236,1
+block_hint,StringPrototypeStartsWith,174,175,1
+block_hint,StringPrototypeStartsWith,278,279,1
+block_hint,StringPrototypeStartsWith,267,268,1
+block_hint,StringPrototypeStartsWith,253,254,1
+block_hint,StringPrototypeStartsWith,244,245,1
+block_hint,StringPrototypeStartsWith,179,180,1
+block_hint,StringPrototypeStartsWith,29,30,0
+block_hint,StringPrototypeStartsWith,68,69,0
+block_hint,StringPrototypeStartsWith,70,71,0
+block_hint,StringPrototypeStartsWith,185,186,1
+block_hint,StringPrototypeStartsWith,84,85,0
+block_hint,StringPrototypeStartsWith,86,87,0
+block_hint,StringPrototypeStartsWith,164,165,0
+block_hint,StringPrototypeStartsWith,47,48,0
+block_hint,StringPrototypeStartsWith,35,36,0
+block_hint,StringPrototypeStartsWith,49,50,1
+block_hint,StringPrototypeStartsWith,116,117,1
+block_hint,StringPrototypeSubstr,163,164,1
+block_hint,StringPrototypeSubstr,141,142,1
+block_hint,StringPrototypeSubstr,103,104,1
+block_hint,StringPrototypeSubstr,182,183,0
+block_hint,StringPrototypeSubstr,171,172,0
+block_hint,StringPrototypeSubstr,192,193,0
+block_hint,StringPrototypeSubstr,189,190,0
+block_hint,StringPrototypeSubstr,166,167,0
+block_hint,StringPrototypeSubstr,148,149,0
+block_hint,StringPrototypeSubstr,120,121,0
+block_hint,StringPrototypeSubstr,31,32,0
+block_hint,StringPrototypeSubstr,61,62,1
+block_hint,StringPrototypeSubstr,129,130,0
+block_hint,StringPrototypeSubstr,19,20,0
+block_hint,StringPrototypeSubstr,135,136,1
+block_hint,StringPrototypeSubstr,114,115,1
+block_hint,StringPrototypeSubstr,44,45,0
+block_hint,StringPrototypeSubstr,153,154,0
+block_hint,StringPrototypeSubstr,36,37,1
+block_hint,StringPrototypeSubstr,33,34,0
+block_hint,StringPrototypeSubstring,147,148,1
+block_hint,StringPrototypeSubstring,127,128,1
+block_hint,StringPrototypeSubstring,99,100,1
+block_hint,StringPrototypeSubstring,182,183,0
+block_hint,StringPrototypeSubstring,169,170,0
+block_hint,StringPrototypeSubstring,186,187,0
+block_hint,StringPrototypeSubstring,180,181,0
+block_hint,StringPrototypeSubstring,171,172,0
+block_hint,StringPrototypeSubstring,167,168,0
+block_hint,StringPrototypeSubstring,160,161,0
+block_hint,StringPrototypeSubstring,151,152,0
+block_hint,StringPrototypeSubstring,131,132,0
+block_hint,StringPrototypeSubstring,89,90,0
+block_hint,StringPrototypeSubstring,65,66,1
+block_hint,StringPrototypeSubstring,101,102,1
+block_hint,StringPrototypeSubstring,58,59,1
+block_hint,StringPrototypeSubstring,115,116,0
+block_hint,StringPrototypeSubstring,85,86,1
+block_hint,StringPrototypeSubstring,17,18,0
+block_hint,StringPrototypeSubstring,121,122,1
+block_hint,StringPrototypeSubstring,109,110,1
+block_hint,StringPrototypeSubstring,42,43,0
+block_hint,StringPrototypeSubstring,54,55,0
+block_hint,StringPrototypeSubstring,138,139,0
+block_hint,StringPrototypeSubstring,104,105,1
+block_hint,StringPrototypeSubstring,34,35,1
+block_hint,StringPrototypeTrim,462,463,1
+block_hint,StringPrototypeTrim,263,264,1
+block_hint,StringPrototypeTrim,186,187,1
+block_hint,StringPrototypeTrim,188,189,0
+block_hint,StringPrototypeTrim,436,437,0
+block_hint,StringPrototypeTrim,265,266,1
+block_hint,StringPrototypeTrim,156,157,0
+block_hint,StringPrototypeTrim,158,159,0
+block_hint,StringPrototypeTrim,247,248,0
+block_hint,StringPrototypeTrim,63,64,1
+block_hint,StringPrototypeTrim,362,363,1
+block_hint,StringPrototypeTrim,83,84,0
+block_hint,StringPrototypeTrim,249,250,0
+block_hint,StringPrototypeTrim,65,66,1
+block_hint,StringPrototypeTrim,388,389,0
+block_hint,StringPrototypeTrim,390,391,1
+block_hint,StringPrototypeTrim,128,129,0
+block_hint,StringPrototypeTrim,85,86,0
+block_hint,StringPrototypeTrim,92,93,0
+block_hint,StringPrototypeTrim,285,286,0
+block_hint,StringPrototypeTrim,178,179,1
+block_hint,StringPrototypeTrim,430,431,0
+block_hint,StringPrototypeTrim,251,252,0
+block_hint,StringPrototypeTrim,69,70,0
+block_hint,StringPrototypeTrim,71,72,0
+block_hint,StringPrototypeTrim,446,447,1
+block_hint,StringPrototypeTrim,416,417,1
+block_hint,StringPrototypeTrim,132,133,0
+block_hint,StringPrototypeTrim,152,153,0
+block_hint,StringPrototypeTrim,154,155,0
+block_hint,StringPrototypeTrim,239,240,0
+block_hint,StringPrototypeTrim,47,48,1
+block_hint,StringPrototypeTrim,298,299,1
+block_hint,StringPrototypeTrim,241,242,0
+block_hint,StringPrototypeTrim,49,50,1
+block_hint,StringPrototypeTrim,326,327,1
+block_hint,StringPrototypeTrim,81,82,0
+block_hint,StringPrototypeTrim,87,88,0
+block_hint,StringPrototypeTrim,283,284,1
+block_hint,StringPrototypeTrim,172,173,1
+block_hint,StringPrototypeTrim,428,429,0
+block_hint,StringPrototypeTrim,243,244,1
+block_hint,StringPrototypeTrim,51,52,0
+block_hint,StringPrototypeTrim,440,441,1
+block_hint,StringPrototypeTrim,354,355,1
+block_hint,StringPrototypeTrim,112,113,0
+block_hint,StringPrototypeTrim,466,467,0
+block_hint,StringPrototypeTrim,287,288,1
+block_hint,StringPrototypeTrim,97,98,1
+block_hint,StringPrototypeTrim,89,90,0
+block_hint,SymbolPrototypeToString,9,10,1
+block_hint,SymbolPrototypeToString,11,12,1
+block_hint,SymbolPrototypeToString,5,6,0
+block_hint,SymbolPrototypeToString,7,8,1
+block_hint,CreateTypedArray,567,568,0
+block_hint,CreateTypedArray,597,598,0
+block_hint,CreateTypedArray,540,541,0
+block_hint,CreateTypedArray,454,455,0
+block_hint,CreateTypedArray,333,334,1
+block_hint,CreateTypedArray,335,336,1
+block_hint,CreateTypedArray,640,641,0
+block_hint,CreateTypedArray,489,490,1
+block_hint,CreateTypedArray,487,488,1
+block_hint,CreateTypedArray,385,386,1
+block_hint,CreateTypedArray,546,547,0
+block_hint,CreateTypedArray,621,622,0
+block_hint,CreateTypedArray,544,545,0
+block_hint,CreateTypedArray,458,459,0
+block_hint,CreateTypedArray,396,397,0
+block_hint,CreateTypedArray,398,399,0
+block_hint,CreateTypedArray,388,389,0
+block_hint,CreateTypedArray,104,105,1
+block_hint,CreateTypedArray,106,107,1
+block_hint,CreateTypedArray,648,649,1
+block_hint,CreateTypedArray,600,601,1
+block_hint,CreateTypedArray,646,647,1
+block_hint,CreateTypedArray,618,619,1
+block_hint,CreateTypedArray,491,492,0
+block_hint,CreateTypedArray,523,524,1
+block_hint,CreateTypedArray,362,363,0
+block_hint,CreateTypedArray,236,237,0
+block_hint,CreateTypedArray,301,302,0
+block_hint,CreateTypedArray,281,282,1
+block_hint,CreateTypedArray,283,284,1
+block_hint,CreateTypedArray,493,494,0
+block_hint,CreateTypedArray,525,526,1
+block_hint,CreateTypedArray,364,365,0
+block_hint,CreateTypedArray,252,253,0
+block_hint,CreateTypedArray,303,304,0
+block_hint,CreateTypedArray,480,481,0
+block_hint,CreateTypedArray,482,483,0
+block_hint,CreateTypedArray,634,635,0
+block_hint,CreateTypedArray,498,499,1
+block_hint,CreateTypedArray,496,497,1
+block_hint,CreateTypedArray,400,401,1
+block_hint,CreateTypedArray,506,507,0
+block_hint,CreateTypedArray,500,501,0
+block_hint,CreateTypedArray,403,404,0
+block_hint,CreateTypedArray,152,153,1
+block_hint,CreateTypedArray,342,343,0
+block_hint,CreateTypedArray,154,155,1
+block_hint,CreateTypedArray,652,653,1
+block_hint,CreateTypedArray,607,608,1
+block_hint,CreateTypedArray,650,651,1
+block_hint,CreateTypedArray,624,625,1
+block_hint,CreateTypedArray,502,503,0
+block_hint,CreateTypedArray,519,520,1
+block_hint,CreateTypedArray,358,359,0
+block_hint,CreateTypedArray,204,205,0
+block_hint,CreateTypedArray,627,628,0
+block_hint,CreateTypedArray,166,167,1
+block_hint,CreateTypedArray,291,292,1
+block_hint,CreateTypedArray,293,294,1
+block_hint,CreateTypedArray,504,505,0
+block_hint,CreateTypedArray,521,522,1
+block_hint,CreateTypedArray,360,361,0
+block_hint,CreateTypedArray,220,221,0
+block_hint,CreateTypedArray,629,630,0
+block_hint,CreateTypedArray,513,514,0
+block_hint,CreateTypedArray,508,509,0
+block_hint,CreateTypedArray,465,466,0
+block_hint,CreateTypedArray,348,349,0
+block_hint,CreateTypedArray,419,420,1
+block_hint,CreateTypedArray,352,353,1
+block_hint,CreateTypedArray,350,351,1
+block_hint,CreateTypedArray,421,422,0
+block_hint,CreateTypedArray,656,657,0
+block_hint,CreateTypedArray,609,610,0
+block_hint,CreateTypedArray,529,530,1
+block_hint,CreateTypedArray,527,528,1
+block_hint,CreateTypedArray,433,434,1
+block_hint,CreateTypedArray,613,614,0
+block_hint,CreateTypedArray,537,538,0
+block_hint,CreateTypedArray,447,448,0
+block_hint,CreateTypedArray,316,317,0
+block_hint,CreateTypedArray,611,612,0
+block_hint,CreateTypedArray,535,536,0
+block_hint,CreateTypedArray,443,444,0
+block_hint,CreateTypedArray,265,266,0
+block_hint,CreateTypedArray,592,593,0
+block_hint,CreateTypedArray,323,324,0
+block_hint,CreateTypedArray,325,326,0
+block_hint,CreateTypedArray,372,373,0
+block_hint,CreateTypedArray,374,375,0
+block_hint,CreateTypedArray,318,319,0
+block_hint,CreateTypedArray,328,329,0
+block_hint,CreateTypedArray,321,322,0
+block_hint,CreateTypedArray,474,475,0
+block_hint,CreateTypedArray,517,518,1
+block_hint,CreateTypedArray,356,357,0
+block_hint,CreateTypedArray,188,189,0
+block_hint,CreateTypedArray,451,452,0
+block_hint,CreateTypedArray,273,274,0
+block_hint,TypedArrayFrom,168,169,1
+block_hint,TypedArrayFrom,151,152,0
+block_hint,TypedArrayFrom,131,132,1
+block_hint,TypedArrayFrom,100,101,1
+block_hint,TypedArrayFrom,58,59,1
+block_hint,TypedArrayFrom,60,61,1
+block_hint,TypedArrayFrom,124,125,1
+block_hint,TypedArrayFrom,115,116,0
+block_hint,TypedArrayFrom,92,93,0
+block_hint,TypedArrayFrom,71,72,1
+block_hint,TypedArrayFrom,73,74,1
+block_hint,TypedArrayFrom,175,176,1
+block_hint,TypedArrayFrom,177,178,0
+block_hint,TypedArrayFrom,179,180,0
+block_hint,TypedArrayFrom,186,187,1
+block_hint,TypedArrayFrom,181,182,0
+block_hint,TypedArrayFrom,183,184,1
+block_hint,TypedArrayFrom,173,174,1
+block_hint,TypedArrayFrom,165,166,0
+block_hint,TypedArrayFrom,156,157,1
+block_hint,TypedArrayFrom,118,119,0
+block_hint,TypedArrayFrom,75,76,1
+block_hint,TypedArrayFrom,77,78,1
+block_hint,TypedArrayFrom,26,27,0
+block_hint,TypedArrayFrom,96,97,0
+block_hint,TypedArrayFrom,28,29,0
+block_hint,TypedArrayFrom,86,87,1
+block_hint,TypedArrayFrom,30,31,1
+block_hint,TypedArrayPrototypeSet,196,197,1
+block_hint,TypedArrayPrototypeSet,104,105,1
+block_hint,TypedArrayPrototypeSet,106,107,1
+block_hint,TypedArrayPrototypeSet,249,250,1
+block_hint,TypedArrayPrototypeSet,282,283,0
+block_hint,TypedArrayPrototypeSet,268,269,0
+block_hint,TypedArrayPrototypeSet,256,257,0
+block_hint,TypedArrayPrototypeSet,223,224,0
+block_hint,TypedArrayPrototypeSet,155,156,0
+block_hint,TypedArrayPrototypeSet,198,199,0
+block_hint,TypedArrayPrototypeSet,200,201,0
+block_hint,TypedArrayPrototypeSet,167,168,0
+block_hint,TypedArrayPrototypeSet,278,279,0
+block_hint,TypedArrayPrototypeSet,265,266,1
+block_hint,TypedArrayPrototypeSet,244,245,1
+block_hint,TypedArrayPrototypeSet,211,212,0
+block_hint,TypedArrayPrototypeSet,213,214,0
+block_hint,TypedArrayPrototypeSet,171,172,0
+block_hint,TypedArrayPrototypeSet,159,160,0
+block_hint,TypedArrayPrototypeSet,179,180,0
+block_hint,TypedArrayPrototypeSet,123,124,0
+block_hint,TypedArrayPrototypeSet,185,186,1
+block_hint,TypedArrayPrototypeSet,91,92,1
+block_hint,TypedArrayPrototypeSet,81,82,0
+block_hint,TypedArrayPrototypeSet,83,84,0
+block_hint,TypedArrayPrototypeSet,85,86,0
+block_hint,TypedArrayPrototypeSet,87,88,0
+block_hint,TypedArrayPrototypeSet,187,188,0
+block_hint,TypedArrayPrototypeSet,146,147,0
+block_hint,TypedArrayPrototypeSubArray,129,130,1
+block_hint,TypedArrayPrototypeSubArray,82,83,1
+block_hint,TypedArrayPrototypeSubArray,84,85,1
+block_hint,TypedArrayPrototypeSubArray,159,160,1
+block_hint,TypedArrayPrototypeSubArray,151,152,0
+block_hint,TypedArrayPrototypeSubArray,131,132,0
+block_hint,TypedArrayPrototypeSubArray,133,134,0
+block_hint,TypedArrayPrototypeSubArray,210,211,0
+block_hint,TypedArrayPrototypeSubArray,190,191,0
+block_hint,TypedArrayPrototypeSubArray,170,171,0
+block_hint,TypedArrayPrototypeSubArray,218,219,0
+block_hint,TypedArrayPrototypeSubArray,205,206,0
+block_hint,TypedArrayPrototypeSubArray,196,197,0
+block_hint,TypedArrayPrototypeSubArray,186,187,1
+block_hint,TypedArrayPrototypeSubArray,154,155,0
+block_hint,TypedArrayPrototypeSubArray,137,138,0
+block_hint,TypedArrayPrototypeSubArray,165,166,0
+block_hint,TypedArrayPrototypeSubArray,216,217,0
+block_hint,TypedArrayPrototypeSubArray,203,204,0
+block_hint,TypedArrayPrototypeSubArray,192,193,0
+block_hint,TypedArrayPrototypeSubArray,149,150,1
+block_hint,TypedArrayPrototypeSubArray,124,125,0
+block_hint,TypedArrayPrototypeSubArray,102,103,0
+block_hint,TypedArrayPrototypeSubArray,104,105,0
+block_hint,TypedArrayPrototypeSubArray,115,116,0
+block_hint,TypedArrayPrototypeSubArray,63,64,1
+block_hint,TypedArrayPrototypeSubArray,65,66,1
+block_hint,TypedArrayPrototypeSubArray,145,146,1
+block_hint,TypedArrayPrototypeSubArray,80,81,0
+block_hint,TypedArrayPrototypeSubArray,117,118,0
+block_hint,TypedArrayPrototypeSubArray,90,91,1
+block_hint,TypedArrayPrototypeSubArray,92,93,1
+block_hint,TypedArrayPrototypeSubArray,119,120,0
+block_hint,TypedArrayPrototypeSubArray,94,95,1
+block_hint,TypedArrayPrototypeSubArray,96,97,1
+block_hint,TypedArrayPrototypeSubArray,69,70,1
+block_hint,TypedArrayPrototypeSubArray,98,99,1
+block_hint,TypedArrayPrototypeSubArray,100,101,1
+block_hint,TypedArrayPrototypeSubArray,73,74,0
+block_hint,NewSloppyArgumentsElements,44,45,1
+block_hint,NewSloppyArgumentsElements,24,25,1
+block_hint,NewSloppyArgumentsElements,33,34,0
+block_hint,NewSloppyArgumentsElements,14,15,0
+block_hint,NewSloppyArgumentsElements,16,17,0
+block_hint,NewSloppyArgumentsElements,46,47,1
+block_hint,NewSloppyArgumentsElements,36,37,1
+block_hint,NewSloppyArgumentsElements,18,19,0
+block_hint,NewSloppyArgumentsElements,48,49,0
+block_hint,NewStrictArgumentsElements,9,10,0
+block_hint,NewStrictArgumentsElements,20,21,0
+block_hint,NewRestArgumentsElements,25,26,0
+block_hint,NewRestArgumentsElements,11,12,1
+block_hint,NewRestArgumentsElements,16,17,0
+block_hint,NewRestArgumentsElements,5,6,0
+block_hint,NewRestArgumentsElements,7,8,0
+block_hint,NewRestArgumentsElements,23,24,1
+block_hint,NewRestArgumentsElements,19,20,1
+block_hint,NewRestArgumentsElements,9,10,0
+block_hint,NewRestArgumentsElements,21,22,0
+block_hint,FastNewSloppyArguments,41,42,1
+block_hint,FastNewSloppyArguments,43,44,0
+block_hint,FastNewSloppyArguments,101,102,1
+block_hint,FastNewSloppyArguments,45,46,0
+block_hint,FastNewSloppyArguments,13,14,0
+block_hint,FastNewSloppyArguments,15,16,0
+block_hint,FastNewSloppyArguments,75,76,1
+block_hint,FastNewSloppyArguments,59,60,1
+block_hint,FastNewSloppyArguments,17,18,0
+block_hint,FastNewSloppyArguments,61,62,0
+block_hint,FastNewSloppyArguments,81,82,1
+block_hint,FastNewSloppyArguments,47,48,0
+block_hint,FastNewSloppyArguments,19,20,0
+block_hint,FastNewSloppyArguments,21,22,0
+block_hint,FastNewSloppyArguments,71,72,1
+block_hint,FastNewSloppyArguments,55,56,1
+block_hint,FastNewSloppyArguments,23,24,0
+block_hint,FastNewSloppyArguments,73,74,0
+block_hint,FastNewSloppyArguments,25,26,1
+block_hint,FastNewSloppyArguments,51,52,1
+block_hint,FastNewSloppyArguments,27,28,1
+block_hint,FastNewSloppyArguments,29,30,0
+block_hint,FastNewSloppyArguments,31,32,0
+block_hint,FastNewSloppyArguments,77,78,1
+block_hint,FastNewSloppyArguments,63,64,1
+block_hint,FastNewSloppyArguments,33,34,0
+block_hint,FastNewSloppyArguments,35,36,1
+block_hint,FastNewSloppyArguments,53,54,1
+block_hint,FastNewStrictArguments,16,17,1
+block_hint,FastNewStrictArguments,18,19,0
+block_hint,FastNewStrictArguments,20,21,0
+block_hint,FastNewStrictArguments,7,8,0
+block_hint,FastNewStrictArguments,9,10,0
+block_hint,FastNewStrictArguments,31,32,1
+block_hint,FastNewStrictArguments,25,26,1
+block_hint,FastNewStrictArguments,11,12,0
+block_hint,FastNewStrictArguments,27,28,0
+block_hint,FastNewStrictArguments,13,14,1
+block_hint,FastNewStrictArguments,23,24,1
+block_hint,FastNewRestArguments,16,17,1
+block_hint,FastNewRestArguments,18,19,0
+block_hint,FastNewRestArguments,34,35,1
+block_hint,FastNewRestArguments,7,8,1
+block_hint,FastNewRestArguments,21,22,0
+block_hint,FastNewRestArguments,9,10,0
+block_hint,FastNewRestArguments,11,12,0
+block_hint,FastNewRestArguments,32,33,1
+block_hint,FastNewRestArguments,25,26,1
+block_hint,FastNewRestArguments,13,14,0
+block_hint,FastNewRestArguments,27,28,0
+block_hint,FastNewRestArguments,23,24,1
+block_hint,StringSlowFlatten,35,36,1
+block_hint,StringSlowFlatten,20,21,1
+block_hint,StringSlowFlatten,4,5,0
+block_hint,StringSlowFlatten,30,31,1
+block_hint,StringSlowFlatten,22,23,1
+block_hint,StringIndexOf,160,161,0
+block_hint,StringIndexOf,112,113,1
+block_hint,StringIndexOf,125,126,1
+block_hint,StringIndexOf,91,92,0
+block_hint,StringIndexOf,117,118,1
+block_hint,StringIndexOf,136,137,0
+block_hint,StringIndexOf,44,45,0
+block_hint,StringIndexOf,46,47,0
+block_hint,StringIndexOf,133,134,0
+block_hint,StringIndexOf,76,77,0
+block_hint,StringIndexOf,78,79,0
+block_hint,StringIndexOf,72,73,0
+block_hint,StringIndexOf,74,75,0
+block_hint,StringIndexOf,40,41,0
+block_hint,StringIndexOf,42,43,0
+block_hint,StringIndexOf,127,128,1
+block_hint,StringIndexOf,56,57,0
+block_hint,StringIndexOf,58,59,0
+block_hint,Load_FastSmiElements_0,2,3,1
+block_hint,Load_FastObjectElements_0,2,3,1
+block_hint,Store_FastSmiElements_0,2,3,1
+block_hint,Store_FastObjectElements_0,2,3,1
+block_hint,SortCompareDefault,8,9,1
+block_hint,SortCompareDefault,20,21,1
+block_hint,SortCompareDefault,17,18,1
+block_hint,SortCompareDefault,14,15,1
+block_hint,SortCompareDefault,11,12,1
+block_hint,SortCompareDefault,6,7,1
+block_hint,SortCompareUserFn,9,10,0
+block_hint,SortCompareUserFn,5,6,0
+block_hint,Copy,17,18,1
+block_hint,Copy,9,10,1
+block_hint,Copy,11,12,1
+block_hint,Copy,5,6,1
+block_hint,Copy,7,8,1
+block_hint,MergeAt,13,14,1
+block_hint,MergeAt,15,16,1
+block_hint,MergeAt,17,18,1
+block_hint,MergeAt,19,20,1
+block_hint,MergeAt,140,141,0
+block_hint,MergeAt,29,30,1
+block_hint,MergeAt,31,32,0
+block_hint,MergeAt,33,34,1
+block_hint,MergeAt,35,36,1
+block_hint,MergeAt,123,124,0
+block_hint,MergeAt,236,237,1
+block_hint,MergeAt,225,226,1
+block_hint,MergeAt,69,70,1
+block_hint,MergeAt,71,72,1
+block_hint,MergeAt,150,151,1
+block_hint,MergeAt,103,104,0
+block_hint,MergeAt,73,74,1
+block_hint,MergeAt,75,76,1
+block_hint,MergeAt,227,228,0
+block_hint,MergeAt,81,82,1
+block_hint,MergeAt,83,84,1
+block_hint,MergeAt,198,199,0
+block_hint,MergeAt,134,135,0
+block_hint,MergeAt,77,78,1
+block_hint,MergeAt,79,80,1
+block_hint,MergeAt,196,197,1
+block_hint,MergeAt,132,133,0
+block_hint,MergeAt,152,153,0
+block_hint,MergeAt,182,183,1
+block_hint,MergeAt,85,86,1
+block_hint,MergeAt,87,88,1
+block_hint,MergeAt,89,90,1
+block_hint,MergeAt,147,148,0
+block_hint,MergeAt,91,92,1
+block_hint,MergeAt,93,94,1
+block_hint,MergeAt,95,96,1
+block_hint,MergeAt,107,108,1
+block_hint,MergeAt,194,195,1
+block_hint,MergeAt,97,98,1
+block_hint,MergeAt,99,100,1
+block_hint,MergeAt,230,231,1
+block_hint,MergeAt,116,117,0
+block_hint,MergeAt,232,233,1
+block_hint,MergeAt,220,221,1
+block_hint,MergeAt,37,38,1
+block_hint,MergeAt,39,40,1
+block_hint,MergeAt,154,155,1
+block_hint,MergeAt,109,110,0
+block_hint,MergeAt,41,42,1
+block_hint,MergeAt,43,44,1
+block_hint,MergeAt,222,223,0
+block_hint,MergeAt,49,50,1
+block_hint,MergeAt,51,52,1
+block_hint,MergeAt,202,203,0
+block_hint,MergeAt,138,139,0
+block_hint,MergeAt,45,46,1
+block_hint,MergeAt,47,48,1
+block_hint,MergeAt,200,201,1
+block_hint,MergeAt,136,137,0
+block_hint,MergeAt,111,112,0
+block_hint,MergeAt,165,166,1
+block_hint,MergeAt,53,54,1
+block_hint,MergeAt,207,208,0
+block_hint,MergeAt,169,170,0
+block_hint,MergeAt,55,56,1
+block_hint,MergeAt,57,58,1
+block_hint,MergeAt,143,144,1
+block_hint,MergeAt,59,60,1
+block_hint,MergeAt,173,174,0
+block_hint,MergeAt,61,62,1
+block_hint,MergeAt,63,64,1
+block_hint,MergeAt,113,114,0
+block_hint,MergeAt,192,193,1
+block_hint,MergeAt,65,66,1
+block_hint,MergeAt,67,68,1
+block_hint,GallopLeft,11,12,1
+block_hint,GallopLeft,47,48,0
+block_hint,GallopLeft,15,16,1
+block_hint,GallopLeft,63,64,0
+block_hint,GallopLeft,29,30,0
+block_hint,GallopLeft,41,42,0
+block_hint,GallopLeft,13,14,1
+block_hint,GallopLeft,65,66,0
+block_hint,GallopLeft,31,32,0
+block_hint,GallopLeft,39,40,0
+block_hint,GallopLeft,17,18,1
+block_hint,GallopLeft,61,62,0
+block_hint,GallopRight,11,12,1
+block_hint,GallopRight,47,48,0
+block_hint,GallopRight,35,36,1
+block_hint,GallopRight,15,16,1
+block_hint,GallopRight,63,64,0
+block_hint,GallopRight,29,30,0
+block_hint,GallopRight,41,42,0
+block_hint,GallopRight,13,14,1
+block_hint,GallopRight,65,66,0
+block_hint,GallopRight,31,32,0
+block_hint,GallopRight,39,40,0
+block_hint,GallopRight,17,18,1
+block_hint,GallopRight,61,62,0
+block_hint,ArrayTimSort,120,121,0
+block_hint,ArrayTimSort,240,241,0
+block_hint,ArrayTimSort,227,228,0
+block_hint,ArrayTimSort,122,123,0
+block_hint,ArrayTimSort,163,164,0
+block_hint,ArrayTimSort,140,141,0
+block_hint,ArrayTimSort,33,34,1
+block_hint,ArrayTimSort,93,94,0
+block_hint,ArrayTimSort,95,96,0
+block_hint,ArrayTimSort,143,144,0
+block_hint,ArrayTimSort,35,36,1
+block_hint,ArrayTimSort,37,38,1
+block_hint,ArrayTimSort,214,215,0
+block_hint,ArrayTimSort,145,146,1
+block_hint,ArrayTimSort,39,40,1
+block_hint,ArrayTimSort,218,219,0
+block_hint,ArrayTimSort,216,217,0
+block_hint,ArrayTimSort,41,42,1
+block_hint,ArrayTimSort,43,44,1
+block_hint,ArrayTimSort,45,46,1
+block_hint,ArrayTimSort,134,135,0
+block_hint,ArrayTimSort,47,48,1
+block_hint,ArrayTimSort,49,50,1
+block_hint,ArrayTimSort,222,223,0
+block_hint,ArrayTimSort,51,52,1
+block_hint,ArrayTimSort,53,54,1
+block_hint,ArrayTimSort,55,56,1
+block_hint,ArrayTimSort,57,58,1
+block_hint,ArrayTimSort,59,60,1
+block_hint,ArrayTimSort,61,62,1
+block_hint,ArrayTimSort,63,64,1
+block_hint,ArrayTimSort,65,66,1
+block_hint,ArrayTimSort,67,68,1
+block_hint,ArrayTimSort,69,70,1
+block_hint,ArrayTimSort,71,72,1
+block_hint,ArrayTimSort,157,158,1
+block_hint,ArrayTimSort,73,74,1
+block_hint,ArrayTimSort,75,76,1
+block_hint,ArrayTimSort,204,205,0
+block_hint,ArrayTimSort,77,78,1
+block_hint,ArrayTimSort,79,80,1
+block_hint,ArrayTimSort,209,210,0
+block_hint,ArrayTimSort,81,82,1
+block_hint,ArrayTimSort,83,84,1
+block_hint,ArrayTimSort,186,187,0
+block_hint,ArrayTimSort,236,237,1
+block_hint,ArrayTimSort,238,239,1
+block_hint,ArrayTimSort,211,212,1
+block_hint,ArrayTimSort,161,162,1
+block_hint,ArrayTimSort,85,86,1
+block_hint,ArrayTimSort,243,244,1
+block_hint,ArrayTimSort,230,231,0
+block_hint,ArrayTimSort,188,189,1
+block_hint,ArrayTimSort,138,139,0
+block_hint,ArrayTimSort,87,88,1
+block_hint,ArrayTimSort,113,114,0
+block_hint,ArrayTimSort,89,90,0
+block_hint,ArrayPrototypeSort,106,107,1
+block_hint,ArrayPrototypeSort,80,81,0
+block_hint,ArrayPrototypeSort,39,40,1
+block_hint,ArrayPrototypeSort,70,71,0
+block_hint,ArrayPrototypeSort,41,42,1
+block_hint,ArrayPrototypeSort,82,83,1
+block_hint,ArrayPrototypeSort,84,85,1
+block_hint,ArrayPrototypeSort,63,64,0
+block_hint,ArrayPrototypeSort,27,28,0
+block_hint,ArrayPrototypeSort,121,122,0
+block_hint,ArrayPrototypeSort,101,102,1
+block_hint,ArrayPrototypeSort,73,74,1
+block_hint,ArrayPrototypeSort,51,52,1
+block_hint,ArrayPrototypeSort,15,16,1
+block_hint,ArrayPrototypeSort,95,96,1
+block_hint,ArrayPrototypeSort,75,76,0
+block_hint,ArrayPrototypeSort,53,54,0
+block_hint,ArrayPrototypeSort,139,140,0
+block_hint,ArrayPrototypeSort,142,143,0
+block_hint,ArrayPrototypeSort,132,133,0
+block_hint,ArrayPrototypeSort,128,129,0
+block_hint,ArrayPrototypeSort,103,104,0
+block_hint,ArrayPrototypeSort,114,115,1
+block_hint,ArrayPrototypeSort,118,119,1
+block_hint,ArrayPrototypeSort,77,78,1
+block_hint,ArrayPrototypeSort,33,34,0
+block_hint,ArrayPrototypeSort,98,99,1
+block_hint,ArrayPrototypeSort,91,92,1
+block_hint,ArrayPrototypeSort,56,57,1
+block_hint,StringFastLocaleCompare,315,316,1
+block_hint,StringFastLocaleCompare,239,240,0
+block_hint,StringFastLocaleCompare,303,304,1
+block_hint,StringFastLocaleCompare,156,157,0
+block_hint,StringFastLocaleCompare,158,159,0
+block_hint,StringFastLocaleCompare,267,268,1
+block_hint,StringFastLocaleCompare,106,107,0
+block_hint,StringFastLocaleCompare,307,308,1
+block_hint,StringFastLocaleCompare,172,173,0
+block_hint,StringFastLocaleCompare,174,175,0
+block_hint,StringFastLocaleCompare,109,110,0
+block_hint,StringFastLocaleCompare,211,212,1
+block_hint,StringFastLocaleCompare,271,272,1
+block_hint,StringFastLocaleCompare,276,277,0
+block_hint,StringFastLocaleCompare,253,254,1
+block_hint,StringFastLocaleCompare,73,74,0
+block_hint,StringFastLocaleCompare,274,275,1
+block_hint,StringFastLocaleCompare,116,117,0
+block_hint,StringFastLocaleCompare,77,78,1
+block_hint,CanUseSameAccessor_FastObjectElements_0,2,3,1
+block_hint,CanUseSameAccessor_FastObjectElements_0,4,5,1
+block_hint,StringPrototypeToLowerCaseIntl,10,11,1
+block_hint,StringPrototypeToLowerCaseIntl,7,8,1
+block_hint,StringPrototypeToLowerCaseIntl,5,6,1
+block_hint,StringToLowerCaseIntl,23,24,1
+block_hint,StringToLowerCaseIntl,25,26,0
+block_hint,StringToLowerCaseIntl,34,35,1
+block_hint,StringToLowerCaseIntl,7,8,0
+block_hint,StringToLowerCaseIntl,43,44,1
+block_hint,StringToLowerCaseIntl,41,42,1
+block_hint,StringToLowerCaseIntl,19,20,0
+block_hint,StringToLowerCaseIntl,39,40,0
+block_hint,StringToLowerCaseIntl,14,15,0
+block_hint,LdaContextSlotHandler,3,4,1
+block_hint,LdaContextSlotHandler,5,6,1
+block_hint,LdaImmutableContextSlotHandler,3,4,1
+block_hint,LdaImmutableContextSlotHandler,5,6,1
+block_hint,LdaCurrentContextSlotHandler,2,3,1
+block_hint,LdaImmutableCurrentContextSlotHandler,2,3,1
+block_hint,TestTypeOfHandler,7,8,1
+block_hint,TestTypeOfHandler,15,16,0
+block_hint,TestTypeOfHandler,23,24,0
+block_hint,TestTypeOfHandler,27,28,1
+block_hint,TestTypeOfHandler,31,32,1
+block_hint,TestTypeOfHandler,50,51,0
+block_hint,TestTypeOfHandler,35,36,0
+block_hint,LdaGlobalHandler,7,8,1
+block_hint,LdaGlobalHandler,9,10,1
+block_hint,LdaGlobalHandler,11,12,1
+block_hint,LdaGlobalHandler,13,14,1
+block_hint,LdaGlobalHandler,183,184,0
+block_hint,LdaGlobalHandler,105,106,0
+block_hint,LdaGlobalHandler,109,110,1
+block_hint,StaContextSlotHandler,5,6,1
+block_hint,StaCurrentContextSlotHandler,2,3,1
+block_hint,GetNamedPropertyHandler,379,380,1
+block_hint,GetNamedPropertyHandler,219,220,0
+block_hint,GetNamedPropertyHandler,77,78,0
+block_hint,GetNamedPropertyHandler,35,36,1
+block_hint,GetNamedPropertyHandler,318,319,0
+block_hint,GetNamedPropertyHandler,342,343,0
+block_hint,GetNamedPropertyHandler,221,222,1
+block_hint,GetNamedPropertyHandler,293,294,1
+block_hint,GetNamedPropertyHandler,39,40,0
+block_hint,GetNamedPropertyHandler,223,224,0
+block_hint,GetNamedPropertyHandler,297,298,1
+block_hint,GetNamedPropertyHandler,98,99,1
+block_hint,GetNamedPropertyHandler,350,351,0
+block_hint,GetNamedPropertyHandler,245,246,0
+block_hint,GetNamedPropertyHandler,154,155,0
+block_hint,GetNamedPropertyHandler,122,123,1
+block_hint,GetNamedPropertyHandler,49,50,0
+block_hint,GetNamedPropertyHandler,87,88,0
+block_hint,GetNamedPropertyHandler,25,26,1
+block_hint,GetNamedPropertyHandler,144,145,0
+block_hint,GetNamedPropertyHandler,65,66,0
+block_hint,GetNamedPropertyHandler,306,307,1
+block_hint,GetNamedPropertyHandler,102,103,0
+block_hint,GetNamedPropertyHandler,251,252,1
+block_hint,GetNamedPropertyHandler,253,254,1
+block_hint,GetNamedPropertyHandler,247,248,1
+block_hint,GetNamedPropertyHandler,249,250,1
+block_hint,GetNamedPropertyHandler,164,165,1
+block_hint,AddHandler,53,54,0
+block_hint,AddHandler,37,38,0
+block_hint,AddHandler,28,29,1
+block_hint,AddHandler,80,81,0
+block_hint,AddHandler,60,61,1
+block_hint,AddHandler,40,41,1
+block_hint,AddHandler,74,75,1
+block_hint,AddHandler,43,44,1
+block_hint,AddHandler,56,57,1
+block_hint,AddHandler,22,23,1
+block_hint,SubHandler,35,36,0
+block_hint,SubHandler,23,24,1
+block_hint,SubHandler,64,65,1
+block_hint,SubHandler,75,76,1
+block_hint,SubHandler,66,67,1
+block_hint,SubHandler,45,46,1
+block_hint,SubHandler,19,20,1
+block_hint,MulHandler,79,80,1
+block_hint,MulHandler,75,76,1
+block_hint,MulHandler,26,27,1
+block_hint,MulHandler,85,86,1
+block_hint,MulHandler,69,70,1
+block_hint,MulHandler,47,48,1
+block_hint,MulHandler,21,22,1
+block_hint,DivHandler,81,82,0
+block_hint,DivHandler,75,76,0
+block_hint,DivHandler,64,65,0
+block_hint,DivHandler,43,44,1
+block_hint,DivHandler,23,24,1
+block_hint,DivHandler,83,84,1
+block_hint,DivHandler,70,71,1
+block_hint,DivHandler,46,47,1
+block_hint,DivHandler,17,18,1
+block_hint,ModHandler,77,78,1
+block_hint,ModHandler,74,75,0
+block_hint,ModHandler,70,71,0
+block_hint,ModHandler,56,57,1
+block_hint,ModHandler,51,52,1
+block_hint,ModHandler,28,29,0
+block_hint,ModHandler,8,9,0
+block_hint,ModHandler,15,16,1
+block_hint,BitwiseOrHandler,42,43,0
+block_hint,BitwiseOrHandler,30,31,1
+block_hint,BitwiseOrHandler,8,9,1
+block_hint,BitwiseOrHandler,56,57,1
+block_hint,BitwiseOrHandler,60,61,1
+block_hint,BitwiseOrHandler,24,25,1
+block_hint,BitwiseXorHandler,32,33,1
+block_hint,BitwiseXorHandler,56,57,1
+block_hint,BitwiseXorHandler,60,61,1
+block_hint,BitwiseXorHandler,24,25,1
+block_hint,BitwiseAndHandler,32,33,1
+block_hint,BitwiseAndHandler,56,57,1
+block_hint,BitwiseAndHandler,60,61,1
+block_hint,BitwiseAndHandler,24,25,1
+block_hint,ShiftLeftHandler,10,11,0
+block_hint,ShiftLeftHandler,60,61,1
+block_hint,ShiftLeftHandler,24,25,1
+block_hint,ShiftRightHandler,32,33,1
+block_hint,ShiftRightHandler,10,11,0
+block_hint,ShiftRightHandler,58,59,0
+block_hint,ShiftRightHandler,39,40,0
+block_hint,ShiftRightHandler,24,25,1
+block_hint,ShiftRightLogicalHandler,10,11,0
+block_hint,ShiftRightLogicalHandler,58,59,0
+block_hint,ShiftRightLogicalHandler,39,40,0
+block_hint,AddSmiHandler,53,54,0
+block_hint,AddSmiHandler,37,38,0
+block_hint,AddSmiHandler,28,29,1
+block_hint,SubSmiHandler,35,36,0
+block_hint,SubSmiHandler,23,24,1
+block_hint,SubSmiHandler,19,20,1
+block_hint,MulSmiHandler,78,79,0
+block_hint,MulSmiHandler,63,64,0
+block_hint,MulSmiHandler,65,66,0
+block_hint,MulSmiHandler,34,35,0
+block_hint,MulSmiHandler,23,24,1
+block_hint,DivSmiHandler,69,70,0
+block_hint,DivSmiHandler,78,79,0
+block_hint,DivSmiHandler,64,65,0
+block_hint,DivSmiHandler,43,44,1
+block_hint,DivSmiHandler,15,16,0
+block_hint,DivSmiHandler,23,24,1
+block_hint,ModSmiHandler,56,57,1
+block_hint,ModSmiHandler,51,52,1
+block_hint,ModSmiHandler,28,29,0
+block_hint,ModSmiHandler,15,16,1
+block_hint,BitwiseOrSmiHandler,31,32,1
+block_hint,BitwiseOrSmiHandler,37,38,1
+block_hint,BitwiseAndSmiHandler,6,7,0
+block_hint,BitwiseAndSmiHandler,18,19,1
+block_hint,ShiftLeftSmiHandler,44,45,1
+block_hint,ShiftLeftSmiHandler,34,35,1
+block_hint,ShiftLeftSmiHandler,46,47,1
+block_hint,ShiftLeftSmiHandler,18,19,1
+block_hint,ShiftRightSmiHandler,31,32,1
+block_hint,ShiftRightSmiHandler,35,36,0
+block_hint,ShiftRightSmiHandler,29,30,0
+block_hint,ShiftRightSmiHandler,18,19,1
+block_hint,ShiftRightLogicalSmiHandler,40,41,0
+block_hint,ShiftRightLogicalSmiHandler,30,31,0
+block_hint,ShiftRightLogicalSmiHandler,34,35,1
+block_hint,ShiftRightLogicalSmiHandler,42,43,0
+block_hint,ShiftRightLogicalSmiHandler,32,33,0
+block_hint,ShiftRightLogicalSmiHandler,18,19,1
+block_hint,IncHandler,27,28,0
+block_hint,IncHandler,23,24,0
+block_hint,IncHandler,18,19,1
+block_hint,DecHandler,27,28,0
+block_hint,DecHandler,23,24,0
+block_hint,DecHandler,18,19,1
+block_hint,NegateHandler,26,27,1
+block_hint,NegateHandler,24,25,1
+block_hint,ToBooleanLogicalNotHandler,15,16,0
+block_hint,ToBooleanLogicalNotHandler,21,22,0
+block_hint,ToBooleanLogicalNotHandler,7,8,0
+block_hint,TypeOfHandler,20,21,0
+block_hint,CallAnyReceiverHandler,21,22,1
+block_hint,CallProperty0Handler,7,8,1
+block_hint,CallProperty0Handler,62,63,0
+block_hint,CallProperty0Handler,14,15,1
+block_hint,CallProperty0Handler,16,17,0
+block_hint,CallProperty0Handler,72,73,0
+block_hint,CallProperty0Handler,55,56,1
+block_hint,CallProperty1Handler,86,87,0
+block_hint,CallProperty1Handler,83,84,0
+block_hint,CallProperty1Handler,64,65,0
+block_hint,CallProperty1Handler,35,36,0
+block_hint,CallProperty1Handler,70,71,1
+block_hint,CallProperty1Handler,51,52,0
+block_hint,CallProperty1Handler,7,8,1
+block_hint,CallProperty1Handler,62,63,0
+block_hint,CallProperty1Handler,14,15,1
+block_hint,CallProperty1Handler,16,17,0
+block_hint,CallProperty1Handler,72,73,0
+block_hint,CallProperty1Handler,55,56,1
+block_hint,CallProperty2Handler,23,24,0
+block_hint,CallProperty2Handler,86,87,0
+block_hint,CallProperty2Handler,83,84,0
+block_hint,CallProperty2Handler,64,65,0
+block_hint,CallProperty2Handler,5,6,1
+block_hint,CallProperty2Handler,47,48,1
+block_hint,CallProperty2Handler,25,26,1
+block_hint,CallProperty2Handler,7,8,1
+block_hint,CallProperty2Handler,14,15,1
+block_hint,CallProperty2Handler,16,17,0
+block_hint,CallProperty2Handler,72,73,0
+block_hint,CallProperty2Handler,55,56,1
+block_hint,CallUndefinedReceiverHandler,86,87,0
+block_hint,CallUndefinedReceiverHandler,83,84,0
+block_hint,CallUndefinedReceiverHandler,64,65,0
+block_hint,CallUndefinedReceiverHandler,35,36,0
+block_hint,CallUndefinedReceiverHandler,70,71,1
+block_hint,CallUndefinedReceiverHandler,51,52,0
+block_hint,CallUndefinedReceiverHandler,29,30,1
+block_hint,CallUndefinedReceiver0Handler,86,87,0
+block_hint,CallUndefinedReceiver0Handler,83,84,0
+block_hint,CallUndefinedReceiver0Handler,64,65,0
+block_hint,CallUndefinedReceiver0Handler,35,36,0
+block_hint,CallUndefinedReceiver0Handler,70,71,1
+block_hint,CallUndefinedReceiver0Handler,51,52,0
+block_hint,CallUndefinedReceiver0Handler,29,30,1
+block_hint,CallUndefinedReceiver1Handler,86,87,0
+block_hint,CallUndefinedReceiver1Handler,83,84,0
+block_hint,CallUndefinedReceiver1Handler,64,65,0
+block_hint,CallUndefinedReceiver1Handler,35,36,0
+block_hint,CallUndefinedReceiver1Handler,70,71,1
+block_hint,CallUndefinedReceiver1Handler,51,52,0
+block_hint,CallUndefinedReceiver1Handler,29,30,1
+block_hint,CallUndefinedReceiver1Handler,7,8,1
+block_hint,CallUndefinedReceiver1Handler,62,63,0
+block_hint,CallUndefinedReceiver1Handler,14,15,1
+block_hint,CallUndefinedReceiver1Handler,16,17,0
+block_hint,CallUndefinedReceiver1Handler,72,73,0
+block_hint,CallUndefinedReceiver1Handler,55,56,1
+block_hint,CallUndefinedReceiver2Handler,23,24,0
+block_hint,CallUndefinedReceiver2Handler,86,87,0
+block_hint,CallUndefinedReceiver2Handler,83,84,0
+block_hint,CallUndefinedReceiver2Handler,64,65,0
+block_hint,CallUndefinedReceiver2Handler,35,36,0
+block_hint,CallUndefinedReceiver2Handler,70,71,1
+block_hint,CallUndefinedReceiver2Handler,51,52,0
+block_hint,CallUndefinedReceiver2Handler,29,30,1
+block_hint,CallUndefinedReceiver2Handler,7,8,1
+block_hint,CallUndefinedReceiver2Handler,62,63,0
+block_hint,CallUndefinedReceiver2Handler,14,15,1
+block_hint,CallUndefinedReceiver2Handler,16,17,0
+block_hint,CallUndefinedReceiver2Handler,72,73,0
+block_hint,CallUndefinedReceiver2Handler,55,56,1
+block_hint,CallWithSpreadHandler,23,24,1
+block_hint,ConstructHandler,52,53,0
+block_hint,ConstructHandler,41,42,1
+block_hint,ConstructHandler,24,25,1
+block_hint,ConstructHandler,15,16,1
+block_hint,ConstructHandler,3,4,1
+block_hint,ConstructHandler,39,40,1
+block_hint,TestEqualHandler,103,104,0
+block_hint,TestEqualHandler,25,26,1
+block_hint,TestEqualHandler,72,73,0
+block_hint,TestEqualHandler,79,80,1
+block_hint,TestEqualHandler,27,28,1
+block_hint,TestEqualHandler,85,86,0
+block_hint,TestEqualHandler,114,115,0
+block_hint,TestEqualHandler,19,20,1
+block_hint,TestEqualStrictHandler,82,83,0
+block_hint,TestEqualStrictHandler,53,54,1
+block_hint,TestEqualStrictHandler,66,67,0
+block_hint,TestEqualStrictHandler,59,60,1
+block_hint,TestEqualStrictHandler,41,42,1
+block_hint,TestEqualStrictHandler,61,62,0
+block_hint,TestEqualStrictHandler,55,56,1
+block_hint,TestEqualStrictHandler,47,48,0
+block_hint,TestEqualStrictHandler,72,73,0
+block_hint,TestEqualStrictHandler,49,50,0
+block_hint,TestEqualStrictHandler,7,8,1
+block_hint,TestLessThanHandler,41,42,0
+block_hint,TestLessThanHandler,63,64,0
+block_hint,TestLessThanHandler,65,66,1
+block_hint,TestLessThanHandler,49,50,1
+block_hint,TestLessThanHandler,9,10,1
+block_hint,TestGreaterThanHandler,41,42,0
+block_hint,TestGreaterThanHandler,45,46,1
+block_hint,TestGreaterThanHandler,9,10,1
+block_hint,TestLessThanOrEqualHandler,41,42,0
+block_hint,TestLessThanOrEqualHandler,9,10,1
+block_hint,TestGreaterThanOrEqualHandler,61,62,0
+block_hint,TestGreaterThanOrEqualHandler,41,42,0
+block_hint,TestGreaterThanOrEqualHandler,63,64,0
+block_hint,TestGreaterThanOrEqualHandler,9,10,1
+block_hint,TestInstanceOfHandler,17,18,1
+block_hint,TestInstanceOfHandler,19,20,1
+block_hint,TestInstanceOfHandler,4,5,1
+block_hint,TestInstanceOfHandler,21,22,1
+block_hint,ToNumericHandler,12,13,0
+block_hint,ToNumericHandler,7,8,1
+block_hint,ToStringHandler,3,4,1
+block_hint,CreateRegExpLiteralHandler,7,8,1
+block_hint,CreateRegExpLiteralHandler,3,4,1
+block_hint,CreateArrayLiteralHandler,38,39,1
+block_hint,CreateArrayLiteralHandler,41,42,1
+block_hint,CreateArrayLiteralHandler,13,14,0
+block_hint,CreateArrayLiteralHandler,50,51,1
+block_hint,CreateArrayLiteralHandler,46,47,1
+block_hint,CreateArrayLiteralHandler,22,23,0
+block_hint,CreateArrayLiteralHandler,28,29,1
+block_hint,CreateArrayLiteralHandler,3,4,1
+block_hint,CreateArrayLiteralHandler,30,31,1
+block_hint,CreateArrayLiteralHandler,5,6,1
+block_hint,CreateEmptyArrayLiteralHandler,3,4,1
+block_hint,CreateEmptyArrayLiteralHandler,13,14,1
+block_hint,CreateEmptyArrayLiteralHandler,5,6,1
+block_hint,CreateEmptyArrayLiteralHandler,15,16,1
+block_hint,CreateObjectLiteralHandler,88,89,0
+block_hint,CreateObjectLiteralHandler,122,123,0
+block_hint,CreateObjectLiteralHandler,116,117,1
+block_hint,CreateObjectLiteralHandler,92,93,1
+block_hint,CreateObjectLiteralHandler,108,109,1
+block_hint,CreateObjectLiteralHandler,72,73,1
+block_hint,CreateObjectLiteralHandler,34,35,0
+block_hint,CreateObjectLiteralHandler,74,75,1
+block_hint,CreateObjectLiteralHandler,64,65,0
+block_hint,CreateEmptyObjectLiteralHandler,4,5,1
+block_hint,CreateEmptyObjectLiteralHandler,11,12,1
+block_hint,CreateEmptyObjectLiteralHandler,6,7,0
+block_hint,CreateClosureHandler,2,3,1
+block_hint,CreateFunctionContextHandler,11,12,1
+block_hint,CreateFunctionContextHandler,4,5,1
+block_hint,CreateFunctionContextHandler,6,7,0
+block_hint,CreateMappedArgumentsHandler,52,53,0
+block_hint,CreateMappedArgumentsHandler,42,43,1
+block_hint,CreateMappedArgumentsHandler,44,45,0
+block_hint,CreateMappedArgumentsHandler,104,105,1
+block_hint,CreateMappedArgumentsHandler,46,47,0
+block_hint,CreateMappedArgumentsHandler,12,13,0
+block_hint,CreateMappedArgumentsHandler,14,15,0
+block_hint,CreateMappedArgumentsHandler,78,79,1
+block_hint,CreateMappedArgumentsHandler,58,59,1
+block_hint,CreateMappedArgumentsHandler,16,17,0
+block_hint,CreateMappedArgumentsHandler,60,61,0
+block_hint,CreateMappedArgumentsHandler,24,25,1
+block_hint,CreateMappedArgumentsHandler,70,71,1
+block_hint,CreateUnmappedArgumentsHandler,16,17,1
+block_hint,CreateUnmappedArgumentsHandler,18,19,0
+block_hint,CreateUnmappedArgumentsHandler,20,21,0
+block_hint,CreateUnmappedArgumentsHandler,7,8,0
+block_hint,CreateUnmappedArgumentsHandler,9,10,0
+block_hint,CreateUnmappedArgumentsHandler,31,32,1
+block_hint,CreateUnmappedArgumentsHandler,25,26,1
+block_hint,CreateUnmappedArgumentsHandler,11,12,0
+block_hint,CreateUnmappedArgumentsHandler,27,28,0
+block_hint,CreateUnmappedArgumentsHandler,13,14,1
+block_hint,CreateUnmappedArgumentsHandler,23,24,1
+block_hint,CreateRestParameterHandler,13,14,0
+block_hint,CreateRestParameterHandler,27,28,0
+block_hint,JumpLoopHandler,34,35,1
+block_hint,JumpLoopHandler,23,24,0
+block_hint,JumpLoopHandler,9,10,1
+block_hint,JumpIfToBooleanTrueConstantHandler,20,21,0
+block_hint,JumpIfToBooleanTrueConstantHandler,6,7,0
+block_hint,JumpIfToBooleanFalseConstantHandler,14,15,0
+block_hint,JumpIfToBooleanFalseConstantHandler,20,21,0
+block_hint,JumpIfToBooleanFalseConstantHandler,6,7,0
+block_hint,JumpIfToBooleanTrueHandler,14,15,0
+block_hint,JumpIfToBooleanTrueHandler,6,7,0
+block_hint,JumpIfToBooleanTrueHandler,8,9,1
+block_hint,JumpIfToBooleanFalseHandler,14,15,0
+block_hint,JumpIfToBooleanFalseHandler,20,21,0
+block_hint,JumpIfToBooleanFalseHandler,6,7,0
+block_hint,JumpIfUndefinedOrNullHandler,3,4,0
+block_hint,JumpIfJSReceiverHandler,5,6,1
+block_hint,JumpIfJSReceiverHandler,3,4,1
+block_hint,SwitchOnSmiNoFeedbackHandler,3,4,0
+block_hint,ForInEnumerateHandler,34,35,1
+block_hint,ForInPrepareHandler,18,19,1
+block_hint,ForInNextHandler,2,3,1
+block_hint,ForInNextHandler,13,14,1
+block_hint,ReturnHandler,3,4,1
+block_hint,ThrowReferenceErrorIfHoleHandler,4,5,0
+block_hint,ThrowSuperNotCalledIfHoleHandler,2,3,0
+block_hint,ThrowSuperAlreadyCalledIfNotHoleHandler,2,3,1
+block_hint,ThrowIfNotSuperConstructorHandler,2,3,1
+block_hint,SuspendGeneratorHandler,14,15,1
+block_hint,SuspendGeneratorHandler,8,9,1
+block_hint,SuspendGeneratorHandler,12,13,1
+block_hint,ResumeGeneratorHandler,10,11,1
+block_hint,ResumeGeneratorHandler,4,5,1
+block_hint,ResumeGeneratorHandler,6,7,1
+block_hint,LdaImmutableContextSlotWideHandler,3,4,1
+block_hint,LdaImmutableContextSlotWideHandler,9,10,0
+block_hint,LdaImmutableContextSlotWideHandler,5,6,1
+block_hint,LdaImmutableCurrentContextSlotWideHandler,2,3,1
+block_hint,LdaGlobalWideHandler,265,266,0
+block_hint,LdaGlobalWideHandler,110,111,1
+block_hint,StaGlobalWideHandler,3,4,0
+block_hint,StaCurrentContextSlotWideHandler,2,3,1
+block_hint,GetNamedPropertyWideHandler,334,335,0
+block_hint,GetNamedPropertyWideHandler,140,141,1
+block_hint,GetKeyedPropertyWideHandler,3,4,0
+block_hint,SetNamedPropertyWideHandler,3,4,0
+block_hint,DefineNamedOwnPropertyWideHandler,3,4,0
+block_hint,SetKeyedPropertyWideHandler,3,4,0
+block_hint,DefineKeyedOwnPropertyWideHandler,3,4,0
+block_hint,StaInArrayLiteralWideHandler,3,4,0
+block_hint,AddWideHandler,82,83,0
+block_hint,AddWideHandler,49,50,0
+block_hint,AddWideHandler,35,36,0
+block_hint,AddWideHandler,78,79,0
+block_hint,AddWideHandler,64,65,1
+block_hint,AddWideHandler,45,46,1
+block_hint,AddWideHandler,27,28,1
+block_hint,AddWideHandler,43,44,1
+block_hint,AddWideHandler,16,17,1
+block_hint,SubWideHandler,75,76,0
+block_hint,SubWideHandler,53,54,0
+block_hint,SubWideHandler,33,34,0
+block_hint,SubWideHandler,13,14,1
+block_hint,MulWideHandler,90,91,0
+block_hint,MulWideHandler,83,84,1
+block_hint,MulWideHandler,69,70,1
+block_hint,MulWideHandler,67,68,1
+block_hint,MulWideHandler,36,37,1
+block_hint,MulWideHandler,17,18,1
+block_hint,BitwiseOrWideHandler,28,29,0
+block_hint,BitwiseOrWideHandler,20,21,1
+block_hint,AddSmiWideHandler,49,50,0
+block_hint,AddSmiWideHandler,35,36,0
+block_hint,MulSmiWideHandler,78,79,0
+block_hint,MulSmiWideHandler,65,66,0
+block_hint,MulSmiWideHandler,34,35,0
+block_hint,MulSmiWideHandler,54,55,1
+block_hint,MulSmiWideHandler,36,37,0
+block_hint,MulSmiWideHandler,42,43,1
+block_hint,MulSmiWideHandler,17,18,1
+block_hint,ModSmiWideHandler,67,68,1
+block_hint,ModSmiWideHandler,60,61,0
+block_hint,ModSmiWideHandler,56,57,1
+block_hint,ModSmiWideHandler,51,52,1
+block_hint,ModSmiWideHandler,28,29,0
+block_hint,ModSmiWideHandler,8,9,0
+block_hint,ModSmiWideHandler,15,16,1
+block_hint,BitwiseOrSmiWideHandler,23,24,0
+block_hint,BitwiseOrSmiWideHandler,6,7,0
+block_hint,BitwiseOrSmiWideHandler,11,12,1
+block_hint,BitwiseAndSmiWideHandler,6,7,0
+block_hint,BitwiseAndSmiWideHandler,18,19,1
+block_hint,ShiftLeftSmiWideHandler,24,25,0
+block_hint,ShiftLeftSmiWideHandler,6,7,0
+block_hint,ShiftLeftSmiWideHandler,40,41,0
+block_hint,ShiftLeftSmiWideHandler,30,31,0
+block_hint,ShiftLeftSmiWideHandler,11,12,1
+block_hint,ShiftRightSmiWideHandler,23,24,0
+block_hint,ShiftRightSmiWideHandler,6,7,0
+block_hint,ShiftRightSmiWideHandler,11,12,1
+block_hint,IncWideHandler,9,10,0
+block_hint,IncWideHandler,25,26,0
+block_hint,IncWideHandler,19,20,0
+block_hint,IncWideHandler,7,8,1
+block_hint,CallPropertyWideHandler,68,69,0
+block_hint,CallPropertyWideHandler,19,20,0
+block_hint,CallProperty0WideHandler,68,69,0
+block_hint,CallProperty0WideHandler,19,20,0
+block_hint,CallProperty1WideHandler,68,69,0
+block_hint,CallProperty1WideHandler,19,20,0
+block_hint,CallProperty2WideHandler,68,69,0
+block_hint,CallProperty2WideHandler,19,20,0
+block_hint,CallUndefinedReceiverWideHandler,68,69,0
+block_hint,CallUndefinedReceiverWideHandler,19,20,0
+block_hint,CallUndefinedReceiver0WideHandler,68,69,0
+block_hint,CallUndefinedReceiver0WideHandler,19,20,0
+block_hint,CallUndefinedReceiver1WideHandler,68,69,0
+block_hint,CallUndefinedReceiver1WideHandler,19,20,0
+block_hint,CallUndefinedReceiver2WideHandler,68,69,0
+block_hint,CallUndefinedReceiver2WideHandler,19,20,0
+block_hint,ConstructWideHandler,49,50,0
+block_hint,ConstructWideHandler,22,23,0
+block_hint,TestEqualWideHandler,103,104,0
+block_hint,TestEqualWideHandler,95,96,0
+block_hint,TestEqualWideHandler,47,48,0
+block_hint,TestEqualWideHandler,7,8,1
+block_hint,TestEqualStrictWideHandler,82,83,0
+block_hint,TestEqualStrictWideHandler,53,54,1
+block_hint,TestEqualStrictWideHandler,55,56,1
+block_hint,TestEqualStrictWideHandler,47,48,0
+block_hint,TestEqualStrictWideHandler,9,10,0
+block_hint,TestEqualStrictWideHandler,4,5,1
+block_hint,TestGreaterThanWideHandler,24,25,0
+block_hint,TestGreaterThanWideHandler,6,7,1
+block_hint,CreateRegExpLiteralWideHandler,14,15,0
+block_hint,CreateRegExpLiteralWideHandler,9,10,1
+block_hint,CreateArrayLiteralWideHandler,42,43,0
+block_hint,CreateArrayLiteralWideHandler,20,21,1
+block_hint,CreateEmptyArrayLiteralWideHandler,22,23,0
+block_hint,CreateEmptyArrayLiteralWideHandler,11,12,1
+block_hint,CreateEmptyArrayLiteralWideHandler,5,6,1
+block_hint,CreateEmptyArrayLiteralWideHandler,15,16,1
+block_hint,CreateObjectLiteralWideHandler,99,100,0
+block_hint,CreateObjectLiteralWideHandler,58,59,1 +block_hint,CreateClosureWideHandler,9,10,1 +block_hint,CreateClosureWideHandler,2,3,1 +block_hint,CreateFunctionContextWideHandler,8,9,0 +block_hint,JumpLoopWideHandler,34,35,1 +block_hint,JumpLoopWideHandler,9,10,1 +block_hint,JumpIfToBooleanTrueWideHandler,18,19,1 +block_hint,JumpIfToBooleanTrueWideHandler,14,15,0 +block_hint,JumpIfToBooleanFalseWideHandler,18,19,1 +block_hint,JumpIfToBooleanFalseWideHandler,14,15,0 +block_hint,JumpIfToBooleanFalseWideHandler,20,21,0 +block_hint,JumpIfToBooleanFalseWideHandler,6,7,0 +block_hint,SwitchOnSmiNoFeedbackWideHandler,5,6,0 +block_hint,SwitchOnSmiNoFeedbackWideHandler,3,4,0 +block_hint,ForInNextWideHandler,11,12,0 +block_hint,ForInNextWideHandler,2,3,1 +block_hint,ForInNextWideHandler,4,5,0 +block_hint,ForInNextWideHandler,9,10,1 +block_hint,LdaGlobalExtraWideHandler,265,266,0 +block_hint,LdaGlobalExtraWideHandler,110,111,1 +block_hint,AddSmiExtraWideHandler,69,70,1 +block_hint,AddSmiExtraWideHandler,43,44,0 +block_hint,AddSmiExtraWideHandler,52,53,1 +block_hint,AddSmiExtraWideHandler,16,17,1 +block_hint,DivSmiExtraWideHandler,73,74,0 +block_hint,DivSmiExtraWideHandler,69,70,0 +block_hint,DivSmiExtraWideHandler,78,79,0 +block_hint,DivSmiExtraWideHandler,64,65,0 +block_hint,DivSmiExtraWideHandler,35,36,0 +block_hint,DivSmiExtraWideHandler,43,44,1 +block_hint,DivSmiExtraWideHandler,23,24,1 +block_hint,BitwiseAndSmiExtraWideHandler,31,32,1 +block_hint,BitwiseAndSmiExtraWideHandler,35,36,0 +block_hint,BitwiseAndSmiExtraWideHandler,29,30,0 +block_hint,BitwiseAndSmiExtraWideHandler,18,19,1 +block_hint,CallUndefinedReceiver1ExtraWideHandler,68,69,0 +block_hint,CallUndefinedReceiver1ExtraWideHandler,19,20,0 +builtin_hash,RecordWriteSaveFP,626390513 +builtin_hash,RecordWriteIgnoreFP,626390513 +builtin_hash,EphemeronKeyBarrierSaveFP,-719755886 +builtin_hash,AdaptorWithBuiltinExitFrame,-506288945 +builtin_hash,Call_ReceiverIsNullOrUndefined_Baseline_Compact,-634465027 +builtin_hash,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,-585445450 +builtin_hash,Call_ReceiverIsAny_Baseline_Compact,-585445450 +builtin_hash,CallProxy,690784589 +builtin_hash,CallWithSpread,-1022926733 +builtin_hash,CallWithSpread_Baseline,-849069302 +builtin_hash,CallWithArrayLike,459945534 +builtin_hash,ConstructWithSpread,872318319 +builtin_hash,ConstructWithSpread_Baseline,-872100778 +builtin_hash,Construct_Baseline,-106849933 +builtin_hash,FastNewObject,-1018095454 +builtin_hash,FastNewClosure,-910014514 +builtin_hash,StringEqual,-711928119 +builtin_hash,StringGreaterThan,-416690019 +builtin_hash,StringGreaterThanOrEqual,-1054396153 +builtin_hash,StringLessThan,-1054396153 +builtin_hash,StringLessThanOrEqual,-416690019 +builtin_hash,StringSubstring,358251310 +builtin_hash,OrderedHashTableHealIndex,725955381 +builtin_hash,CompileLazy,504995397 +builtin_hash,CompileLazyDeoptimizedCode,748068919 +builtin_hash,InstantiateAsmJs,-697690741 +builtin_hash,AllocateInYoungGeneration,214124693 +builtin_hash,AllocateRegularInYoungGeneration,-141910266 +builtin_hash,AllocateRegularInOldGeneration,-141910266 +builtin_hash,CopyFastSmiOrObjectElements,-883587649 +builtin_hash,GrowFastDoubleElements,-1000340886 +builtin_hash,GrowFastSmiOrObjectElements,36929045 +builtin_hash,ToNumber,-151588116 +builtin_hash,ToNumber_Baseline,435851851 +builtin_hash,ToNumeric_Baseline,15745649 +builtin_hash,ToNumberConvertBigInt,251501190 +builtin_hash,Typeof,-685026400 +builtin_hash,KeyedLoadIC_PolymorphicName,59242921 
+builtin_hash,KeyedStoreIC_Megamorphic,283484746 +builtin_hash,DefineKeyedOwnIC_Megamorphic,969546395 +builtin_hash,LoadGlobalIC_NoFeedback,-412177004 +builtin_hash,LoadIC_FunctionPrototype,-696483123 +builtin_hash,LoadIC_StringLength,695888016 +builtin_hash,LoadIC_StringWrapperLength,-366408146 +builtin_hash,LoadIC_NoFeedback,529908718 +builtin_hash,StoreIC_NoFeedback,519102631 +builtin_hash,DefineNamedOwnIC_NoFeedback,-983253079 +builtin_hash,KeyedLoadIC_SloppyArguments,-578047658 +builtin_hash,StoreFastElementIC_Standard,856806958 +builtin_hash,StoreFastElementIC_GrowNoTransitionHandleCOW,295024286 +builtin_hash,StoreFastElementIC_NoTransitionHandleCOW,825368452 +builtin_hash,ElementsTransitionAndStore_Standard,-891826531 +builtin_hash,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,-562086358 +builtin_hash,ElementsTransitionAndStore_NoTransitionHandleCOW,154727773 +builtin_hash,KeyedHasIC_PolymorphicName,681066279 +builtin_hash,EnqueueMicrotask,997834086 +builtin_hash,RunMicrotasks,835697778 +builtin_hash,HasProperty,325671088 +builtin_hash,DeleteProperty,362124331 +builtin_hash,SetDataProperties,649615472 +builtin_hash,ReturnReceiver,-720171624 +builtin_hash,ArrayConstructor,-709634836 +builtin_hash,ArrayConstructorImpl,106723908 +builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DontOverride,-22521131 +builtin_hash,ArrayNoArgumentConstructor_HoleySmi_DontOverride,-22521131 +builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DisableAllocationSites,-1046128045 +builtin_hash,ArrayNoArgumentConstructor_Packed_DisableAllocationSites,-1046128045 +builtin_hash,ArrayNoArgumentConstructor_Holey_DisableAllocationSites,-1046128045 +builtin_hash,ArrayNoArgumentConstructor_PackedDouble_DisableAllocationSites,-89922726 +builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DontOverride,-249912913 +builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,452895553 +builtin_hash,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,452895553 +builtin_hash,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,97007850 +builtin_hash,ArrayIncludesSmi,52014377 +builtin_hash,ArrayIncludesSmiOrObject,194648631 +builtin_hash,ArrayIncludes,-547277433 +builtin_hash,ArrayIndexOfSmi,-203639532 +builtin_hash,ArrayIndexOfSmiOrObject,547347825 +builtin_hash,ArrayIndexOf,917956553 +builtin_hash,ArrayPrototypePop,-480442047 +builtin_hash,ArrayPrototypePush,879036958 +builtin_hash,CloneFastJSArray,330965023 +builtin_hash,CloneFastJSArrayFillingHoles,356661348 +builtin_hash,ExtractFastJSArray,-420221067 +builtin_hash,ArrayPrototypeEntries,138422158 +builtin_hash,ArrayPrototypeKeys,-3226360 +builtin_hash,ArrayPrototypeValues,138422158 +builtin_hash,ArrayIteratorPrototypeNext,957293662 +builtin_hash,AsyncFunctionEnter,-1043254273 +builtin_hash,AsyncFunctionResolve,847009948 +builtin_hash,AsyncFunctionAwaitCaught,-616846371 +builtin_hash,AsyncFunctionAwaitUncaught,-616846371 +builtin_hash,AsyncFunctionAwaitResolveClosure,518113046 +builtin_hash,DatePrototypeGetDate,517103214 +builtin_hash,DatePrototypeGetDay,517103214 +builtin_hash,DatePrototypeGetFullYear,517103214 +builtin_hash,DatePrototypeGetHours,517103214 +builtin_hash,DatePrototypeGetMilliseconds,402424200 +builtin_hash,DatePrototypeGetMinutes,517103214 +builtin_hash,DatePrototypeGetMonth,517103214 +builtin_hash,DatePrototypeGetSeconds,517103214 +builtin_hash,DatePrototypeGetTime,-634509018 +builtin_hash,DatePrototypeGetTimezoneOffset,402424200 +builtin_hash,DatePrototypeValueOf,-634509018 
+builtin_hash,DatePrototypeToPrimitive,601732193 +builtin_hash,CreateIterResultObject,277090833 +builtin_hash,CreateGeneratorObject,-109733150 +builtin_hash,GeneratorPrototypeNext,-337770274 +builtin_hash,GeneratorPrototypeReturn,-356725560 +builtin_hash,SuspendGeneratorBaseline,877095808 +builtin_hash,ResumeGeneratorBaseline,263125026 +builtin_hash,GlobalIsFinite,-487573831 +builtin_hash,GlobalIsNaN,-507424666 +builtin_hash,LoadIC,1072966044 +builtin_hash,LoadIC_Megamorphic,132390484 +builtin_hash,LoadIC_Noninlined,-1003101269 +builtin_hash,LoadICTrampoline,709645727 +builtin_hash,LoadICBaseline,150897937 +builtin_hash,LoadICTrampoline_Megamorphic,709645727 +builtin_hash,LoadSuperIC,-323552047 +builtin_hash,LoadSuperICBaseline,489833039 +builtin_hash,KeyedLoadIC,166545115 +builtin_hash,KeyedLoadIC_Megamorphic,483505882 +builtin_hash,KeyedLoadICTrampoline,709645727 +builtin_hash,KeyedLoadICBaseline,150897937 +builtin_hash,KeyedLoadICTrampoline_Megamorphic,709645727 +builtin_hash,StoreGlobalIC,-192111648 +builtin_hash,StoreGlobalICTrampoline,709645727 +builtin_hash,StoreGlobalICBaseline,150897937 +builtin_hash,StoreIC,1060265796 +builtin_hash,StoreICTrampoline,1046844053 +builtin_hash,StoreICBaseline,489833039 +builtin_hash,DefineNamedOwnIC,-883359916 +builtin_hash,DefineNamedOwnICBaseline,489833039 +builtin_hash,KeyedStoreIC,-187772846 +builtin_hash,KeyedStoreICTrampoline,1046844053 +builtin_hash,KeyedStoreICBaseline,489833039 +builtin_hash,DefineKeyedOwnIC,229093376 +builtin_hash,StoreInArrayLiteralIC,84243524 +builtin_hash,StoreInArrayLiteralICBaseline,489833039 +builtin_hash,LoadGlobalIC,443237854 +builtin_hash,LoadGlobalICInsideTypeof,34880284 +builtin_hash,LoadGlobalICTrampoline,-306966323 +builtin_hash,LoadGlobalICBaseline,162605878 +builtin_hash,LoadGlobalICInsideTypeofTrampoline,-306966323 +builtin_hash,LoadGlobalICInsideTypeofBaseline,162605878 +builtin_hash,LookupGlobalICBaseline,658263603 +builtin_hash,LookupGlobalICInsideTypeofBaseline,658263603 +builtin_hash,KeyedHasIC,-184159247 +builtin_hash,KeyedHasICBaseline,150897937 +builtin_hash,KeyedHasIC_Megamorphic,325671088 +builtin_hash,IterableToList,-927449562 +builtin_hash,IterableToListWithSymbolLookup,-708423733 +builtin_hash,IterableToListMayPreserveHoles,-1051869903 +builtin_hash,FindOrderedHashMapEntry,-320047027 +builtin_hash,MapConstructor,-273849543 +builtin_hash,MapPrototypeSet,-438010061 +builtin_hash,MapPrototypeDelete,-650460231 +builtin_hash,MapPrototypeGet,-169555850 +builtin_hash,MapPrototypeHas,801403049 +builtin_hash,MapPrototypeEntries,-1013383243 +builtin_hash,MapPrototypeGetSize,-643608746 +builtin_hash,MapPrototypeForEach,397971331 +builtin_hash,MapPrototypeKeys,-1013383243 +builtin_hash,MapPrototypeValues,-1013383243 +builtin_hash,MapIteratorPrototypeNext,-58230590 +builtin_hash,MapIteratorToList,-500405827 +builtin_hash,SameValueNumbersOnly,225193452 +builtin_hash,Add_Baseline,986710997 +builtin_hash,AddSmi_Baseline,422902597 +builtin_hash,Subtract_Baseline,515924102 +builtin_hash,SubtractSmi_Baseline,-270105773 +builtin_hash,Multiply_Baseline,1010707471 +builtin_hash,MultiplySmi_Baseline,-436355449 +builtin_hash,Divide_Baseline,-545594124 +builtin_hash,DivideSmi_Baseline,-730711433 +builtin_hash,Modulus_Baseline,368591854 +builtin_hash,ModulusSmi_Baseline,-170655443 +builtin_hash,Exponentiate_Baseline,957426187 +builtin_hash,BitwiseAnd_Baseline,354476460 +builtin_hash,BitwiseAndSmi_Baseline,828880803 +builtin_hash,BitwiseOr_Baseline,-583084875 +builtin_hash,BitwiseOrSmi_Baseline,-795226409 
+builtin_hash,BitwiseXor_Baseline,1012539154 +builtin_hash,BitwiseXorSmi_Baseline,-392216447 +builtin_hash,ShiftLeft_Baseline,361604397 +builtin_hash,ShiftLeftSmi_Baseline,5007264 +builtin_hash,ShiftRight_Baseline,-420303545 +builtin_hash,ShiftRightSmi_Baseline,-112240434 +builtin_hash,ShiftRightLogical_Baseline,540201589 +builtin_hash,ShiftRightLogicalSmi_Baseline,788439127 +builtin_hash,Add_WithFeedback,-229648387 +builtin_hash,Subtract_WithFeedback,376218593 +builtin_hash,Modulus_WithFeedback,-94506121 +builtin_hash,BitwiseOr_WithFeedback,468043323 +builtin_hash,Equal_Baseline,124000003 +builtin_hash,StrictEqual_Baseline,-1036096086 +builtin_hash,LessThan_Baseline,215883171 +builtin_hash,GreaterThan_Baseline,-830551473 +builtin_hash,LessThanOrEqual_Baseline,580468296 +builtin_hash,GreaterThanOrEqual_Baseline,146507237 +builtin_hash,Equal_WithFeedback,-1006818792 +builtin_hash,StrictEqual_WithFeedback,-600679172 +builtin_hash,LessThan_WithFeedback,-280032142 +builtin_hash,GreaterThan_WithFeedback,-753289225 +builtin_hash,GreaterThanOrEqual_WithFeedback,-99952431 +builtin_hash,BitwiseNot_Baseline,-805521700 +builtin_hash,Decrement_Baseline,-802142449 +builtin_hash,Increment_Baseline,-165792101 +builtin_hash,Negate_Baseline,-490529739 +builtin_hash,ObjectAssign,-223190746 +builtin_hash,ObjectCreate,-412976913 +builtin_hash,ObjectEntries,-161513173 +builtin_hash,ObjectGetOwnPropertyDescriptor,-318975900 +builtin_hash,ObjectGetOwnPropertyNames,-771378931 +builtin_hash,ObjectIs,754529791 +builtin_hash,ObjectKeys,868266099 +builtin_hash,ObjectPrototypeHasOwnProperty,368139529 +builtin_hash,ObjectToString,-682499204 +builtin_hash,InstanceOf_WithFeedback,-1073126021 +builtin_hash,InstanceOf_Baseline,-322167849 +builtin_hash,ForInEnumerate,-155288347 +builtin_hash,ForInPrepare,-765799522 +builtin_hash,ForInFilter,562291169 +builtin_hash,RegExpConstructor,646833247 +builtin_hash,RegExpExecAtom,345347095 +builtin_hash,RegExpExecInternal,1011835160 +builtin_hash,FindOrderedHashSetEntry,-610957306 +builtin_hash,SetConstructor,-240775710 +builtin_hash,SetPrototypeHas,801403049 +builtin_hash,SetPrototypeAdd,173213477 +builtin_hash,SetPrototypeDelete,-593129092 +builtin_hash,SetPrototypeEntries,-1013383243 +builtin_hash,SetPrototypeGetSize,-643608746 +builtin_hash,SetPrototypeForEach,335951491 +builtin_hash,SetPrototypeValues,-1013383243 +builtin_hash,SetIteratorPrototypeNext,-665998715 +builtin_hash,SetOrSetIteratorToList,-885447300 +builtin_hash,StringFromCharCode,-912540455 +builtin_hash,StringPrototypeReplace,66129801 +builtin_hash,StringPrototypeSplit,-362413227 +builtin_hash,TypedArrayConstructor,784903226 +builtin_hash,TypedArrayPrototypeByteLength,-727961620 +builtin_hash,TypedArrayPrototypeLength,733765018 +builtin_hash,WeakMapConstructor,1062092506 +builtin_hash,WeakMapLookupHashIndex,-142601393 +builtin_hash,WeakMapGet,294891847 +builtin_hash,WeakMapPrototypeHas,849790636 +builtin_hash,WeakMapPrototypeSet,345042088 +builtin_hash,WeakSetConstructor,105581429 +builtin_hash,WeakSetPrototypeHas,849790636 +builtin_hash,WeakSetPrototypeAdd,617138317 +builtin_hash,WeakCollectionSet,166987452 +builtin_hash,AsyncGeneratorResolve,249154931 +builtin_hash,AsyncGeneratorYieldWithAwait,295024719 +builtin_hash,AsyncGeneratorResumeNext,-291585996 +builtin_hash,AsyncGeneratorPrototypeNext,-917251663 +builtin_hash,AsyncGeneratorAwaitUncaught,-475035157 +builtin_hash,AsyncGeneratorAwaitResolveClosure,252907365 +builtin_hash,AsyncGeneratorYieldWithAwaitResolveClosure,523444337 
+builtin_hash,StringAdd_CheckNone,122784498 +builtin_hash,SubString,42338335 +builtin_hash,GetProperty,244104330 +builtin_hash,GetPropertyWithReceiver,-361896834 +builtin_hash,SetProperty,-14606917 +builtin_hash,CreateDataProperty,56144293 +builtin_hash,ArrayPrototypeConcat,-629648509 +builtin_hash,ArrayEvery,420214237 +builtin_hash,ArrayFilterLoopLazyDeoptContinuation,784278374 +builtin_hash,ArrayFilterLoopContinuation,-537020475 +builtin_hash,ArrayFilter,1060321948 +builtin_hash,ArrayPrototypeFind,-585495057 +builtin_hash,ArrayForEachLoopLazyDeoptContinuation,-696710490 +builtin_hash,ArrayForEachLoopContinuation,168795773 +builtin_hash,ArrayForEach,707321002 +builtin_hash,ArrayFrom,-659188931 +builtin_hash,ArrayIsArray,232602198 +builtin_hash,LoadJoinElement_FastSmiOrObjectElements_0,680814132 +builtin_hash,LoadJoinElement_FastDoubleElements_0,-901304444 +builtin_hash,JoinStackPush,-686648212 +builtin_hash,JoinStackPop,190013026 +builtin_hash,ArrayPrototypeJoin,-35537300 +builtin_hash,ArrayPrototypeToString,-918592343 +builtin_hash,ArrayPrototypeLastIndexOf,17251424 +builtin_hash,ArrayMapLoopLazyDeoptContinuation,571271309 +builtin_hash,ArrayMapLoopContinuation,323522123 +builtin_hash,ArrayMap,301006305 +builtin_hash,ArrayReduceLoopLazyDeoptContinuation,802810816 +builtin_hash,ArrayReduceLoopContinuation,90909940 +builtin_hash,ArrayReduce,-82429841 +builtin_hash,ArrayPrototypeReverse,419566432 +builtin_hash,ArrayPrototypeShift,70747297 +builtin_hash,ArrayPrototypeSlice,-52082583 +builtin_hash,ArraySome,793025165 +builtin_hash,ArrayPrototypeSplice,-778396927 +builtin_hash,ArrayPrototypeUnshift,874971167 +builtin_hash,ArrayBufferPrototypeGetByteLength,856452822 +builtin_hash,ArrayBufferIsView,1032022175 +builtin_hash,ToInteger,571151444 +builtin_hash,FastCreateDataProperty,870096873 +builtin_hash,BooleanConstructor,404195496 +builtin_hash,BooleanPrototypeToString,238471687 +builtin_hash,ToString,-884108556 +builtin_hash,StringPrototypeToString,-772653609 +builtin_hash,StringPrototypeValueOf,-772653609 +builtin_hash,StringPrototypeCharAt,888809448 +builtin_hash,StringPrototypeCharCodeAt,-227472234 +builtin_hash,StringPrototypeCodePointAt,61946543 +builtin_hash,StringPrototypeConcat,861254127 +builtin_hash,StringConstructor,-918118629 +builtin_hash,StringAddConvertLeft,-219299649 +builtin_hash,StringAddConvertRight,616228664 +builtin_hash,StringCharAt,-4884031 +builtin_hash,FastNewClosureBaseline,-52998587 +builtin_hash,FastNewFunctionContextFunction,-1009868545 +builtin_hash,CreateRegExpLiteral,1052197216 +builtin_hash,CreateShallowArrayLiteral,-964230745 +builtin_hash,CreateEmptyArrayLiteral,223771159 +builtin_hash,CreateShallowObjectLiteral,608722722 +builtin_hash,ObjectConstructor,964183658 +builtin_hash,CreateEmptyLiteralObject,-604735671 +builtin_hash,NumberConstructor,71247802 +builtin_hash,StringToNumber,338191864 +builtin_hash,NonNumberToNumber,-935639762 +builtin_hash,NonNumberToNumeric,772033950 +builtin_hash,ToNumeric,1038000251 +builtin_hash,NumberToString,349605053 +builtin_hash,ToBoolean,-651891533 +builtin_hash,ToBooleanForBaselineJump,-345306524 +builtin_hash,ToLength,334926030 +builtin_hash,ToName,-958704264 +builtin_hash,ToObject,466844704 +builtin_hash,NonPrimitiveToPrimitive_Default,-135149567 +builtin_hash,NonPrimitiveToPrimitive_Number,-135149567 +builtin_hash,NonPrimitiveToPrimitive_String,-135149567 +builtin_hash,OrdinaryToPrimitive_Number,977211335 +builtin_hash,OrdinaryToPrimitive_String,977211335 +builtin_hash,DataViewPrototypeGetByteLength,-813425866 
+builtin_hash,DataViewPrototypeGetFloat64,-154734323 +builtin_hash,DataViewPrototypeSetUint32,816035861 +builtin_hash,DataViewPrototypeSetFloat64,779399418 +builtin_hash,FunctionPrototypeHasInstance,-95071236 +builtin_hash,FastFunctionPrototypeBind,-731291747 +builtin_hash,ForInNext,-935081187 +builtin_hash,GetIteratorWithFeedback,952367600 +builtin_hash,GetIteratorBaseline,-428929310 +builtin_hash,CallIteratorWithFeedback,-997498631 +builtin_hash,MathAbs,791053705 +builtin_hash,MathCeil,479583478 +builtin_hash,MathFloor,843767669 +builtin_hash,MathRound,679517696 +builtin_hash,MathPow,315117335 +builtin_hash,MathMax,170958432 +builtin_hash,MathMin,-697028591 +builtin_hash,MathAsin,546404306 +builtin_hash,MathAtan2,-509511765 +builtin_hash,MathCos,907147156 +builtin_hash,MathExp,-622287064 +builtin_hash,MathFround,951312971 +builtin_hash,MathImul,750440578 +builtin_hash,MathLog,-240154692 +builtin_hash,MathSin,-666255595 +builtin_hash,MathSign,358296598 +builtin_hash,MathSqrt,376524398 +builtin_hash,MathTan,509900921 +builtin_hash,MathTanh,-99681025 +builtin_hash,MathRandom,761144822 +builtin_hash,NumberPrototypeToString,-379708910 +builtin_hash,NumberIsInteger,-216955149 +builtin_hash,NumberIsNaN,-901582353 +builtin_hash,NumberParseFloat,-608188050 +builtin_hash,ParseInt,-160264841 +builtin_hash,NumberParseInt,-548370817 +builtin_hash,Add,564570422 +builtin_hash,Subtract,638154168 +builtin_hash,Multiply,492896985 +builtin_hash,Divide,-323417562 +builtin_hash,Modulus,-1073163320 +builtin_hash,CreateObjectWithoutProperties,-847663210 +builtin_hash,ObjectIsExtensible,152531438 +builtin_hash,ObjectPreventExtensions,-713378297 +builtin_hash,ObjectGetPrototypeOf,-270542452 +builtin_hash,ObjectSetPrototypeOf,-46279001 +builtin_hash,ObjectPrototypeToString,799867293 +builtin_hash,ObjectPrototypeValueOf,681014822 +builtin_hash,FulfillPromise,-157790719 +builtin_hash,NewPromiseCapability,-849832514 +builtin_hash,PromiseCapabilityDefaultResolve,198574955 +builtin_hash,PerformPromiseThen,-1003545837 +builtin_hash,PromiseAll,172257811 +builtin_hash,PromiseAllResolveElementClosure,-429070847 +builtin_hash,PromiseConstructor,-813002504 +builtin_hash,PromisePrototypeCatch,881185065 +builtin_hash,PromiseFulfillReactionJob,1069906663 +builtin_hash,PromiseResolveTrampoline,584305889 +builtin_hash,PromiseResolve,774376525 +builtin_hash,ResolvePromise,-348045499 +builtin_hash,PromisePrototypeThen,972876029 +builtin_hash,PromiseResolveThenableJob,-24601962 +builtin_hash,ProxyConstructor,462534935 +builtin_hash,ProxyGetProperty,-1026339756 +builtin_hash,ProxyIsExtensible,-863573816 +builtin_hash,ProxyPreventExtensions,139876653 +builtin_hash,ReflectGet,-917044458 +builtin_hash,ReflectHas,584305889 +builtin_hash,RegExpPrototypeExec,-1050512290 +builtin_hash,RegExpMatchFast,794780294 +builtin_hash,RegExpReplace,-599615598 +builtin_hash,RegExpPrototypeReplace,985947001 +builtin_hash,RegExpSearchFast,-655744379 +builtin_hash,RegExpPrototypeSourceGetter,-842127011 +builtin_hash,RegExpSplit,882917446 +builtin_hash,RegExpPrototypeTest,36603632 +builtin_hash,RegExpPrototypeTestFast,170577886 +builtin_hash,RegExpPrototypeGlobalGetter,-787799554 +builtin_hash,RegExpPrototypeIgnoreCaseGetter,198844413 +builtin_hash,RegExpPrototypeMultilineGetter,-910406363 +builtin_hash,RegExpPrototypeHasIndicesGetter,738086388 +builtin_hash,RegExpPrototypeDotAllGetter,334669429 +builtin_hash,RegExpPrototypeStickyGetter,800064458 +builtin_hash,RegExpPrototypeUnicodeGetter,489690780 +builtin_hash,RegExpPrototypeFlagsGetter,-224095936 
+builtin_hash,StringPrototypeEndsWith,-987712417 +builtin_hash,StringPrototypeIncludes,-217252393 +builtin_hash,StringPrototypeIndexOf,633191246 +builtin_hash,StringPrototypeIterator,836933598 +builtin_hash,StringIteratorPrototypeNext,-913259598 +builtin_hash,StringPrototypeMatch,634725082 +builtin_hash,StringPrototypeSearch,634725082 +builtin_hash,StringRepeat,146146790 +builtin_hash,StringPrototypeSlice,-354759606 +builtin_hash,StringPrototypeStartsWith,-27064544 +builtin_hash,StringPrototypeSubstr,-580369335 +builtin_hash,StringPrototypeSubstring,432057087 +builtin_hash,StringPrototypeTrim,413903909 +builtin_hash,SymbolPrototypeToString,-299401864 +builtin_hash,CreateTypedArray,680205708 +builtin_hash,TypedArrayFrom,-1010794576 +builtin_hash,TypedArrayPrototypeSet,908791947 +builtin_hash,TypedArrayPrototypeSubArray,-259511191 +builtin_hash,NewSloppyArgumentsElements,987039656 +builtin_hash,NewStrictArgumentsElements,811685066 +builtin_hash,NewRestArgumentsElements,506972005 +builtin_hash,FastNewSloppyArguments,266310136 +builtin_hash,FastNewStrictArguments,292190921 +builtin_hash,FastNewRestArguments,757900916 +builtin_hash,StringSlowFlatten,-1052562445 +builtin_hash,StringIndexOf,257386893 +builtin_hash,Load_FastSmiElements_0,1073407008 +builtin_hash,Load_FastObjectElements_0,1073407008 +builtin_hash,Store_FastSmiElements_0,30666818 +builtin_hash,Store_FastObjectElements_0,951821143 +builtin_hash,SortCompareDefault,367817675 +builtin_hash,SortCompareUserFn,713062731 +builtin_hash,Copy,314823270 +builtin_hash,MergeAt,238953995 +builtin_hash,GallopLeft,-902489773 +builtin_hash,GallopRight,-89876014 +builtin_hash,ArrayTimSort,241000486 +builtin_hash,ArrayPrototypeSort,304686317 +builtin_hash,StringFastLocaleCompare,805312292 +builtin_hash,WasmInt32ToHeapNumber,-952774935 +builtin_hash,WasmTaggedNonSmiToInt32,-293378356 +builtin_hash,WasmTriggerTierUp,429384824 +builtin_hash,WasmStackGuard,1062523926 +builtin_hash,CanUseSameAccessor_FastSmiElements_0,185432218 +builtin_hash,CanUseSameAccessor_FastObjectElements_0,185432218 +builtin_hash,StringPrototypeToLowerCaseIntl,-318663010 +builtin_hash,StringToLowerCaseIntl,545745188 +builtin_hash,WideHandler,-1007335397 +builtin_hash,ExtraWideHandler,-1007335397 +builtin_hash,LdarHandler,-107846222 +builtin_hash,LdaZeroHandler,-1049743757 +builtin_hash,LdaSmiHandler,578097911 +builtin_hash,LdaUndefinedHandler,929864141 +builtin_hash,LdaNullHandler,929864141 +builtin_hash,LdaTheHoleHandler,929864141 +builtin_hash,LdaTrueHandler,-722702248 +builtin_hash,LdaFalseHandler,-722702248 +builtin_hash,LdaConstantHandler,16269541 +builtin_hash,LdaContextSlotHandler,-824239973 +builtin_hash,LdaImmutableContextSlotHandler,-824239973 +builtin_hash,LdaCurrentContextSlotHandler,797490234 +builtin_hash,LdaImmutableCurrentContextSlotHandler,797490234 +builtin_hash,StarHandler,833251632 +builtin_hash,MovHandler,154956537 +builtin_hash,PushContextHandler,-12168674 +builtin_hash,PopContextHandler,-245935867 +builtin_hash,TestReferenceEqualHandler,149010419 +builtin_hash,TestUndetectableHandler,-14656053 +builtin_hash,TestNullHandler,231206083 +builtin_hash,TestUndefinedHandler,231206083 +builtin_hash,TestTypeOfHandler,811742421 +builtin_hash,LdaGlobalHandler,-713975577 +builtin_hash,LdaGlobalInsideTypeofHandler,-1036637473 +builtin_hash,StaGlobalHandler,860070325 +builtin_hash,StaContextSlotHandler,506961255 +builtin_hash,StaCurrentContextSlotHandler,-476609776 +builtin_hash,LdaLookupGlobalSlotHandler,-579647044 
+builtin_hash,LdaLookupGlobalSlotInsideTypeofHandler,-981566026 +builtin_hash,StaLookupSlotHandler,-26776709 +builtin_hash,GetNamedPropertyHandler,12677554 +builtin_hash,GetNamedPropertyFromSuperHandler,266004833 +builtin_hash,GetKeyedPropertyHandler,886814234 +builtin_hash,SetNamedPropertyHandler,535113985 +builtin_hash,DefineNamedOwnPropertyHandler,535113985 +builtin_hash,SetKeyedPropertyHandler,-966762662 +builtin_hash,DefineKeyedOwnPropertyHandler,-966762662 +builtin_hash,StaInArrayLiteralHandler,-966762662 +builtin_hash,DefineKeyedOwnPropertyInLiteralHandler,587951803 +builtin_hash,AddHandler,-1005617859 +builtin_hash,SubHandler,-392643217 +builtin_hash,MulHandler,774043098 +builtin_hash,DivHandler,-851134889 +builtin_hash,ModHandler,-974972127 +builtin_hash,ExpHandler,315906749 +builtin_hash,BitwiseOrHandler,-743595958 +builtin_hash,BitwiseXorHandler,250559085 +builtin_hash,BitwiseAndHandler,-517136702 +builtin_hash,ShiftLeftHandler,96191474 +builtin_hash,ShiftRightHandler,527145177 +builtin_hash,ShiftRightLogicalHandler,-262573075 +builtin_hash,AddSmiHandler,80685265 +builtin_hash,SubSmiHandler,3480492 +builtin_hash,MulSmiHandler,125802928 +builtin_hash,DivSmiHandler,639417969 +builtin_hash,ModSmiHandler,725314466 +builtin_hash,BitwiseOrSmiHandler,-982449315 +builtin_hash,BitwiseXorSmiHandler,-243744730 +builtin_hash,BitwiseAndSmiHandler,-177147003 +builtin_hash,ShiftLeftSmiHandler,-1064587438 +builtin_hash,ShiftRightSmiHandler,1063683921 +builtin_hash,ShiftRightLogicalSmiHandler,-471648394 +builtin_hash,IncHandler,-827986722 +builtin_hash,DecHandler,-353459880 +builtin_hash,NegateHandler,-400765437 +builtin_hash,BitwiseNotHandler,70967995 +builtin_hash,ToBooleanLogicalNotHandler,907700753 +builtin_hash,LogicalNotHandler,-207609416 +builtin_hash,TypeOfHandler,953802635 +builtin_hash,DeletePropertyStrictHandler,-316449707 +builtin_hash,DeletePropertySloppyHandler,808036376 +builtin_hash,GetSuperConstructorHandler,251341877 +builtin_hash,CallAnyReceiverHandler,439143156 +builtin_hash,CallPropertyHandler,439143156 +builtin_hash,CallProperty0Handler,-534603169 +builtin_hash,CallProperty1Handler,498249779 +builtin_hash,CallProperty2Handler,-819411032 +builtin_hash,CallUndefinedReceiverHandler,594463859 +builtin_hash,CallUndefinedReceiver0Handler,-846775891 +builtin_hash,CallUndefinedReceiver1Handler,-295634644 +builtin_hash,CallUndefinedReceiver2Handler,-184450155 +builtin_hash,CallWithSpreadHandler,439143156 +builtin_hash,CallRuntimeHandler,577184417 +builtin_hash,CallJSRuntimeHandler,611687015 +builtin_hash,InvokeIntrinsicHandler,-984127378 +builtin_hash,ConstructHandler,-721343708 +builtin_hash,ConstructWithSpreadHandler,-306460848 +builtin_hash,TestEqualHandler,808271421 +builtin_hash,TestEqualStrictHandler,329888735 +builtin_hash,TestLessThanHandler,470012740 +builtin_hash,TestGreaterThanHandler,-1062728522 +builtin_hash,TestLessThanOrEqualHandler,567090722 +builtin_hash,TestGreaterThanOrEqualHandler,386329922 +builtin_hash,TestInstanceOfHandler,490446195 +builtin_hash,TestInHandler,839610057 +builtin_hash,ToNameHandler,-725569033 +builtin_hash,ToNumberHandler,1020792744 +builtin_hash,ToNumericHandler,157199207 +builtin_hash,ToObjectHandler,-725569033 +builtin_hash,ToStringHandler,-98710919 +builtin_hash,CreateRegExpLiteralHandler,-121212255 +builtin_hash,CreateArrayLiteralHandler,266431889 +builtin_hash,CreateArrayFromIterableHandler,52334283 +builtin_hash,CreateEmptyArrayLiteralHandler,115537807 +builtin_hash,CreateObjectLiteralHandler,618199371 
+builtin_hash,CreateEmptyObjectLiteralHandler,740591432 +builtin_hash,CreateClosureHandler,2501418 +builtin_hash,CreateBlockContextHandler,666399130 +builtin_hash,CreateCatchContextHandler,22630037 +builtin_hash,CreateFunctionContextHandler,-865490982 +builtin_hash,CreateMappedArgumentsHandler,697758196 +builtin_hash,CreateUnmappedArgumentsHandler,-1055788256 +builtin_hash,CreateRestParameterHandler,-320411861 +builtin_hash,JumpLoopHandler,-662751978 +builtin_hash,JumpHandler,182907248 +builtin_hash,JumpConstantHandler,736061566 +builtin_hash,JumpIfUndefinedConstantHandler,-462384230 +builtin_hash,JumpIfNotUndefinedConstantHandler,148504964 +builtin_hash,JumpIfUndefinedOrNullConstantHandler,444469070 +builtin_hash,JumpIfTrueConstantHandler,-462384230 +builtin_hash,JumpIfFalseConstantHandler,-462384230 +builtin_hash,JumpIfToBooleanTrueConstantHandler,-247852105 +builtin_hash,JumpIfToBooleanFalseConstantHandler,-525508881 +builtin_hash,JumpIfToBooleanTrueHandler,725690453 +builtin_hash,JumpIfToBooleanFalseHandler,-413432567 +builtin_hash,JumpIfTrueHandler,2511228 +builtin_hash,JumpIfFalseHandler,2511228 +builtin_hash,JumpIfNullHandler,2511228 +builtin_hash,JumpIfNotNullHandler,-489824053 +builtin_hash,JumpIfUndefinedHandler,2511228 +builtin_hash,JumpIfNotUndefinedHandler,-489824053 +builtin_hash,JumpIfUndefinedOrNullHandler,-718321269 +builtin_hash,JumpIfJSReceiverHandler,-171543406 +builtin_hash,SwitchOnSmiNoFeedbackHandler,-188066699 +builtin_hash,ForInEnumerateHandler,-998219819 +builtin_hash,ForInPrepareHandler,480083331 +builtin_hash,ForInContinueHandler,829644977 +builtin_hash,ForInNextHandler,-132478305 +builtin_hash,ForInStepHandler,115172731 +builtin_hash,SetPendingMessageHandler,962212923 +builtin_hash,ThrowHandler,781858395 +builtin_hash,ReThrowHandler,781858395 +builtin_hash,ReturnHandler,557919322 +builtin_hash,ThrowReferenceErrorIfHoleHandler,-516587462 +builtin_hash,ThrowSuperNotCalledIfHoleHandler,-773024801 +builtin_hash,ThrowSuperAlreadyCalledIfNotHoleHandler,357828706 +builtin_hash,ThrowIfNotSuperConstructorHandler,-125687893 +builtin_hash,SwitchOnGeneratorStateHandler,-423602269 +builtin_hash,SuspendGeneratorHandler,-380438202 +builtin_hash,ResumeGeneratorHandler,-518827895 +builtin_hash,GetIteratorHandler,-378048438 +builtin_hash,ShortStarHandler,368091777 +builtin_hash,LdarWideHandler,-405004763 +builtin_hash,LdaSmiWideHandler,976476814 +builtin_hash,LdaConstantWideHandler,-171575655 +builtin_hash,LdaContextSlotWideHandler,679792213 +builtin_hash,LdaImmutableContextSlotWideHandler,679792213 +builtin_hash,LdaImmutableCurrentContextSlotWideHandler,796095575 +builtin_hash,StarWideHandler,728945870 +builtin_hash,MovWideHandler,-483424163 +builtin_hash,PushContextWideHandler,325978832 +builtin_hash,PopContextWideHandler,1044899411 +builtin_hash,TestReferenceEqualWideHandler,-444014273 +builtin_hash,LdaGlobalWideHandler,976796507 +builtin_hash,LdaGlobalInsideTypeofWideHandler,-19868457 +builtin_hash,StaGlobalWideHandler,-216106571 +builtin_hash,StaContextSlotWideHandler,-553577385 +builtin_hash,StaCurrentContextSlotWideHandler,-429895179 +builtin_hash,LdaLookupGlobalSlotWideHandler,-291087637 +builtin_hash,GetNamedPropertyWideHandler,-241439423 +builtin_hash,GetKeyedPropertyWideHandler,364433897 +builtin_hash,SetNamedPropertyWideHandler,-31139520 +builtin_hash,DefineNamedOwnPropertyWideHandler,-31139520 +builtin_hash,SetKeyedPropertyWideHandler,-363367368 +builtin_hash,DefineKeyedOwnPropertyWideHandler,-363367368 +builtin_hash,StaInArrayLiteralWideHandler,-363367368 
+builtin_hash,AddWideHandler,-667096135 +builtin_hash,SubWideHandler,-383279960 +builtin_hash,MulWideHandler,1029076402 +builtin_hash,DivWideHandler,-547887104 +builtin_hash,BitwiseOrWideHandler,9634898 +builtin_hash,BitwiseAndWideHandler,498024986 +builtin_hash,ShiftLeftWideHandler,-839529705 +builtin_hash,AddSmiWideHandler,-930183091 +builtin_hash,SubSmiWideHandler,-150212329 +builtin_hash,MulSmiWideHandler,591583886 +builtin_hash,DivSmiWideHandler,343395191 +builtin_hash,ModSmiWideHandler,-801743696 +builtin_hash,BitwiseOrSmiWideHandler,-944128869 +builtin_hash,BitwiseXorSmiWideHandler,-944761076 +builtin_hash,BitwiseAndSmiWideHandler,512282363 +builtin_hash,ShiftLeftSmiWideHandler,895748637 +builtin_hash,ShiftRightSmiWideHandler,-526800833 +builtin_hash,ShiftRightLogicalSmiWideHandler,306897250 +builtin_hash,IncWideHandler,243851885 +builtin_hash,DecWideHandler,573422516 +builtin_hash,NegateWideHandler,-256119499 +builtin_hash,CallPropertyWideHandler,899629797 +builtin_hash,CallProperty0WideHandler,649245559 +builtin_hash,CallProperty1WideHandler,-508092594 +builtin_hash,CallProperty2WideHandler,375883648 +builtin_hash,CallUndefinedReceiverWideHandler,-544740528 +builtin_hash,CallUndefinedReceiver0WideHandler,-908716826 +builtin_hash,CallUndefinedReceiver1WideHandler,-63091942 +builtin_hash,CallUndefinedReceiver2WideHandler,332968542 +builtin_hash,CallWithSpreadWideHandler,899629797 +builtin_hash,ConstructWideHandler,-133823750 +builtin_hash,TestEqualWideHandler,-560396321 +builtin_hash,TestEqualStrictWideHandler,-723086528 +builtin_hash,TestLessThanWideHandler,-954086139 +builtin_hash,TestGreaterThanWideHandler,8081046 +builtin_hash,TestLessThanOrEqualWideHandler,-869672701 +builtin_hash,TestGreaterThanOrEqualWideHandler,915149033 +builtin_hash,TestInstanceOfWideHandler,-907585471 +builtin_hash,TestInWideHandler,952743861 +builtin_hash,ToNumericWideHandler,848961913 +builtin_hash,CreateRegExpLiteralWideHandler,-434410141 +builtin_hash,CreateArrayLiteralWideHandler,-5646835 +builtin_hash,CreateEmptyArrayLiteralWideHandler,-585282797 +builtin_hash,CreateObjectLiteralWideHandler,484880724 +builtin_hash,CreateClosureWideHandler,158285856 +builtin_hash,CreateBlockContextWideHandler,63996092 +builtin_hash,CreateFunctionContextWideHandler,-965079469 +builtin_hash,JumpLoopWideHandler,999208902 +builtin_hash,JumpWideHandler,182907248 +builtin_hash,JumpIfToBooleanTrueWideHandler,592062439 +builtin_hash,JumpIfToBooleanFalseWideHandler,-182416778 +builtin_hash,JumpIfTrueWideHandler,675926914 +builtin_hash,JumpIfFalseWideHandler,675926914 +builtin_hash,SwitchOnSmiNoFeedbackWideHandler,438009109 +builtin_hash,ForInPrepareWideHandler,4093040 +builtin_hash,ForInNextWideHandler,1049458669 +builtin_hash,ThrowReferenceErrorIfHoleWideHandler,269750921 +builtin_hash,GetIteratorWideHandler,-359333408 +builtin_hash,LdaSmiExtraWideHandler,976476814 +builtin_hash,LdaGlobalExtraWideHandler,-300848501 +builtin_hash,AddSmiExtraWideHandler,-986018015 +builtin_hash,SubSmiExtraWideHandler,-1056721725 +builtin_hash,MulSmiExtraWideHandler,-437583101 +builtin_hash,DivSmiExtraWideHandler,91019683 +builtin_hash,BitwiseOrSmiExtraWideHandler,-280647725 +builtin_hash,BitwiseXorSmiExtraWideHandler,945663448 +builtin_hash,BitwiseAndSmiExtraWideHandler,764701867 +builtin_hash,CallUndefinedReceiverExtraWideHandler,-252891433 +builtin_hash,CallUndefinedReceiver1ExtraWideHandler,984557820 +builtin_hash,CallUndefinedReceiver2ExtraWideHandler,-972056227
diff --git a/deps/v8/tools/builtins-pgo/arm64.profile b/deps/v8/tools/builtins-pgo/arm64.profile
index 15b5393b017c13..1173d8ab94dc67 100644
--- a/deps/v8/tools/builtins-pgo/arm64.profile
+++ b/deps/v8/tools/builtins-pgo/arm64.profile
@@ -245,290 +245,328 @@ block_hint,ToNumberConvertBigInt,9,10,1
block_hint,Typeof,17,18,0 block_hint,Typeof,9,10,0 block_hint,Typeof,13,14,1 -block_hint,KeyedLoadIC_PolymorphicName,238,239,1 -block_hint,KeyedLoadIC_PolymorphicName,98,99,1 -block_hint,KeyedLoadIC_PolymorphicName,256,257,0 -block_hint,KeyedLoadIC_PolymorphicName,60,61,0 -block_hint,KeyedLoadIC_PolymorphicName,129,130,1 -block_hint,KeyedLoadIC_PolymorphicName,292,293,1 -block_hint,KeyedLoadIC_PolymorphicName,240,241,1 -block_hint,KeyedLoadIC_PolymorphicName,24,25,1 -block_hint,KeyedLoadIC_PolymorphicName,161,162,0 -block_hint,KeyedLoadIC_PolymorphicName,118,119,1 -block_hint,KeyedLoadIC_PolymorphicName,242,243,1 -block_hint,KeyedLoadIC_PolymorphicName,171,172,0 -block_hint,KeyedLoadIC_PolymorphicName,45,46,1 -block_hint,KeyedLoadIC_PolymorphicName,76,77,0 -block_hint,KeyedLoadIC_PolymorphicName,246,247,0 -block_hint,KeyedLoadIC_PolymorphicName,281,282,1 -block_hint,KeyedLoadIC_PolymorphicName,28,29,0 +block_hint,KeyedLoadIC_PolymorphicName,244,245,1 +block_hint,KeyedLoadIC_PolymorphicName,96,97,1 +block_hint,KeyedLoadIC_PolymorphicName,260,261,0 +block_hint,KeyedLoadIC_PolymorphicName,58,59,0 +block_hint,KeyedLoadIC_PolymorphicName,133,134,1 +block_hint,KeyedLoadIC_PolymorphicName,298,299,1 +block_hint,KeyedLoadIC_PolymorphicName,330,331,1 +block_hint,KeyedLoadIC_PolymorphicName,98,99,0 +block_hint,KeyedLoadIC_PolymorphicName,100,101,0 +block_hint,KeyedLoadIC_PolymorphicName,22,23,1 +block_hint,KeyedLoadIC_PolymorphicName,165,166,0 +block_hint,KeyedLoadIC_PolymorphicName,122,123,1 +block_hint,KeyedLoadIC_PolymorphicName,332,333,1 +block_hint,KeyedLoadIC_PolymorphicName,110,111,0 +block_hint,KeyedLoadIC_PolymorphicName,175,176,0 +block_hint,KeyedLoadIC_PolymorphicName,43,44,1 +block_hint,KeyedLoadIC_PolymorphicName,74,75,0 +block_hint,KeyedLoadIC_PolymorphicName,250,251,0 +block_hint,KeyedLoadIC_PolymorphicName,287,288,1 block_hint,KeyedLoadIC_PolymorphicName,26,27,0 -block_hint,KeyedLoadIC_PolymorphicName,28,29,0 block_hint,KeyedLoadIC_PolymorphicName,24,25,0 -block_hint,KeyedStoreIC_Megamorphic,375,376,1 -block_hint,KeyedStoreIC_Megamorphic,377,378,0 -block_hint,KeyedStoreIC_Megamorphic,1175,1176,0 -block_hint,KeyedStoreIC_Megamorphic,1177,1178,1 -block_hint,KeyedStoreIC_Megamorphic,1161,1162,1 -block_hint,KeyedStoreIC_Megamorphic,1110,1111,0 -block_hint,KeyedStoreIC_Megamorphic,905,906,1 block_hint,KeyedStoreIC_Megamorphic,379,380,1 -block_hint,KeyedStoreIC_Megamorphic,1169,1170,0 -block_hint,KeyedStoreIC_Megamorphic,1154,1155,0 -block_hint,KeyedStoreIC_Megamorphic,597,598,0 -block_hint,KeyedStoreIC_Megamorphic,191,192,1 -block_hint,KeyedStoreIC_Megamorphic,1025,1026,0 -block_hint,KeyedStoreIC_Megamorphic,195,196,1 -block_hint,KeyedStoreIC_Megamorphic,197,198,0 -block_hint,KeyedStoreIC_Megamorphic,1104,1105,0 -block_hint,KeyedStoreIC_Megamorphic,1113,1114,0 -block_hint,KeyedStoreIC_Megamorphic,917,918,0 -block_hint,KeyedStoreIC_Megamorphic,487,488,0 -block_hint,KeyedStoreIC_Megamorphic,887,888,0 -block_hint,KeyedStoreIC_Megamorphic,921,922,0 -block_hint,KeyedStoreIC_Megamorphic,919,920,1 -block_hint,KeyedStoreIC_Megamorphic,489,490,1 -block_hint,KeyedStoreIC_Megamorphic,495,496,1 -block_hint,KeyedStoreIC_Megamorphic,497,498,0 -block_hint,KeyedStoreIC_Megamorphic,925,926,1 -block_hint,KeyedStoreIC_Megamorphic,499,500,0 -block_hint,KeyedStoreIC_Megamorphic,501,502,1
-block_hint,KeyedStoreIC_Megamorphic,923,924,1 -block_hint,KeyedStoreIC_Megamorphic,493,494,1 +block_hint,KeyedStoreIC_Megamorphic,381,382,0 +block_hint,KeyedStoreIC_Megamorphic,1216,1217,0 +block_hint,KeyedStoreIC_Megamorphic,1218,1219,1 +block_hint,KeyedStoreIC_Megamorphic,1203,1204,1 +block_hint,KeyedStoreIC_Megamorphic,1140,1141,0 +block_hint,KeyedStoreIC_Megamorphic,915,916,1 +block_hint,KeyedStoreIC_Megamorphic,383,384,1 +block_hint,KeyedStoreIC_Megamorphic,1228,1229,0 +block_hint,KeyedStoreIC_Megamorphic,1211,1212,0 +block_hint,KeyedStoreIC_Megamorphic,601,602,0 +block_hint,KeyedStoreIC_Megamorphic,746,747,1 +block_hint,KeyedStoreIC_Megamorphic,603,604,0 +block_hint,KeyedStoreIC_Megamorphic,1191,1192,0 +block_hint,KeyedStoreIC_Megamorphic,1041,1042,0 +block_hint,KeyedStoreIC_Megamorphic,1168,1169,0 +block_hint,KeyedStoreIC_Megamorphic,192,193,1 +block_hint,KeyedStoreIC_Megamorphic,194,195,0 +block_hint,KeyedStoreIC_Megamorphic,1134,1135,0 +block_hint,KeyedStoreIC_Megamorphic,1143,1144,0 +block_hint,KeyedStoreIC_Megamorphic,927,928,0 block_hint,KeyedStoreIC_Megamorphic,491,492,0 -block_hint,KeyedStoreIC_Megamorphic,1087,1088,1 -block_hint,KeyedStoreIC_Megamorphic,1140,1141,1 -block_hint,KeyedStoreIC_Megamorphic,885,886,0 -block_hint,KeyedStoreIC_Megamorphic,347,348,1 -block_hint,KeyedStoreIC_Megamorphic,333,334,1 -block_hint,KeyedStoreIC_Megamorphic,1085,1086,1 -block_hint,KeyedStoreIC_Megamorphic,678,679,0 -block_hint,KeyedStoreIC_Megamorphic,535,536,0 -block_hint,KeyedStoreIC_Megamorphic,537,538,0 -block_hint,KeyedStoreIC_Megamorphic,1029,1030,0 -block_hint,KeyedStoreIC_Megamorphic,543,544,1 -block_hint,KeyedStoreIC_Megamorphic,600,601,0 -block_hint,KeyedStoreIC_Megamorphic,545,546,0 +block_hint,KeyedStoreIC_Megamorphic,895,896,0 +block_hint,KeyedStoreIC_Megamorphic,931,932,0 +block_hint,KeyedStoreIC_Megamorphic,929,930,1 +block_hint,KeyedStoreIC_Megamorphic,493,494,1 +block_hint,KeyedStoreIC_Megamorphic,499,500,1 +block_hint,KeyedStoreIC_Megamorphic,501,502,0 +block_hint,KeyedStoreIC_Megamorphic,935,936,1 +block_hint,KeyedStoreIC_Megamorphic,503,504,0 +block_hint,KeyedStoreIC_Megamorphic,505,506,1 +block_hint,KeyedStoreIC_Megamorphic,933,934,1 +block_hint,KeyedStoreIC_Megamorphic,497,498,1 +block_hint,KeyedStoreIC_Megamorphic,495,496,0 +block_hint,KeyedStoreIC_Megamorphic,1115,1116,1 +block_hint,KeyedStoreIC_Megamorphic,1177,1178,1 +block_hint,KeyedStoreIC_Megamorphic,893,894,0 +block_hint,KeyedStoreIC_Megamorphic,350,351,1 +block_hint,KeyedStoreIC_Megamorphic,336,337,1 +block_hint,KeyedStoreIC_Megamorphic,1113,1114,1 +block_hint,KeyedStoreIC_Megamorphic,683,684,0 +block_hint,KeyedStoreIC_Megamorphic,539,540,0 +block_hint,KeyedStoreIC_Megamorphic,541,542,0 +block_hint,KeyedStoreIC_Megamorphic,1045,1046,0 block_hint,KeyedStoreIC_Megamorphic,547,548,1 -block_hint,KeyedStoreIC_Megamorphic,203,204,1 +block_hint,KeyedStoreIC_Megamorphic,1071,1072,0 +block_hint,KeyedStoreIC_Megamorphic,606,607,0 +block_hint,KeyedStoreIC_Megamorphic,1193,1194,0 block_hint,KeyedStoreIC_Megamorphic,549,550,0 -block_hint,KeyedStoreIC_Megamorphic,205,206,0 -block_hint,KeyedStoreIC_Megamorphic,207,208,0 -block_hint,KeyedStoreIC_Megamorphic,940,941,0 +block_hint,KeyedStoreIC_Megamorphic,1047,1048,0 block_hint,KeyedStoreIC_Megamorphic,551,552,1 +block_hint,KeyedStoreIC_Megamorphic,200,201,1 block_hint,KeyedStoreIC_Megamorphic,553,554,0 +block_hint,KeyedStoreIC_Megamorphic,202,203,0 +block_hint,KeyedStoreIC_Megamorphic,204,205,0 +block_hint,KeyedStoreIC_Megamorphic,950,951,0 
block_hint,KeyedStoreIC_Megamorphic,555,556,1 block_hint,KeyedStoreIC_Megamorphic,557,558,0 -block_hint,KeyedStoreIC_Megamorphic,1118,1119,0 block_hint,KeyedStoreIC_Megamorphic,559,560,1 -block_hint,KeyedStoreIC_Megamorphic,894,895,0 -block_hint,KeyedStoreIC_Megamorphic,1120,1121,0 -block_hint,KeyedStoreIC_Megamorphic,561,562,1 -block_hint,KeyedStoreIC_Megamorphic,567,568,1 -block_hint,KeyedStoreIC_Megamorphic,569,570,0 -block_hint,KeyedStoreIC_Megamorphic,571,572,0 -block_hint,KeyedStoreIC_Megamorphic,573,574,1 -block_hint,KeyedStoreIC_Megamorphic,947,948,1 +block_hint,KeyedStoreIC_Megamorphic,561,562,0 +block_hint,KeyedStoreIC_Megamorphic,1148,1149,0 +block_hint,KeyedStoreIC_Megamorphic,563,564,1 +block_hint,KeyedStoreIC_Megamorphic,902,903,0 +block_hint,KeyedStoreIC_Megamorphic,1150,1151,0 block_hint,KeyedStoreIC_Megamorphic,565,566,1 -block_hint,KeyedStoreIC_Megamorphic,563,564,0 -block_hint,KeyedStoreIC_Megamorphic,1173,1174,0 -block_hint,KeyedStoreIC_Megamorphic,1186,1187,1 -block_hint,KeyedStoreIC_Megamorphic,1183,1184,1 -block_hint,KeyedStoreIC_Megamorphic,1102,1103,1 -block_hint,KeyedStoreIC_Megamorphic,964,965,1 -block_hint,KeyedStoreIC_Megamorphic,209,210,0 -block_hint,KeyedStoreIC_Megamorphic,359,360,0 -block_hint,KeyedStoreIC_Megamorphic,605,606,1 -block_hint,KeyedStoreIC_Megamorphic,1013,1014,0 -block_hint,KeyedStoreIC_Megamorphic,688,689,0 +block_hint,KeyedStoreIC_Megamorphic,571,572,1 +block_hint,KeyedStoreIC_Megamorphic,573,574,0 block_hint,KeyedStoreIC_Megamorphic,575,576,0 -block_hint,KeyedStoreIC_Megamorphic,168,169,1 -block_hint,KeyedStoreIC_Megamorphic,577,578,0 +block_hint,KeyedStoreIC_Megamorphic,577,578,1 +block_hint,KeyedStoreIC_Megamorphic,957,958,1 +block_hint,KeyedStoreIC_Megamorphic,569,570,1 +block_hint,KeyedStoreIC_Megamorphic,567,568,0 +block_hint,KeyedStoreIC_Megamorphic,1214,1215,0 +block_hint,KeyedStoreIC_Megamorphic,1231,1232,1 +block_hint,KeyedStoreIC_Megamorphic,1224,1225,1 +block_hint,KeyedStoreIC_Megamorphic,1130,1131,1 +block_hint,KeyedStoreIC_Megamorphic,975,976,1 +block_hint,KeyedStoreIC_Megamorphic,206,207,0 +block_hint,KeyedStoreIC_Megamorphic,362,363,0 +block_hint,KeyedStoreIC_Megamorphic,977,978,1 +block_hint,KeyedStoreIC_Megamorphic,214,215,0 +block_hint,KeyedStoreIC_Megamorphic,1027,1028,0 +block_hint,KeyedStoreIC_Megamorphic,693,694,0 block_hint,KeyedStoreIC_Megamorphic,579,580,0 -block_hint,KeyedStoreIC_Megamorphic,1034,1035,0 -block_hint,KeyedStoreIC_Megamorphic,581,582,1 -block_hint,KeyedStoreIC_Megamorphic,953,954,0 -block_hint,KeyedStoreIC_Megamorphic,970,971,0 -block_hint,KeyedStoreIC_Megamorphic,749,750,1 -block_hint,KeyedStoreIC_Megamorphic,221,222,1 -block_hint,KeyedStoreIC_Megamorphic,1036,1037,0 -block_hint,KeyedStoreIC_Megamorphic,227,228,0 -block_hint,KeyedStoreIC_Megamorphic,753,754,0 -block_hint,KeyedStoreIC_Megamorphic,589,590,0 -block_hint,KeyedStoreIC_Megamorphic,1106,1107,0 -block_hint,KeyedStoreIC_Megamorphic,1143,1144,0 -block_hint,KeyedStoreIC_Megamorphic,896,897,0 -block_hint,KeyedStoreIC_Megamorphic,174,175,1 -block_hint,KeyedStoreIC_Megamorphic,176,177,1 -block_hint,KeyedStoreIC_Megamorphic,369,370,0 -block_hint,KeyedStoreIC_Megamorphic,178,179,1 -block_hint,KeyedStoreIC_Megamorphic,371,372,0 -block_hint,KeyedStoreIC_Megamorphic,180,181,1 -block_hint,KeyedStoreIC_Megamorphic,233,234,0 -block_hint,KeyedStoreIC_Megamorphic,182,183,1 -block_hint,KeyedStoreIC_Megamorphic,184,185,1 -block_hint,KeyedStoreIC_Megamorphic,1018,1019,0 -block_hint,KeyedStoreIC_Megamorphic,186,187,1 
-block_hint,KeyedStoreIC_Megamorphic,915,916,1 -block_hint,KeyedStoreIC_Megamorphic,481,482,1 -block_hint,KeyedStoreIC_Megamorphic,728,729,0 -block_hint,KeyedStoreIC_Megamorphic,909,910,1 -block_hint,KeyedStoreIC_Megamorphic,409,410,0 -block_hint,KeyedStoreIC_Megamorphic,411,412,0 -block_hint,KeyedStoreIC_Megamorphic,251,252,1 +block_hint,KeyedStoreIC_Megamorphic,167,168,1 +block_hint,KeyedStoreIC_Megamorphic,581,582,0 +block_hint,KeyedStoreIC_Megamorphic,583,584,0 +block_hint,KeyedStoreIC_Megamorphic,1054,1055,0 +block_hint,KeyedStoreIC_Megamorphic,585,586,1 +block_hint,KeyedStoreIC_Megamorphic,963,964,0 +block_hint,KeyedStoreIC_Megamorphic,1174,1175,0 +block_hint,KeyedStoreIC_Megamorphic,1056,1057,1 +block_hint,KeyedStoreIC_Megamorphic,759,760,1 +block_hint,KeyedStoreIC_Megamorphic,612,613,0 +block_hint,KeyedStoreIC_Megamorphic,1196,1197,0 +block_hint,KeyedStoreIC_Megamorphic,1058,1059,0 +block_hint,KeyedStoreIC_Megamorphic,1172,1173,0 +block_hint,KeyedStoreIC_Megamorphic,224,225,0 +block_hint,KeyedStoreIC_Megamorphic,761,762,0 +block_hint,KeyedStoreIC_Megamorphic,593,594,0 +block_hint,KeyedStoreIC_Megamorphic,1136,1137,0 +block_hint,KeyedStoreIC_Megamorphic,1180,1181,0 +block_hint,KeyedStoreIC_Megamorphic,906,907,0 +block_hint,KeyedStoreIC_Megamorphic,173,174,1 +block_hint,KeyedStoreIC_Megamorphic,175,176,1 +block_hint,KeyedStoreIC_Megamorphic,373,374,0 +block_hint,KeyedStoreIC_Megamorphic,177,178,1 +block_hint,KeyedStoreIC_Megamorphic,375,376,0 +block_hint,KeyedStoreIC_Megamorphic,179,180,1 +block_hint,KeyedStoreIC_Megamorphic,234,235,0 +block_hint,KeyedStoreIC_Megamorphic,236,237,0 +block_hint,KeyedStoreIC_Megamorphic,181,182,1 +block_hint,KeyedStoreIC_Megamorphic,183,184,1 +block_hint,KeyedStoreIC_Megamorphic,1032,1033,0 +block_hint,KeyedStoreIC_Megamorphic,185,186,1 +block_hint,KeyedStoreIC_Megamorphic,925,926,1 +block_hint,KeyedStoreIC_Megamorphic,485,486,1 +block_hint,KeyedStoreIC_Megamorphic,733,734,0 +block_hint,KeyedStoreIC_Megamorphic,919,920,1 block_hint,KeyedStoreIC_Megamorphic,413,414,0 -block_hint,KeyedStoreIC_Megamorphic,625,626,1 -block_hint,KeyedStoreIC_Megamorphic,93,94,1 -block_hint,KeyedStoreIC_Megamorphic,95,96,0 -block_hint,KeyedStoreIC_Megamorphic,761,762,1 -block_hint,KeyedStoreIC_Megamorphic,383,384,0 -block_hint,KeyedStoreIC_Megamorphic,634,635,1 -block_hint,KeyedStoreIC_Megamorphic,65,66,1 -block_hint,KeyedStoreIC_Megamorphic,67,68,0 -block_hint,DefineKeyedOwnIC_Megamorphic,310,311,1 -block_hint,DefineKeyedOwnIC_Megamorphic,312,313,0 -block_hint,DefineKeyedOwnIC_Megamorphic,865,866,0 -block_hint,DefineKeyedOwnIC_Megamorphic,418,419,0 -block_hint,DefineKeyedOwnIC_Megamorphic,416,417,1 -block_hint,DefineKeyedOwnIC_Megamorphic,794,795,0 -block_hint,DefineKeyedOwnIC_Megamorphic,570,571,1 -block_hint,DefineKeyedOwnIC_Megamorphic,593,594,1 -block_hint,DefineKeyedOwnIC_Megamorphic,230,231,0 +block_hint,KeyedStoreIC_Megamorphic,415,416,0 +block_hint,KeyedStoreIC_Megamorphic,254,255,1 +block_hint,KeyedStoreIC_Megamorphic,417,418,0 +block_hint,KeyedStoreIC_Megamorphic,630,631,1 +block_hint,KeyedStoreIC_Megamorphic,92,93,1 +block_hint,KeyedStoreIC_Megamorphic,94,95,0 +block_hint,KeyedStoreIC_Megamorphic,769,770,1 +block_hint,KeyedStoreIC_Megamorphic,387,388,0 +block_hint,KeyedStoreIC_Megamorphic,639,640,1 +block_hint,KeyedStoreIC_Megamorphic,64,65,1 +block_hint,KeyedStoreIC_Megamorphic,66,67,0 +block_hint,DefineKeyedOwnIC_Megamorphic,312,313,1 +block_hint,DefineKeyedOwnIC_Megamorphic,314,315,0 +block_hint,DefineKeyedOwnIC_Megamorphic,887,888,0 
+block_hint,DefineKeyedOwnIC_Megamorphic,420,421,0 +block_hint,DefineKeyedOwnIC_Megamorphic,418,419,1 +block_hint,DefineKeyedOwnIC_Megamorphic,803,804,0 +block_hint,DefineKeyedOwnIC_Megamorphic,575,576,1 +block_hint,DefineKeyedOwnIC_Megamorphic,601,602,1 +block_hint,DefineKeyedOwnIC_Megamorphic,232,233,0 block_hint,DefineKeyedOwnIC_Megamorphic,53,54,1 block_hint,DefineKeyedOwnIC_Megamorphic,55,56,0 -block_hint,LoadGlobalIC_NoFeedback,39,40,1 +block_hint,LoadGlobalIC_NoFeedback,41,42,1 block_hint,LoadGlobalIC_NoFeedback,6,7,1 block_hint,LoadGlobalIC_NoFeedback,8,9,1 block_hint,LoadGlobalIC_NoFeedback,10,11,1 block_hint,LoadGlobalIC_NoFeedback,12,13,1 -block_hint,LoadGlobalIC_NoFeedback,28,29,1 -block_hint,LoadGlobalIC_NoFeedback,43,44,1 +block_hint,LoadGlobalIC_NoFeedback,31,32,1 +block_hint,LoadGlobalIC_NoFeedback,49,50,1 block_hint,LoadGlobalIC_NoFeedback,18,19,1 +block_hint,LoadGlobalIC_NoFeedback,27,28,0 block_hint,LoadGlobalIC_NoFeedback,14,15,1 -block_hint,LoadGlobalIC_NoFeedback,30,31,0 +block_hint,LoadGlobalIC_NoFeedback,33,34,0 block_hint,LoadGlobalIC_NoFeedback,16,17,1 block_hint,LoadGlobalIC_NoFeedback,20,21,1 block_hint,LoadGlobalIC_NoFeedback,22,23,0 block_hint,LoadGlobalIC_NoFeedback,24,25,1 block_hint,LoadIC_FunctionPrototype,2,3,0 block_hint,LoadIC_FunctionPrototype,4,5,1 -block_hint,LoadIC_NoFeedback,93,94,1 -block_hint,LoadIC_NoFeedback,95,96,0 -block_hint,LoadIC_NoFeedback,277,278,1 -block_hint,LoadIC_NoFeedback,212,213,0 -block_hint,LoadIC_NoFeedback,201,202,1 -block_hint,LoadIC_NoFeedback,265,266,0 -block_hint,LoadIC_NoFeedback,56,57,1 -block_hint,LoadIC_NoFeedback,267,268,0 -block_hint,LoadIC_NoFeedback,58,59,0 -block_hint,LoadIC_NoFeedback,291,292,1 -block_hint,LoadIC_NoFeedback,253,254,0 -block_hint,LoadIC_NoFeedback,270,271,1 -block_hint,LoadIC_NoFeedback,138,139,1 -block_hint,LoadIC_NoFeedback,23,24,1 -block_hint,LoadIC_NoFeedback,36,37,1 -block_hint,LoadIC_NoFeedback,126,127,1 -block_hint,LoadIC_NoFeedback,140,141,0 -block_hint,LoadIC_NoFeedback,121,122,0 -block_hint,LoadIC_NoFeedback,242,243,0 -block_hint,LoadIC_NoFeedback,235,236,0 -block_hint,LoadIC_NoFeedback,144,145,1 -block_hint,LoadIC_NoFeedback,146,147,0 -block_hint,LoadIC_NoFeedback,73,74,1 -block_hint,LoadIC_NoFeedback,150,151,0 -block_hint,LoadIC_NoFeedback,75,76,0 -block_hint,LoadIC_NoFeedback,159,160,1 -block_hint,LoadIC_NoFeedback,293,294,1 -block_hint,LoadIC_NoFeedback,259,260,0 -block_hint,LoadIC_NoFeedback,257,258,0 -block_hint,LoadIC_NoFeedback,228,229,1 -block_hint,LoadIC_NoFeedback,131,132,1 -block_hint,LoadIC_NoFeedback,89,90,0 -block_hint,StoreIC_NoFeedback,143,144,1 -block_hint,StoreIC_NoFeedback,145,146,0 -block_hint,StoreIC_NoFeedback,427,428,0 -block_hint,StoreIC_NoFeedback,61,62,1 -block_hint,StoreIC_NoFeedback,63,64,0 -block_hint,StoreIC_NoFeedback,467,468,0 -block_hint,StoreIC_NoFeedback,357,358,0 -block_hint,StoreIC_NoFeedback,147,148,0 -block_hint,StoreIC_NoFeedback,341,342,0 -block_hint,StoreIC_NoFeedback,149,150,1 -block_hint,StoreIC_NoFeedback,155,156,1 -block_hint,StoreIC_NoFeedback,157,158,0 -block_hint,StoreIC_NoFeedback,159,160,0 -block_hint,StoreIC_NoFeedback,153,154,1 +block_hint,LoadIC_NoFeedback,97,98,1 +block_hint,LoadIC_NoFeedback,99,100,0 +block_hint,LoadIC_NoFeedback,306,307,1 +block_hint,LoadIC_NoFeedback,226,227,0 +block_hint,LoadIC_NoFeedback,285,286,1 +block_hint,LoadIC_NoFeedback,141,142,0 +block_hint,LoadIC_NoFeedback,320,321,0 +block_hint,LoadIC_NoFeedback,287,288,0 +block_hint,LoadIC_NoFeedback,302,303,0 +block_hint,LoadIC_NoFeedback,53,54,1 
+block_hint,LoadIC_NoFeedback,289,290,0 +block_hint,LoadIC_NoFeedback,55,56,0 +block_hint,LoadIC_NoFeedback,324,325,1 +block_hint,LoadIC_NoFeedback,272,273,0 +block_hint,LoadIC_NoFeedback,295,296,1 +block_hint,LoadIC_NoFeedback,247,248,1 +block_hint,LoadIC_NoFeedback,59,60,0 +block_hint,LoadIC_NoFeedback,22,23,1 +block_hint,LoadIC_NoFeedback,35,36,1 +block_hint,LoadIC_NoFeedback,130,131,1 +block_hint,LoadIC_NoFeedback,145,146,0 +block_hint,LoadIC_NoFeedback,125,126,0 +block_hint,LoadIC_NoFeedback,261,262,0 +block_hint,LoadIC_NoFeedback,250,251,0 +block_hint,LoadIC_NoFeedback,149,150,1 +block_hint,LoadIC_NoFeedback,167,168,0 +block_hint,LoadIC_NoFeedback,322,323,0 +block_hint,LoadIC_NoFeedback,151,152,0 +block_hint,LoadIC_NoFeedback,291,292,0 +block_hint,LoadIC_NoFeedback,70,71,1 +block_hint,LoadIC_NoFeedback,155,156,0 +block_hint,LoadIC_NoFeedback,72,73,0 +block_hint,LoadIC_NoFeedback,254,255,1 +block_hint,LoadIC_NoFeedback,76,77,0 +block_hint,LoadIC_NoFeedback,326,327,1 +block_hint,LoadIC_NoFeedback,278,279,0 +block_hint,LoadIC_NoFeedback,276,277,0 +block_hint,LoadIC_NoFeedback,242,243,1 +block_hint,LoadIC_NoFeedback,135,136,1 +block_hint,LoadIC_NoFeedback,93,94,0 +block_hint,StoreIC_NoFeedback,147,148,1 +block_hint,StoreIC_NoFeedback,149,150,0 +block_hint,StoreIC_NoFeedback,259,260,0 +block_hint,StoreIC_NoFeedback,549,550,0 +block_hint,StoreIC_NoFeedback,443,444,0 +block_hint,StoreIC_NoFeedback,527,528,0 +block_hint,StoreIC_NoFeedback,58,59,1 +block_hint,StoreIC_NoFeedback,60,61,0 +block_hint,StoreIC_NoFeedback,498,499,0 +block_hint,StoreIC_NoFeedback,367,368,0 block_hint,StoreIC_NoFeedback,151,152,0 -block_hint,StoreIC_NoFeedback,497,498,1 -block_hint,StoreIC_NoFeedback,371,372,1 -block_hint,StoreIC_NoFeedback,175,176,0 -block_hint,StoreIC_NoFeedback,488,489,1 -block_hint,StoreIC_NoFeedback,195,196,0 -block_hint,StoreIC_NoFeedback,197,198,0 -block_hint,StoreIC_NoFeedback,431,432,0 -block_hint,StoreIC_NoFeedback,203,204,1 -block_hint,StoreIC_NoFeedback,256,257,0 -block_hint,StoreIC_NoFeedback,205,206,0 -block_hint,StoreIC_NoFeedback,69,70,1 +block_hint,StoreIC_NoFeedback,349,350,0 +block_hint,StoreIC_NoFeedback,153,154,1 +block_hint,StoreIC_NoFeedback,159,160,1 +block_hint,StoreIC_NoFeedback,161,162,0 +block_hint,StoreIC_NoFeedback,163,164,0 +block_hint,StoreIC_NoFeedback,157,158,1 +block_hint,StoreIC_NoFeedback,155,156,0 +block_hint,StoreIC_NoFeedback,536,537,1 +block_hint,StoreIC_NoFeedback,381,382,1 +block_hint,StoreIC_NoFeedback,179,180,0 +block_hint,StoreIC_NoFeedback,519,520,1 +block_hint,StoreIC_NoFeedback,199,200,0 +block_hint,StoreIC_NoFeedback,201,202,0 +block_hint,StoreIC_NoFeedback,447,448,0 +block_hint,StoreIC_NoFeedback,207,208,1 +block_hint,StoreIC_NoFeedback,473,474,0 +block_hint,StoreIC_NoFeedback,262,263,0 +block_hint,StoreIC_NoFeedback,551,552,0 block_hint,StoreIC_NoFeedback,209,210,0 -block_hint,StoreIC_NoFeedback,71,72,0 -block_hint,StoreIC_NoFeedback,380,381,0 -block_hint,StoreIC_NoFeedback,211,212,1 +block_hint,StoreIC_NoFeedback,449,450,0 +block_hint,StoreIC_NoFeedback,66,67,1 block_hint,StoreIC_NoFeedback,213,214,0 +block_hint,StoreIC_NoFeedback,68,69,0 +block_hint,StoreIC_NoFeedback,390,391,0 block_hint,StoreIC_NoFeedback,215,216,1 block_hint,StoreIC_NoFeedback,217,218,0 -block_hint,StoreIC_NoFeedback,478,479,0 block_hint,StoreIC_NoFeedback,219,220,1 -block_hint,StoreIC_NoFeedback,348,349,0 -block_hint,StoreIC_NoFeedback,480,481,0 -block_hint,StoreIC_NoFeedback,383,384,1 -block_hint,StoreIC_NoFeedback,227,228,1 -block_hint,StoreIC_NoFeedback,229,230,0 
-block_hint,StoreIC_NoFeedback,231,232,0 -block_hint,StoreIC_NoFeedback,233,234,1 -block_hint,StoreIC_NoFeedback,223,224,0 -block_hint,StoreIC_NoFeedback,519,520,0 -block_hint,StoreIC_NoFeedback,465,466,1 -block_hint,StoreIC_NoFeedback,402,403,1 -block_hint,StoreIC_NoFeedback,75,76,0 -block_hint,StoreIC_NoFeedback,81,82,0 -block_hint,StoreIC_NoFeedback,127,128,0 -block_hint,StoreIC_NoFeedback,261,262,1 -block_hint,StoreIC_NoFeedback,83,84,0 -block_hint,StoreIC_NoFeedback,237,238,0 -block_hint,StoreIC_NoFeedback,239,240,0 -block_hint,StoreIC_NoFeedback,436,437,0 -block_hint,StoreIC_NoFeedback,241,242,1 -block_hint,StoreIC_NoFeedback,482,483,0 -block_hint,StoreIC_NoFeedback,393,394,0 -block_hint,StoreIC_NoFeedback,320,321,1 -block_hint,StoreIC_NoFeedback,438,439,0 -block_hint,StoreIC_NoFeedback,93,94,0 -block_hint,StoreIC_NoFeedback,324,325,0 -block_hint,StoreIC_NoFeedback,264,265,1 -block_hint,StoreIC_NoFeedback,97,98,0 -block_hint,StoreIC_NoFeedback,249,250,0 -block_hint,StoreIC_NoFeedback,251,252,1 -block_hint,StoreIC_NoFeedback,352,353,0 -block_hint,StoreIC_NoFeedback,41,42,1 -block_hint,StoreIC_NoFeedback,43,44,1 -block_hint,StoreIC_NoFeedback,137,138,0 -block_hint,StoreIC_NoFeedback,45,46,1 -block_hint,StoreIC_NoFeedback,139,140,0 -block_hint,StoreIC_NoFeedback,47,48,1 -block_hint,StoreIC_NoFeedback,99,100,0 -block_hint,StoreIC_NoFeedback,49,50,1 -block_hint,StoreIC_NoFeedback,51,52,1 -block_hint,StoreIC_NoFeedback,425,426,0 -block_hint,StoreIC_NoFeedback,53,54,1 -block_hint,DefineNamedOwnIC_NoFeedback,78,79,1 -block_hint,DefineNamedOwnIC_NoFeedback,80,81,0 -block_hint,DefineNamedOwnIC_NoFeedback,195,196,0 -block_hint,DefineNamedOwnIC_NoFeedback,150,151,1 -block_hint,DefineNamedOwnIC_NoFeedback,201,202,0 -block_hint,DefineNamedOwnIC_NoFeedback,152,153,1 -block_hint,DefineNamedOwnIC_NoFeedback,38,39,1 -block_hint,DefineNamedOwnIC_NoFeedback,84,85,0 +block_hint,StoreIC_NoFeedback,221,222,0 +block_hint,StoreIC_NoFeedback,509,510,0 +block_hint,StoreIC_NoFeedback,223,224,1 +block_hint,StoreIC_NoFeedback,356,357,0 +block_hint,StoreIC_NoFeedback,511,512,0 +block_hint,StoreIC_NoFeedback,393,394,1 +block_hint,StoreIC_NoFeedback,231,232,1 +block_hint,StoreIC_NoFeedback,233,234,0 +block_hint,StoreIC_NoFeedback,235,236,0 +block_hint,StoreIC_NoFeedback,237,238,1 +block_hint,StoreIC_NoFeedback,227,228,0 +block_hint,StoreIC_NoFeedback,564,565,0 +block_hint,StoreIC_NoFeedback,494,495,1 +block_hint,StoreIC_NoFeedback,413,414,1 +block_hint,StoreIC_NoFeedback,72,73,0 +block_hint,StoreIC_NoFeedback,78,79,0 +block_hint,StoreIC_NoFeedback,130,131,0 +block_hint,StoreIC_NoFeedback,415,416,1 +block_hint,StoreIC_NoFeedback,80,81,0 +block_hint,StoreIC_NoFeedback,82,83,0 +block_hint,StoreIC_NoFeedback,241,242,0 +block_hint,StoreIC_NoFeedback,243,244,0 +block_hint,StoreIC_NoFeedback,456,457,0 +block_hint,StoreIC_NoFeedback,245,246,1 +block_hint,StoreIC_NoFeedback,513,514,0 +block_hint,StoreIC_NoFeedback,403,404,0 +block_hint,StoreIC_NoFeedback,458,459,1 +block_hint,StoreIC_NoFeedback,268,269,0 +block_hint,StoreIC_NoFeedback,553,554,0 +block_hint,StoreIC_NoFeedback,460,461,0 +block_hint,StoreIC_NoFeedback,531,532,0 +block_hint,StoreIC_NoFeedback,90,91,0 +block_hint,StoreIC_NoFeedback,332,333,0 +block_hint,StoreIC_NoFeedback,420,421,1 +block_hint,StoreIC_NoFeedback,94,95,0 +block_hint,StoreIC_NoFeedback,96,97,0 +block_hint,StoreIC_NoFeedback,253,254,0 +block_hint,StoreIC_NoFeedback,255,256,1 +block_hint,StoreIC_NoFeedback,362,363,0 +block_hint,StoreIC_NoFeedback,40,41,1 +block_hint,StoreIC_NoFeedback,42,43,1 
+block_hint,StoreIC_NoFeedback,141,142,0 +block_hint,StoreIC_NoFeedback,44,45,1 +block_hint,StoreIC_NoFeedback,143,144,0 +block_hint,StoreIC_NoFeedback,46,47,1 +block_hint,StoreIC_NoFeedback,100,101,0 +block_hint,StoreIC_NoFeedback,102,103,0 +block_hint,StoreIC_NoFeedback,48,49,1 +block_hint,StoreIC_NoFeedback,50,51,1 +block_hint,StoreIC_NoFeedback,439,440,0 +block_hint,StoreIC_NoFeedback,52,53,1 +block_hint,DefineNamedOwnIC_NoFeedback,80,81,1 +block_hint,DefineNamedOwnIC_NoFeedback,82,83,0 +block_hint,DefineNamedOwnIC_NoFeedback,236,237,0 +block_hint,DefineNamedOwnIC_NoFeedback,210,211,1 +block_hint,DefineNamedOwnIC_NoFeedback,136,137,0 +block_hint,DefineNamedOwnIC_NoFeedback,239,240,0 +block_hint,DefineNamedOwnIC_NoFeedback,212,213,0 +block_hint,DefineNamedOwnIC_NoFeedback,234,235,0 +block_hint,DefineNamedOwnIC_NoFeedback,157,158,1 +block_hint,DefineNamedOwnIC_NoFeedback,36,37,1 +block_hint,DefineNamedOwnIC_NoFeedback,86,87,0 +block_hint,DefineNamedOwnIC_NoFeedback,38,39,0 block_hint,DefineNamedOwnIC_NoFeedback,40,41,0 -block_hint,DefineNamedOwnIC_NoFeedback,42,43,0 block_hint,KeyedLoadIC_SloppyArguments,12,13,0 block_hint,KeyedLoadIC_SloppyArguments,14,15,1 block_hint,KeyedLoadIC_SloppyArguments,4,5,1 @@ -623,6 +661,7 @@ block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,459,460,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,219,220,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,561,562,1 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,182,183,0 +block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,184,185,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,645,646,1 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,461,462,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,36,37,1 @@ -720,15 +759,17 @@ block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,1059,1060,1 block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,703,704,0 block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,62,63,1 block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,95,96,0 -block_hint,KeyedHasIC_PolymorphicName,61,62,1 -block_hint,KeyedHasIC_PolymorphicName,32,33,1 -block_hint,KeyedHasIC_PolymorphicName,28,29,0 +block_hint,KeyedHasIC_PolymorphicName,69,70,1 +block_hint,KeyedHasIC_PolymorphicName,28,29,1 +block_hint,KeyedHasIC_PolymorphicName,24,25,0 +block_hint,KeyedHasIC_PolymorphicName,26,27,0 +block_hint,KeyedHasIC_PolymorphicName,55,56,1 +block_hint,KeyedHasIC_PolymorphicName,89,90,1 +block_hint,KeyedHasIC_PolymorphicName,93,94,1 block_hint,KeyedHasIC_PolymorphicName,30,31,0 -block_hint,KeyedHasIC_PolymorphicName,47,48,1 -block_hint,KeyedHasIC_PolymorphicName,83,84,1 -block_hint,KeyedHasIC_PolymorphicName,63,64,1 +block_hint,KeyedHasIC_PolymorphicName,32,33,0 +block_hint,KeyedHasIC_PolymorphicName,14,15,1 block_hint,KeyedHasIC_PolymorphicName,16,17,1 -block_hint,KeyedHasIC_PolymorphicName,18,19,1 block_hint,EnqueueMicrotask,4,5,0 block_hint,EnqueueMicrotask,2,3,0 block_hint,RunMicrotasks,18,19,0 @@ -738,110 +779,120 @@ block_hint,RunMicrotasks,36,37,1 block_hint,RunMicrotasks,85,86,0 block_hint,RunMicrotasks,67,68,0 block_hint,RunMicrotasks,38,39,1 -block_hint,HasProperty,133,134,1 -block_hint,HasProperty,135,136,1 -block_hint,HasProperty,253,254,0 -block_hint,HasProperty,207,208,1 -block_hint,HasProperty,245,246,0 -block_hint,HasProperty,93,94,0 -block_hint,HasProperty,228,229,1 -block_hint,HasProperty,119,120,1 block_hint,HasProperty,137,138,1 -block_hint,HasProperty,195,196,0 
-block_hint,HasProperty,197,198,0 +block_hint,HasProperty,139,140,1 +block_hint,HasProperty,263,264,0 +block_hint,HasProperty,211,212,1 +block_hint,HasProperty,254,255,0 block_hint,HasProperty,97,98,0 -block_hint,HasProperty,95,96,0 -block_hint,HasProperty,241,242,0 -block_hint,HasProperty,232,233,0 -block_hint,HasProperty,199,200,1 -block_hint,HasProperty,249,250,0 -block_hint,HasProperty,201,202,1 -block_hint,HasProperty,45,46,1 -block_hint,HasProperty,63,64,0 -block_hint,HasProperty,47,48,0 -block_hint,HasProperty,103,104,1 -block_hint,HasProperty,258,259,0 -block_hint,HasProperty,222,223,0 -block_hint,HasProperty,39,40,0 -block_hint,DeleteProperty,35,36,1 -block_hint,DeleteProperty,60,61,0 -block_hint,DeleteProperty,37,38,0 -block_hint,DeleteProperty,64,65,1 -block_hint,DeleteProperty,86,87,0 -block_hint,DeleteProperty,69,70,0 -block_hint,DeleteProperty,62,63,1 -block_hint,DeleteProperty,54,55,1 -block_hint,DeleteProperty,39,40,1 -block_hint,DeleteProperty,82,83,0 -block_hint,DeleteProperty,84,85,0 +block_hint,HasProperty,234,235,1 +block_hint,HasProperty,123,124,1 +block_hint,HasProperty,141,142,1 +block_hint,HasProperty,199,200,0 +block_hint,HasProperty,201,202,0 +block_hint,HasProperty,101,102,0 +block_hint,HasProperty,99,100,0 +block_hint,HasProperty,250,251,0 +block_hint,HasProperty,270,271,0 +block_hint,HasProperty,259,260,1 +block_hint,HasProperty,106,107,0 +block_hint,HasProperty,277,278,0 +block_hint,HasProperty,282,283,0 +block_hint,HasProperty,268,269,0 +block_hint,HasProperty,203,204,1 +block_hint,HasProperty,42,43,1 +block_hint,HasProperty,65,66,0 +block_hint,HasProperty,44,45,0 +block_hint,HasProperty,239,240,1 +block_hint,HasProperty,48,49,0 +block_hint,HasProperty,207,208,1 +block_hint,HasProperty,272,273,0 +block_hint,HasProperty,228,229,0 +block_hint,HasProperty,38,39,0 +block_hint,DeleteProperty,38,39,1 +block_hint,DeleteProperty,62,63,0 +block_hint,DeleteProperty,40,41,0 +block_hint,DeleteProperty,66,67,1 +block_hint,DeleteProperty,91,92,0 block_hint,DeleteProperty,73,74,0 -block_hint,DeleteProperty,71,72,0 -block_hint,DeleteProperty,44,45,0 -block_hint,DeleteProperty,46,47,0 +block_hint,DeleteProperty,64,65,1 +block_hint,DeleteProperty,56,57,1 +block_hint,DeleteProperty,42,43,1 +block_hint,DeleteProperty,83,84,0 +block_hint,DeleteProperty,85,86,0 +block_hint,DeleteProperty,77,78,0 block_hint,DeleteProperty,75,76,0 -block_hint,DeleteProperty,50,51,1 -block_hint,DeleteProperty,52,53,0 -block_hint,DeleteProperty,8,9,1 -block_hint,DeleteProperty,10,11,1 -block_hint,DeleteProperty,12,13,1 -block_hint,DeleteProperty,14,15,1 -block_hint,DeleteProperty,16,17,1 -block_hint,SetDataProperties,132,133,1 -block_hint,SetDataProperties,253,254,1 -block_hint,SetDataProperties,251,252,1 -block_hint,SetDataProperties,140,141,0 -block_hint,SetDataProperties,298,299,0 -block_hint,SetDataProperties,142,143,0 -block_hint,SetDataProperties,60,61,0 -block_hint,SetDataProperties,317,318,0 -block_hint,SetDataProperties,257,258,0 -block_hint,SetDataProperties,322,323,1 -block_hint,SetDataProperties,263,264,0 -block_hint,SetDataProperties,681,682,0 -block_hint,SetDataProperties,703,704,1 -block_hint,SetDataProperties,679,680,0 -block_hint,SetDataProperties,677,678,0 +block_hint,DeleteProperty,47,48,0 +block_hint,DeleteProperty,49,50,0 +block_hint,DeleteProperty,87,88,0 +block_hint,DeleteProperty,71,72,1 +block_hint,DeleteProperty,20,21,0 +block_hint,DeleteProperty,54,55,0 +block_hint,DeleteProperty,7,8,1 +block_hint,DeleteProperty,9,10,1 +block_hint,DeleteProperty,11,12,1 
+block_hint,DeleteProperty,13,14,1 +block_hint,DeleteProperty,15,16,1 +block_hint,SetDataProperties,136,137,1 +block_hint,SetDataProperties,263,264,1 +block_hint,SetDataProperties,261,262,1 +block_hint,SetDataProperties,144,145,0 +block_hint,SetDataProperties,316,317,0 +block_hint,SetDataProperties,146,147,0 +block_hint,SetDataProperties,59,60,0 +block_hint,SetDataProperties,341,342,0 +block_hint,SetDataProperties,267,268,0 +block_hint,SetDataProperties,385,386,1 +block_hint,SetDataProperties,277,278,0 +block_hint,SetDataProperties,752,753,0 +block_hint,SetDataProperties,762,763,1 +block_hint,SetDataProperties,750,751,0 +block_hint,SetDataProperties,748,749,0 +block_hint,SetDataProperties,659,660,0 +block_hint,SetDataProperties,451,452,1 +block_hint,SetDataProperties,221,222,1 +block_hint,SetDataProperties,87,88,0 +block_hint,SetDataProperties,223,224,0 +block_hint,SetDataProperties,513,514,0 +block_hint,SetDataProperties,515,516,0 +block_hint,SetDataProperties,519,520,1 +block_hint,SetDataProperties,449,450,0 +block_hint,SetDataProperties,329,330,1 +block_hint,SetDataProperties,326,327,0 +block_hint,SetDataProperties,158,159,0 +block_hint,SetDataProperties,399,400,0 +block_hint,SetDataProperties,447,448,0 +block_hint,SetDataProperties,352,353,0 +block_hint,SetDataProperties,226,227,1 +block_hint,SetDataProperties,93,94,1 +block_hint,SetDataProperties,521,522,0 +block_hint,SetDataProperties,95,96,0 +block_hint,SetDataProperties,97,98,0 +block_hint,SetDataProperties,617,618,0 +block_hint,SetDataProperties,523,524,1 +block_hint,SetDataProperties,525,526,0 +block_hint,SetDataProperties,527,528,1 block_hint,SetDataProperties,529,530,0 -block_hint,SetDataProperties,327,328,1 -block_hint,SetDataProperties,84,85,1 -block_hint,SetDataProperties,90,91,0 -block_hint,SetDataProperties,217,218,0 -block_hint,SetDataProperties,457,458,0 -block_hint,SetDataProperties,459,460,0 -block_hint,SetDataProperties,463,464,1 -block_hint,SetDataProperties,398,399,0 -block_hint,SetDataProperties,308,309,1 -block_hint,SetDataProperties,151,152,0 -block_hint,SetDataProperties,306,307,0 -block_hint,SetDataProperties,219,220,1 -block_hint,SetDataProperties,96,97,1 -block_hint,SetDataProperties,465,466,0 -block_hint,SetDataProperties,98,99,0 -block_hint,SetDataProperties,100,101,0 -block_hint,SetDataProperties,566,567,0 -block_hint,SetDataProperties,467,468,1 -block_hint,SetDataProperties,469,470,0 -block_hint,SetDataProperties,471,472,1 -block_hint,SetDataProperties,473,474,0 -block_hint,SetDataProperties,643,644,0 -block_hint,SetDataProperties,475,476,1 -block_hint,SetDataProperties,522,523,0 -block_hint,SetDataProperties,645,646,0 -block_hint,SetDataProperties,569,570,1 -block_hint,SetDataProperties,483,484,1 -block_hint,SetDataProperties,485,486,0 -block_hint,SetDataProperties,487,488,0 -block_hint,SetDataProperties,489,490,1 -block_hint,SetDataProperties,479,480,0 -block_hint,SetDataProperties,627,628,0 -block_hint,SetDataProperties,499,500,1 -block_hint,SetDataProperties,275,276,1 -block_hint,SetDataProperties,102,103,0 -block_hint,SetDataProperties,390,391,0 -block_hint,SetDataProperties,234,235,0 -block_hint,SetDataProperties,265,266,1 -block_hint,SetDataProperties,198,199,0 -block_hint,SetDataProperties,62,63,0 +block_hint,SetDataProperties,673,674,0 +block_hint,SetDataProperties,531,532,1 +block_hint,SetDataProperties,577,578,0 +block_hint,SetDataProperties,675,676,0 +block_hint,SetDataProperties,620,621,1 +block_hint,SetDataProperties,539,540,1 +block_hint,SetDataProperties,541,542,0 
+block_hint,SetDataProperties,543,544,0 +block_hint,SetDataProperties,545,546,1 +block_hint,SetDataProperties,535,536,0 +block_hint,SetDataProperties,657,658,0 +block_hint,SetDataProperties,555,556,1 +block_hint,SetDataProperties,292,293,1 +block_hint,SetDataProperties,99,100,0 +block_hint,SetDataProperties,437,438,0 +block_hint,SetDataProperties,241,242,0 +block_hint,SetDataProperties,129,130,0 +block_hint,SetDataProperties,279,280,1 +block_hint,SetDataProperties,204,205,0 +block_hint,SetDataProperties,61,62,0 block_hint,ReturnReceiver,3,4,1 block_hint,ArrayConstructorImpl,40,41,0 block_hint,ArrayConstructorImpl,15,16,1 @@ -1170,448 +1221,474 @@ block_hint,ResumeGeneratorBaseline,6,7,0 block_hint,GlobalIsFinite,9,10,1 block_hint,GlobalIsNaN,9,10,1 block_hint,GlobalIsNaN,11,12,1 -block_hint,LoadIC,360,361,1 -block_hint,LoadIC,135,136,0 -block_hint,LoadIC,61,62,0 -block_hint,LoadIC,227,228,0 -block_hint,LoadIC,339,340,1 -block_hint,LoadIC,229,230,0 -block_hint,LoadIC,374,375,1 -block_hint,LoadIC,371,372,1 -block_hint,LoadIC,288,289,1 -block_hint,LoadIC,102,103,1 -block_hint,LoadIC,274,275,0 -block_hint,LoadIC,313,314,0 -block_hint,LoadIC,137,138,1 +block_hint,LoadIC,370,371,1 block_hint,LoadIC,139,140,0 -block_hint,LoadIC,302,303,1 -block_hint,LoadIC,258,259,1 +block_hint,LoadIC,59,60,0 +block_hint,LoadIC,233,234,0 +block_hint,LoadIC,345,346,1 +block_hint,LoadIC,235,236,0 +block_hint,LoadIC,387,388,1 +block_hint,LoadIC,384,385,0 +block_hint,LoadIC,381,382,1 +block_hint,LoadIC,292,293,1 +block_hint,LoadIC,100,101,1 +block_hint,LoadIC,278,279,0 +block_hint,LoadIC,319,320,0 +block_hint,LoadIC,141,142,1 +block_hint,LoadIC,143,144,0 +block_hint,LoadIC,308,309,1 +block_hint,LoadIC,358,359,1 +block_hint,LoadIC,102,103,0 +block_hint,LoadIC,19,20,1 +block_hint,LoadIC,62,63,0 block_hint,LoadIC,21,22,1 -block_hint,LoadIC,64,65,0 -block_hint,LoadIC,23,24,1 -block_hint,LoadIC,169,170,0 -block_hint,LoadIC,354,355,0 -block_hint,LoadIC,356,357,0 -block_hint,LoadIC,311,312,0 -block_hint,LoadIC,125,126,0 -block_hint,LoadIC,51,52,1 -block_hint,LoadIC,203,204,0 -block_hint,LoadIC,86,87,0 -block_hint,LoadIC,46,47,0 -block_hint,LoadIC,260,261,1 -block_hint,LoadIC,179,180,0 -block_hint,LoadIC,44,45,1 -block_hint,LoadIC,78,79,0 -block_hint,LoadIC,264,265,0 -block_hint,LoadIC,304,305,1 -block_hint,LoadIC,27,28,0 +block_hint,LoadIC,173,174,0 +block_hint,LoadIC,364,365,0 +block_hint,LoadIC,366,367,0 +block_hint,LoadIC,317,318,0 +block_hint,LoadIC,129,130,0 +block_hint,LoadIC,49,50,1 +block_hint,LoadIC,209,210,0 +block_hint,LoadIC,84,85,0 +block_hint,LoadIC,44,45,0 +block_hint,LoadIC,360,361,1 +block_hint,LoadIC,114,115,0 +block_hint,LoadIC,183,184,0 +block_hint,LoadIC,42,43,1 +block_hint,LoadIC,76,77,0 +block_hint,LoadIC,268,269,0 +block_hint,LoadIC,310,311,1 +block_hint,LoadIC,25,26,0 +block_hint,LoadIC,179,180,1 +block_hint,LoadIC,181,182,1 block_hint,LoadIC,175,176,1 block_hint,LoadIC,177,178,1 -block_hint,LoadIC,171,172,1 -block_hint,LoadIC,173,174,1 -block_hint,LoadIC,129,130,1 -block_hint,LoadIC,131,132,0 -block_hint,LoadIC_Megamorphic,342,343,1 -block_hint,LoadIC_Megamorphic,339,340,1 -block_hint,LoadIC_Megamorphic,253,254,1 -block_hint,LoadIC_Megamorphic,255,256,1 -block_hint,LoadIC_Megamorphic,251,252,0 -block_hint,LoadIC_Megamorphic,288,289,0 -block_hint,LoadIC_Megamorphic,126,127,1 -block_hint,LoadIC_Megamorphic,290,291,0 -block_hint,LoadIC_Megamorphic,128,129,0 -block_hint,LoadIC_Megamorphic,276,277,1 -block_hint,LoadIC_Megamorphic,235,236,1 -block_hint,LoadIC_Megamorphic,22,23,1 
-block_hint,LoadIC_Megamorphic,158,159,0 -block_hint,LoadIC_Megamorphic,281,282,0 -block_hint,LoadIC_Megamorphic,245,246,1 -block_hint,LoadIC_Megamorphic,324,325,0 -block_hint,LoadIC_Megamorphic,326,327,0 -block_hint,LoadIC_Megamorphic,285,286,0 -block_hint,LoadIC_Megamorphic,118,119,0 -block_hint,LoadIC_Megamorphic,50,51,1 -block_hint,LoadIC_Megamorphic,45,46,0 -block_hint,LoadIC_Megamorphic,241,242,0 -block_hint,LoadIC_Megamorphic,278,279,1 -block_hint,LoadIC_Megamorphic,26,27,0 +block_hint,LoadIC,133,134,1 +block_hint,LoadIC,135,136,0 +block_hint,LoadIC_Megamorphic,355,356,1 +block_hint,LoadIC_Megamorphic,352,353,0 +block_hint,LoadIC_Megamorphic,349,350,1 +block_hint,LoadIC_Megamorphic,257,258,1 +block_hint,LoadIC_Megamorphic,259,260,1 +block_hint,LoadIC_Megamorphic,255,256,0 +block_hint,LoadIC_Megamorphic,56,57,0 +block_hint,LoadIC_Megamorphic,294,295,0 +block_hint,LoadIC_Megamorphic,130,131,1 +block_hint,LoadIC_Megamorphic,280,281,1 +block_hint,LoadIC_Megamorphic,132,133,0 +block_hint,LoadIC_Megamorphic,282,283,1 +block_hint,LoadIC_Megamorphic,328,329,1 +block_hint,LoadIC_Megamorphic,95,96,0 +block_hint,LoadIC_Megamorphic,20,21,1 +block_hint,LoadIC_Megamorphic,162,163,0 +block_hint,LoadIC_Megamorphic,287,288,0 +block_hint,LoadIC_Megamorphic,249,250,1 +block_hint,LoadIC_Megamorphic,334,335,0 +block_hint,LoadIC_Megamorphic,336,337,0 +block_hint,LoadIC_Megamorphic,291,292,0 +block_hint,LoadIC_Megamorphic,122,123,0 +block_hint,LoadIC_Megamorphic,48,49,1 +block_hint,LoadIC_Megamorphic,43,44,0 +block_hint,LoadIC_Megamorphic,245,246,0 +block_hint,LoadIC_Megamorphic,284,285,1 block_hint,LoadIC_Megamorphic,24,25,0 -block_hint,LoadIC_Megamorphic,160,161,1 -block_hint,LoadIC_Megamorphic,162,163,1 -block_hint,LoadIC_Megamorphic,122,123,1 -block_hint,LoadIC_Noninlined,356,357,1 -block_hint,LoadIC_Noninlined,128,129,0 -block_hint,LoadIC_Noninlined,359,360,1 -block_hint,LoadIC_Noninlined,354,355,1 -block_hint,LoadIC_Noninlined,263,264,0 -block_hint,LoadIC_Noninlined,58,59,0 -block_hint,LoadIC_Noninlined,302,303,0 -block_hint,LoadIC_Noninlined,138,139,1 -block_hint,LoadIC_Noninlined,286,287,1 -block_hint,LoadIC_Noninlined,22,23,1 -block_hint,LoadIC_Noninlined,170,171,0 -block_hint,LoadIC_Noninlined,39,40,1 -block_hint,LoadIC_Noninlined,253,254,0 -block_hint,LoadIC_Noninlined,290,291,1 -block_hint,LoadIC_Noninlined,26,27,0 +block_hint,LoadIC_Megamorphic,22,23,0 +block_hint,LoadIC_Megamorphic,164,165,1 +block_hint,LoadIC_Megamorphic,166,167,1 +block_hint,LoadIC_Megamorphic,126,127,1 +block_hint,LoadIC_Noninlined,366,367,1 +block_hint,LoadIC_Noninlined,132,133,0 +block_hint,LoadIC_Noninlined,372,373,1 +block_hint,LoadIC_Noninlined,369,370,0 +block_hint,LoadIC_Noninlined,364,365,1 +block_hint,LoadIC_Noninlined,267,268,0 +block_hint,LoadIC_Noninlined,56,57,0 +block_hint,LoadIC_Noninlined,308,309,0 +block_hint,LoadIC_Noninlined,142,143,1 +block_hint,LoadIC_Noninlined,292,293,1 +block_hint,LoadIC_Noninlined,20,21,1 +block_hint,LoadIC_Noninlined,174,175,0 +block_hint,LoadIC_Noninlined,37,38,1 +block_hint,LoadIC_Noninlined,257,258,0 +block_hint,LoadIC_Noninlined,296,297,1 block_hint,LoadIC_Noninlined,24,25,0 +block_hint,LoadIC_Noninlined,22,23,0 block_hint,LoadICTrampoline,3,4,1 block_hint,LoadICTrampoline_Megamorphic,3,4,1 -block_hint,LoadSuperIC,508,509,0 -block_hint,LoadSuperIC,245,246,0 -block_hint,LoadSuperIC,544,545,1 +block_hint,LoadSuperIC,528,529,0 +block_hint,LoadSuperIC,253,254,0 +block_hint,LoadSuperIC,564,565,1 +block_hint,LoadSuperIC,440,441,0 +block_hint,LoadSuperIC,75,76,0 
+block_hint,LoadSuperIC,540,541,0 +block_hint,LoadSuperIC,255,256,1 +block_hint,LoadSuperIC,515,516,1 +block_hint,LoadSuperIC,41,42,1 +block_hint,LoadSuperIC,550,551,0 +block_hint,LoadSuperIC,287,288,0 +block_hint,LoadSuperIC,60,61,1 +block_hint,LoadSuperIC,429,430,0 block_hint,LoadSuperIC,427,428,0 -block_hint,LoadSuperIC,78,79,0 -block_hint,LoadSuperIC,520,521,0 -block_hint,LoadSuperIC,247,248,1 -block_hint,LoadSuperIC,497,498,1 -block_hint,LoadSuperIC,44,45,1 -block_hint,LoadSuperIC,530,531,0 -block_hint,LoadSuperIC,279,280,0 -block_hint,LoadSuperIC,63,64,1 -block_hint,LoadSuperIC,416,417,0 -block_hint,LoadSuperIC,414,415,0 -block_hint,LoadSuperIC,501,502,1 -block_hint,LoadSuperIC,48,49,0 -block_hint,KeyedLoadIC,619,620,1 -block_hint,KeyedLoadIC,253,254,0 -block_hint,KeyedLoadIC,245,246,0 -block_hint,KeyedLoadIC,379,380,0 -block_hint,KeyedLoadIC,490,491,1 -block_hint,KeyedLoadIC,661,662,0 -block_hint,KeyedLoadIC,613,614,0 -block_hint,KeyedLoadIC,568,569,1 -block_hint,KeyedLoadIC,385,386,1 -block_hint,KeyedLoadIC,383,384,1 -block_hint,KeyedLoadIC,647,648,0 -block_hint,KeyedLoadIC,649,650,0 -block_hint,KeyedLoadIC,617,618,0 -block_hint,KeyedLoadIC,570,571,1 -block_hint,KeyedLoadIC,153,154,1 -block_hint,KeyedLoadIC,611,612,0 -block_hint,KeyedLoadIC,471,472,0 -block_hint,KeyedLoadIC,103,104,1 +block_hint,LoadSuperIC,519,520,1 +block_hint,LoadSuperIC,45,46,0 +block_hint,LoadSuperIC,671,672,0 +block_hint,KeyedLoadIC,629,630,1 +block_hint,KeyedLoadIC,257,258,0 +block_hint,KeyedLoadIC,249,250,0 +block_hint,KeyedLoadIC,385,386,0 +block_hint,KeyedLoadIC,494,495,1 +block_hint,KeyedLoadIC,671,672,0 +block_hint,KeyedLoadIC,623,624,0 +block_hint,KeyedLoadIC,574,575,1 +block_hint,KeyedLoadIC,391,392,1 +block_hint,KeyedLoadIC,389,390,1 block_hint,KeyedLoadIC,657,658,0 block_hint,KeyedLoadIC,659,660,0 -block_hint,KeyedLoadIC,625,626,1 -block_hint,KeyedLoadIC,627,628,1 -block_hint,KeyedLoadIC,289,290,1 -block_hint,KeyedLoadIC,291,292,0 -block_hint,KeyedLoadIC,653,654,1 -block_hint,KeyedLoadIC,511,512,1 -block_hint,KeyedLoadIC,609,610,0 -block_hint,KeyedLoadIC,596,597,0 -block_hint,KeyedLoadIC,548,549,1 -block_hint,KeyedLoadIC,311,312,1 -block_hint,KeyedLoadIC,65,66,0 -block_hint,KeyedLoadIC,301,302,0 -block_hint,KeyedLoadIC,515,516,1 -block_hint,KeyedLoadIC,303,304,1 -block_hint,KeyedLoadIC,218,219,0 -block_hint,KeyedLoadIC,175,176,1 -block_hint,KeyedLoadIC,550,551,0 -block_hint,KeyedLoadIC,446,447,1 +block_hint,KeyedLoadIC,627,628,0 +block_hint,KeyedLoadIC,576,577,1 +block_hint,KeyedLoadIC,151,152,1 +block_hint,KeyedLoadIC,621,622,0 +block_hint,KeyedLoadIC,475,476,0 +block_hint,KeyedLoadIC,101,102,1 +block_hint,KeyedLoadIC,667,668,0 +block_hint,KeyedLoadIC,669,670,0 +block_hint,KeyedLoadIC,635,636,1 +block_hint,KeyedLoadIC,637,638,1 +block_hint,KeyedLoadIC,293,294,1 +block_hint,KeyedLoadIC,295,296,0 +block_hint,KeyedLoadIC,663,664,1 +block_hint,KeyedLoadIC,517,518,1 +block_hint,KeyedLoadIC,619,620,0 +block_hint,KeyedLoadIC,606,607,0 +block_hint,KeyedLoadIC,554,555,1 +block_hint,KeyedLoadIC,315,316,1 +block_hint,KeyedLoadIC,63,64,0 +block_hint,KeyedLoadIC,305,306,0 +block_hint,KeyedLoadIC,521,522,1 +block_hint,KeyedLoadIC,307,308,1 +block_hint,KeyedLoadIC,221,222,0 +block_hint,KeyedLoadIC,177,178,1 +block_hint,KeyedLoadIC,556,557,0 +block_hint,KeyedLoadIC,450,451,1 +block_hint,KeyedLoadIC,113,114,0 block_hint,KeyedLoadIC,115,116,0 -block_hint,KeyedLoadIC,117,118,0 -block_hint,KeyedLoadIC,400,401,1 -block_hint,KeyedLoadIC,599,600,1 -block_hint,KeyedLoadIC,241,242,1 -block_hint,KeyedLoadIC,552,553,0 
-block_hint,KeyedLoadIC,504,505,0 -block_hint,KeyedLoadIC,432,433,1 -block_hint,KeyedLoadIC,663,664,0 -block_hint,KeyedLoadIC,121,122,1 -block_hint,KeyedLoadIC,319,320,1 -block_hint,KeyedLoadIC,321,322,1 -block_hint,KeyedLoadIC,67,68,0 -block_hint,KeyedLoadIC_Megamorphic,482,483,1 -block_hint,KeyedLoadIC_Megamorphic,484,485,0 -block_hint,KeyedLoadIC_Megamorphic,1139,1140,0 -block_hint,KeyedLoadIC_Megamorphic,1141,1142,1 -block_hint,KeyedLoadIC_Megamorphic,1117,1118,1 -block_hint,KeyedLoadIC_Megamorphic,1078,1079,0 -block_hint,KeyedLoadIC_Megamorphic,1133,1134,1 -block_hint,KeyedLoadIC_Megamorphic,1143,1144,1 -block_hint,KeyedLoadIC_Megamorphic,1119,1120,1 -block_hint,KeyedLoadIC_Megamorphic,1135,1136,0 -block_hint,KeyedLoadIC_Megamorphic,1053,1054,0 -block_hint,KeyedLoadIC_Megamorphic,893,894,1 -block_hint,KeyedLoadIC_Megamorphic,891,892,1 -block_hint,KeyedLoadIC_Megamorphic,514,515,1 +block_hint,KeyedLoadIC,406,407,1 +block_hint,KeyedLoadIC,609,610,1 +block_hint,KeyedLoadIC,245,246,1 +block_hint,KeyedLoadIC,558,559,0 +block_hint,KeyedLoadIC,508,509,0 +block_hint,KeyedLoadIC,436,437,1 +block_hint,KeyedLoadIC,673,674,0 +block_hint,KeyedLoadIC,119,120,1 +block_hint,KeyedLoadIC,323,324,1 +block_hint,KeyedLoadIC,325,326,1 +block_hint,KeyedLoadIC,65,66,0 +block_hint,KeyedLoadIC_Megamorphic,496,497,1 +block_hint,KeyedLoadIC_Megamorphic,498,499,0 +block_hint,KeyedLoadIC_Megamorphic,1218,1219,0 +block_hint,KeyedLoadIC_Megamorphic,1220,1221,1 +block_hint,KeyedLoadIC_Megamorphic,1192,1193,1 +block_hint,KeyedLoadIC_Megamorphic,1145,1146,0 +block_hint,KeyedLoadIC_Megamorphic,1212,1213,1 +block_hint,KeyedLoadIC_Megamorphic,1222,1223,1 +block_hint,KeyedLoadIC_Megamorphic,1194,1195,1 +block_hint,KeyedLoadIC_Megamorphic,1214,1215,0 block_hint,KeyedLoadIC_Megamorphic,1111,1112,0 -block_hint,KeyedLoadIC_Megamorphic,1113,1114,0 -block_hint,KeyedLoadIC_Megamorphic,1088,1089,0 -block_hint,KeyedLoadIC_Megamorphic,1086,1087,1 -block_hint,KeyedLoadIC_Megamorphic,1107,1108,0 -block_hint,KeyedLoadIC_Megamorphic,1082,1083,0 -block_hint,KeyedLoadIC_Megamorphic,895,896,1 -block_hint,KeyedLoadIC_Megamorphic,905,906,0 -block_hint,KeyedLoadIC_Megamorphic,1034,1035,0 -block_hint,KeyedLoadIC_Megamorphic,239,240,1 -block_hint,KeyedLoadIC_Megamorphic,1036,1037,0 -block_hint,KeyedLoadIC_Megamorphic,241,242,0 -block_hint,KeyedLoadIC_Megamorphic,1022,1023,0 -block_hint,KeyedLoadIC_Megamorphic,1129,1130,1 -block_hint,KeyedLoadIC_Megamorphic,1020,1021,0 -block_hint,KeyedLoadIC_Megamorphic,1018,1019,0 -block_hint,KeyedLoadIC_Megamorphic,627,628,1 -block_hint,KeyedLoadIC_Megamorphic,125,126,1 -block_hint,KeyedLoadIC_Megamorphic,199,200,0 -block_hint,KeyedLoadIC_Megamorphic,629,630,0 -block_hint,KeyedLoadIC_Megamorphic,544,545,0 -block_hint,KeyedLoadIC_Megamorphic,995,996,0 -block_hint,KeyedLoadIC_Megamorphic,948,949,0 -block_hint,KeyedLoadIC_Megamorphic,633,634,1 -block_hint,KeyedLoadIC_Megamorphic,635,636,0 -block_hint,KeyedLoadIC_Megamorphic,637,638,1 -block_hint,KeyedLoadIC_Megamorphic,256,257,1 -block_hint,KeyedLoadIC_Megamorphic,639,640,0 -block_hint,KeyedLoadIC_Megamorphic,258,259,0 -block_hint,KeyedLoadIC_Megamorphic,817,818,0 +block_hint,KeyedLoadIC_Megamorphic,927,928,1 +block_hint,KeyedLoadIC_Megamorphic,925,926,1 +block_hint,KeyedLoadIC_Megamorphic,528,529,1 +block_hint,KeyedLoadIC_Megamorphic,1186,1187,0 +block_hint,KeyedLoadIC_Megamorphic,1188,1189,0 +block_hint,KeyedLoadIC_Megamorphic,1155,1156,0 +block_hint,KeyedLoadIC_Megamorphic,1153,1154,1 +block_hint,KeyedLoadIC_Megamorphic,1182,1183,0 
+block_hint,KeyedLoadIC_Megamorphic,1149,1150,0 +block_hint,KeyedLoadIC_Megamorphic,929,930,1 +block_hint,KeyedLoadIC_Megamorphic,939,940,0 +block_hint,KeyedLoadIC_Megamorphic,640,641,0 +block_hint,KeyedLoadIC_Megamorphic,1172,1173,0 +block_hint,KeyedLoadIC_Megamorphic,1085,1086,0 +block_hint,KeyedLoadIC_Megamorphic,1131,1132,0 +block_hint,KeyedLoadIC_Megamorphic,233,234,1 +block_hint,KeyedLoadIC_Megamorphic,1087,1088,0 +block_hint,KeyedLoadIC_Megamorphic,235,236,0 +block_hint,KeyedLoadIC_Megamorphic,1070,1071,0 +block_hint,KeyedLoadIC_Megamorphic,1204,1205,1 +block_hint,KeyedLoadIC_Megamorphic,1068,1069,0 +block_hint,KeyedLoadIC_Megamorphic,1066,1067,0 +block_hint,KeyedLoadIC_Megamorphic,980,981,1 +block_hint,KeyedLoadIC_Megamorphic,239,240,0 +block_hint,KeyedLoadIC_Megamorphic,121,122,1 +block_hint,KeyedLoadIC_Megamorphic,195,196,0 +block_hint,KeyedLoadIC_Megamorphic,644,645,0 +block_hint,KeyedLoadIC_Megamorphic,558,559,0 +block_hint,KeyedLoadIC_Megamorphic,1043,1044,0 +block_hint,KeyedLoadIC_Megamorphic,983,984,0 block_hint,KeyedLoadIC_Megamorphic,648,649,1 -block_hint,KeyedLoadIC_Megamorphic,641,642,0 -block_hint,KeyedLoadIC_Megamorphic,1028,1029,0 -block_hint,KeyedLoadIC_Megamorphic,1102,1103,0 -block_hint,KeyedLoadIC_Megamorphic,1131,1132,1 +block_hint,KeyedLoadIC_Megamorphic,666,667,0 +block_hint,KeyedLoadIC_Megamorphic,1174,1175,0 +block_hint,KeyedLoadIC_Megamorphic,650,651,0 +block_hint,KeyedLoadIC_Megamorphic,1089,1090,0 +block_hint,KeyedLoadIC_Megamorphic,652,653,1 +block_hint,KeyedLoadIC_Megamorphic,250,251,1 +block_hint,KeyedLoadIC_Megamorphic,654,655,0 +block_hint,KeyedLoadIC_Megamorphic,252,253,0 +block_hint,KeyedLoadIC_Megamorphic,842,843,0 +block_hint,KeyedLoadIC_Megamorphic,987,988,1 +block_hint,KeyedLoadIC_Megamorphic,256,257,0 +block_hint,KeyedLoadIC_Megamorphic,656,657,0 +block_hint,KeyedLoadIC_Megamorphic,258,259,1 +block_hint,KeyedLoadIC_Megamorphic,1076,1077,0 +block_hint,KeyedLoadIC_Megamorphic,1169,1170,0 +block_hint,KeyedLoadIC_Megamorphic,1206,1207,1 +block_hint,KeyedLoadIC_Megamorphic,1074,1075,0 +block_hint,KeyedLoadIC_Megamorphic,123,124,1 +block_hint,KeyedLoadIC_Megamorphic,203,204,0 +block_hint,KeyedLoadIC_Megamorphic,923,924,0 +block_hint,KeyedLoadIC_Megamorphic,675,676,0 +block_hint,KeyedLoadIC_Megamorphic,1176,1177,0 +block_hint,KeyedLoadIC_Megamorphic,1208,1209,0 +block_hint,KeyedLoadIC_Megamorphic,1135,1136,0 +block_hint,KeyedLoadIC_Megamorphic,844,845,1 +block_hint,KeyedLoadIC_Megamorphic,268,269,1 +block_hint,KeyedLoadIC_Megamorphic,1200,1201,0 +block_hint,KeyedLoadIC_Megamorphic,270,271,0 +block_hint,KeyedLoadIC_Megamorphic,1056,1057,0 +block_hint,KeyedLoadIC_Megamorphic,1198,1199,1 +block_hint,KeyedLoadIC_Megamorphic,1054,1055,0 +block_hint,KeyedLoadIC_Megamorphic,1116,1117,1 +block_hint,KeyedLoadIC_Megamorphic,1107,1108,0 +block_hint,KeyedLoadIC_Megamorphic,1210,1211,0 +block_hint,KeyedLoadIC_Megamorphic,1101,1102,1 +block_hint,KeyedLoadIC_Megamorphic,740,741,1 +block_hint,KeyedLoadIC_Megamorphic,1017,1018,1 +block_hint,KeyedLoadIC_Megamorphic,736,737,0 +block_hint,KeyedLoadIC_Megamorphic,112,113,0 +block_hint,KeyedLoadIC_Megamorphic,877,878,0 +block_hint,KeyedLoadIC_Megamorphic,338,339,1 +block_hint,KeyedLoadIC_Megamorphic,863,864,1 +block_hint,KeyedLoadIC_Megamorphic,76,77,1 +block_hint,KeyedLoadIC_Megamorphic,368,369,0 +block_hint,KeyedLoadIC_Megamorphic,728,729,0 +block_hint,KeyedLoadIC_Megamorphic,93,94,1 +block_hint,KeyedLoadIC_Megamorphic,998,999,1 +block_hint,KeyedLoadIC_Megamorphic,294,295,0 +block_hint,KeyedLoadIC_Megamorphic,115,116,1 
+block_hint,KeyedLoadIC_Megamorphic,179,180,0 +block_hint,KeyedLoadIC_Megamorphic,960,961,0 +block_hint,KeyedLoadIC_Megamorphic,817,818,1 +block_hint,KeyedLoadIC_Megamorphic,183,184,1 +block_hint,KeyedLoadIC_Megamorphic,681,682,0 +block_hint,KeyedLoadIC_Megamorphic,524,525,0 +block_hint,KeyedLoadIC_Megamorphic,1031,1032,0 +block_hint,KeyedLoadIC_Megamorphic,1001,1002,0 +block_hint,KeyedLoadIC_Megamorphic,685,686,1 +block_hint,KeyedLoadIC_Megamorphic,856,857,1 +block_hint,KeyedLoadIC_Megamorphic,1178,1179,0 +block_hint,KeyedLoadIC_Megamorphic,307,308,0 +block_hint,KeyedLoadIC_Megamorphic,858,859,0 +block_hint,KeyedLoadIC_Megamorphic,1062,1063,0 +block_hint,KeyedLoadIC_Megamorphic,187,188,0 +block_hint,KeyedLoadIC_Megamorphic,947,948,0 +block_hint,KeyedLoadIC_Megamorphic,1142,1143,0 +block_hint,KeyedLoadIC_Megamorphic,905,906,1 +block_hint,KeyedLoadIC_Megamorphic,127,128,0 +block_hint,KeyedLoadIC_Megamorphic,718,719,0 block_hint,KeyedLoadIC_Megamorphic,1026,1027,0 -block_hint,KeyedLoadIC_Megamorphic,127,128,1 -block_hint,KeyedLoadIC_Megamorphic,207,208,0 -block_hint,KeyedLoadIC_Megamorphic,889,890,0 -block_hint,KeyedLoadIC_Megamorphic,1038,1039,0 -block_hint,KeyedLoadIC_Megamorphic,821,822,1 -block_hint,KeyedLoadIC_Megamorphic,272,273,1 -block_hint,KeyedLoadIC_Megamorphic,1125,1126,0 -block_hint,KeyedLoadIC_Megamorphic,274,275,0 -block_hint,KeyedLoadIC_Megamorphic,1008,1009,0 -block_hint,KeyedLoadIC_Megamorphic,1123,1124,1 -block_hint,KeyedLoadIC_Megamorphic,1006,1007,0 -block_hint,KeyedLoadIC_Megamorphic,1058,1059,1 -block_hint,KeyedLoadIC_Megamorphic,1049,1050,0 -block_hint,KeyedLoadIC_Megamorphic,1043,1044,1 -block_hint,KeyedLoadIC_Megamorphic,718,719,1 -block_hint,KeyedLoadIC_Megamorphic,969,970,1 +block_hint,KeyedLoadIC_Megamorphic,562,563,1 +block_hint,KeyedLoadIC_Megamorphic,321,322,0 block_hint,KeyedLoadIC_Megamorphic,714,715,0 -block_hint,KeyedLoadIC_Megamorphic,116,117,0 -block_hint,KeyedLoadIC_Megamorphic,847,848,0 -block_hint,KeyedLoadIC_Megamorphic,332,333,1 -block_hint,KeyedLoadIC_Megamorphic,833,834,1 -block_hint,KeyedLoadIC_Megamorphic,80,81,1 -block_hint,KeyedLoadIC_Megamorphic,362,363,0 -block_hint,KeyedLoadIC_Megamorphic,706,707,0 -block_hint,KeyedLoadIC_Megamorphic,97,98,1 -block_hint,KeyedLoadIC_Megamorphic,661,662,1 -block_hint,KeyedLoadIC_Megamorphic,119,120,1 -block_hint,KeyedLoadIC_Megamorphic,183,184,0 -block_hint,KeyedLoadIC_Megamorphic,926,927,0 -block_hint,KeyedLoadIC_Megamorphic,795,796,1 -block_hint,KeyedLoadIC_Megamorphic,187,188,1 -block_hint,KeyedLoadIC_Megamorphic,663,664,0 -block_hint,KeyedLoadIC_Megamorphic,510,511,0 -block_hint,KeyedLoadIC_Megamorphic,983,984,0 -block_hint,KeyedLoadIC_Megamorphic,957,958,0 -block_hint,KeyedLoadIC_Megamorphic,667,668,1 -block_hint,KeyedLoadIC_Megamorphic,301,302,1 -block_hint,KeyedLoadIC_Megamorphic,307,308,0 -block_hint,KeyedLoadIC_Megamorphic,830,831,0 -block_hint,KeyedLoadIC_Megamorphic,1014,1015,0 -block_hint,KeyedLoadIC_Megamorphic,191,192,0 -block_hint,KeyedLoadIC_Megamorphic,913,914,0 -block_hint,KeyedLoadIC_Megamorphic,1075,1076,0 -block_hint,KeyedLoadIC_Megamorphic,871,872,1 -block_hint,KeyedLoadIC_Megamorphic,131,132,0 -block_hint,KeyedLoadIC_Megamorphic,696,697,0 -block_hint,KeyedLoadIC_Megamorphic,978,979,0 -block_hint,KeyedLoadIC_Megamorphic,548,549,1 -block_hint,KeyedLoadIC_Megamorphic,317,318,0 -block_hint,KeyedLoadIC_Megamorphic,692,693,0 -block_hint,KeyedLoadIC_Megamorphic,550,551,0 -block_hint,KeyedLoadIC_Megamorphic,133,134,1 -block_hint,KeyedLoadIC_Megamorphic,558,559,0 
-block_hint,KeyedLoadIC_Megamorphic,875,876,1 -block_hint,KeyedLoadIC_Megamorphic,478,479,0 -block_hint,KeyedLoadIC_Megamorphic,911,912,0 -block_hint,KeyedLoadIC_Megamorphic,700,701,1 -block_hint,KeyedLoadIC_Megamorphic,566,567,0 -block_hint,KeyedLoadIC_Megamorphic,217,218,0 -block_hint,KeyedLoadIC_Megamorphic,476,477,1 -block_hint,KeyedLoadIC_Megamorphic,568,569,1 -block_hint,KeyedLoadIC_Megamorphic,149,150,1 +block_hint,KeyedLoadIC_Megamorphic,564,565,0 +block_hint,KeyedLoadIC_Megamorphic,129,130,1 +block_hint,KeyedLoadIC_Megamorphic,572,573,0 +block_hint,KeyedLoadIC_Megamorphic,909,910,1 +block_hint,KeyedLoadIC_Megamorphic,492,493,0 +block_hint,KeyedLoadIC_Megamorphic,945,946,0 +block_hint,KeyedLoadIC_Megamorphic,722,723,1 +block_hint,KeyedLoadIC_Megamorphic,580,581,0 +block_hint,KeyedLoadIC_Megamorphic,213,214,0 +block_hint,KeyedLoadIC_Megamorphic,490,491,1 +block_hint,KeyedLoadIC_Megamorphic,582,583,1 +block_hint,KeyedLoadIC_Megamorphic,145,146,1 block_hint,KeyedLoadICTrampoline,3,4,1 block_hint,KeyedLoadICTrampoline_Megamorphic,3,4,1 -block_hint,StoreGlobalIC,73,74,0 -block_hint,StoreGlobalIC,224,225,1 -block_hint,StoreGlobalIC,263,264,0 -block_hint,StoreGlobalIC,141,142,0 -block_hint,StoreGlobalIC,201,202,0 -block_hint,StoreGlobalIC,89,90,0 -block_hint,StoreGlobalIC,143,144,1 -block_hint,StoreGlobalIC,91,92,1 -block_hint,StoreGlobalIC,16,17,1 +block_hint,StoreGlobalIC,72,73,0 +block_hint,StoreGlobalIC,229,230,1 +block_hint,StoreGlobalIC,268,269,0 +block_hint,StoreGlobalIC,144,145,0 +block_hint,StoreGlobalIC,205,206,0 +block_hint,StoreGlobalIC,92,93,0 +block_hint,StoreGlobalIC,146,147,1 +block_hint,StoreGlobalIC,94,95,1 +block_hint,StoreGlobalIC,15,16,1 block_hint,StoreGlobalICTrampoline,3,4,1 -block_hint,StoreIC,328,329,1 -block_hint,StoreIC,140,141,0 -block_hint,StoreIC,71,72,0 -block_hint,StoreIC,202,203,0 -block_hint,StoreIC,204,205,1 -block_hint,StoreIC,374,375,1 -block_hint,StoreIC,234,235,1 -block_hint,StoreIC,236,237,1 -block_hint,StoreIC,76,77,1 -block_hint,StoreIC,244,245,1 -block_hint,StoreIC,104,105,0 -block_hint,StoreIC,37,38,0 -block_hint,StoreIC,210,211,1 -block_hint,StoreIC,142,143,0 +block_hint,StoreIC,338,339,1 block_hint,StoreIC,144,145,0 -block_hint,StoreIC,18,19,1 +block_hint,StoreIC,69,70,0 +block_hint,StoreIC,208,209,0 +block_hint,StoreIC,210,211,1 +block_hint,StoreIC,395,396,1 +block_hint,StoreIC,386,387,0 +block_hint,StoreIC,240,241,1 +block_hint,StoreIC,242,243,1 +block_hint,StoreIC,74,75,1 +block_hint,StoreIC,250,251,1 +block_hint,StoreIC,108,109,0 +block_hint,StoreIC,35,36,0 +block_hint,StoreIC,316,317,1 block_hint,StoreIC,92,93,0 -block_hint,StoreIC,20,21,0 -block_hint,StoreIC,347,348,0 -block_hint,StoreIC,154,155,1 +block_hint,StoreIC,146,147,0 +block_hint,StoreIC,150,151,0 +block_hint,StoreIC,16,17,1 +block_hint,StoreIC,96,97,0 +block_hint,StoreIC,18,19,0 +block_hint,StoreIC,359,360,0 +block_hint,StoreIC,160,161,1 +block_hint,StoreIC,162,163,1 +block_hint,StoreIC,327,328,1 +block_hint,StoreIC,164,165,0 +block_hint,StoreIC,105,106,0 +block_hint,StoreIC,103,104,1 +block_hint,StoreIC,320,321,1 +block_hint,StoreIC,23,24,0 +block_hint,StoreIC,152,153,1 +block_hint,StoreIC,287,288,0 +block_hint,StoreIC,154,155,0 block_hint,StoreIC,156,157,1 -block_hint,StoreIC,317,318,1 +block_hint,StoreIC,323,324,1 +block_hint,StoreIC,25,26,1 block_hint,StoreIC,158,159,0 -block_hint,StoreIC,101,102,0 -block_hint,StoreIC,99,100,1 -block_hint,StoreIC,310,311,1 -block_hint,StoreIC,25,26,0 -block_hint,StoreIC,146,147,1 -block_hint,StoreIC,281,282,0 -block_hint,StoreIC,148,149,0 
-block_hint,StoreIC,150,151,1 -block_hint,StoreIC,313,314,1 -block_hint,StoreIC,27,28,1 -block_hint,StoreIC,152,153,0 -block_hint,StoreIC,315,316,1 -block_hint,StoreIC,33,34,0 -block_hint,StoreIC,31,32,1 -block_hint,StoreIC,223,224,1 -block_hint,StoreIC,65,66,0 -block_hint,StoreIC,285,286,0 -block_hint,StoreIC,160,161,1 -block_hint,StoreIC,287,288,1 -block_hint,StoreIC,206,207,1 -block_hint,StoreIC,240,241,0 -block_hint,StoreIC,170,171,0 +block_hint,StoreIC,325,326,1 +block_hint,StoreIC,31,32,0 +block_hint,StoreIC,29,30,1 +block_hint,StoreIC,227,228,1 +block_hint,StoreIC,63,64,0 +block_hint,StoreIC,291,292,0 +block_hint,StoreIC,166,167,1 +block_hint,StoreIC,293,294,1 +block_hint,StoreIC,312,313,1 +block_hint,StoreIC,76,77,0 +block_hint,StoreIC,176,177,0 +block_hint,StoreIC,43,44,1 +block_hint,StoreIC,112,113,0 +block_hint,StoreIC,178,179,0 +block_hint,StoreIC,271,272,0 +block_hint,StoreIC,125,126,1 +block_hint,StoreIC,371,372,0 +block_hint,StoreIC,267,268,1 block_hint,StoreIC,45,46,1 -block_hint,StoreIC,108,109,0 -block_hint,StoreIC,172,173,0 -block_hint,StoreIC,265,266,0 -block_hint,StoreIC,121,122,1 -block_hint,StoreIC,359,360,0 -block_hint,StoreIC,261,262,1 block_hint,StoreIC,47,48,1 +block_hint,StoreIC,121,122,0 block_hint,StoreIC,49,50,1 -block_hint,StoreIC,117,118,0 +block_hint,StoreIC,123,124,0 block_hint,StoreIC,51,52,1 -block_hint,StoreIC,119,120,0 +block_hint,StoreIC,80,81,0 block_hint,StoreIC,53,54,1 block_hint,StoreIC,55,56,1 +block_hint,StoreIC,333,334,0 block_hint,StoreIC,57,58,1 -block_hint,StoreIC,323,324,0 -block_hint,StoreIC,59,60,1 -block_hint,StoreIC,178,179,0 -block_hint,StoreIC,180,181,0 -block_hint,StoreIC,225,226,0 -block_hint,StoreIC,129,130,0 -block_hint,StoreIC,293,294,0 -block_hint,StoreIC,184,185,1 +block_hint,StoreIC,184,185,0 block_hint,StoreIC,186,187,0 -block_hint,StoreIC,275,276,0 -block_hint,StoreIC,353,354,0 -block_hint,StoreIC,295,296,1 -block_hint,StoreIC,188,189,1 +block_hint,StoreIC,229,230,0 +block_hint,StoreIC,133,134,0 +block_hint,StoreIC,299,300,0 +block_hint,StoreIC,190,191,1 +block_hint,StoreIC,192,193,0 +block_hint,StoreIC,281,282,0 +block_hint,StoreIC,365,366,0 +block_hint,StoreIC,301,302,1 block_hint,StoreIC,194,195,1 -block_hint,StoreIC,196,197,0 -block_hint,StoreIC,198,199,0 block_hint,StoreIC,200,201,1 -block_hint,StoreIC,192,193,1 -block_hint,StoreIC,190,191,0 -block_hint,StoreIC,372,373,0 -block_hint,StoreIC,376,377,1 -block_hint,StoreIC,345,346,1 -block_hint,StoreIC,306,307,1 -block_hint,StoreIC,82,83,0 -block_hint,StoreIC,135,136,0 -block_hint,StoreIC,227,228,1 +block_hint,StoreIC,202,203,0 +block_hint,StoreIC,204,205,0 +block_hint,StoreIC,206,207,1 +block_hint,StoreIC,198,199,1 +block_hint,StoreIC,196,197,0 +block_hint,StoreIC,384,385,0 +block_hint,StoreIC,388,389,1 +block_hint,StoreIC,357,358,1 +block_hint,StoreIC,314,315,1 +block_hint,StoreIC,84,85,0 +block_hint,StoreIC,139,140,0 +block_hint,StoreIC,231,232,1 block_hint,StoreICTrampoline,3,4,1 -block_hint,DefineNamedOwnIC,320,321,1 -block_hint,DefineNamedOwnIC,139,140,0 -block_hint,DefineNamedOwnIC,295,296,1 -block_hint,DefineNamedOwnIC,197,198,0 -block_hint,DefineNamedOwnIC,71,72,0 -block_hint,DefineNamedOwnIC,199,200,0 -block_hint,DefineNamedOwnIC,317,318,0 -block_hint,DefineNamedOwnIC,238,239,1 -block_hint,DefineNamedOwnIC,89,90,0 -block_hint,DefineNamedOwnIC,19,20,0 -block_hint,DefineNamedOwnIC,339,340,0 -block_hint,DefineNamedOwnIC,277,278,1 -block_hint,DefineNamedOwnIC,151,152,1 -block_hint,DefineNamedOwnIC,153,154,1 -block_hint,DefineNamedOwnIC,249,250,1 
-block_hint,DefineNamedOwnIC,34,35,0 -block_hint,DefineNamedOwnIC,241,242,1 -block_hint,DefineNamedOwnIC,24,25,0 -block_hint,DefineNamedOwnIC,143,144,1 -block_hint,DefineNamedOwnIC,341,342,0 -block_hint,DefineNamedOwnIC,275,276,0 +block_hint,DefineNamedOwnIC,329,330,1 block_hint,DefineNamedOwnIC,145,146,0 -block_hint,DefineNamedOwnIC,147,148,1 +block_hint,DefineNamedOwnIC,300,301,1 +block_hint,DefineNamedOwnIC,203,204,0 +block_hint,DefineNamedOwnIC,69,70,0 +block_hint,DefineNamedOwnIC,205,206,0 +block_hint,DefineNamedOwnIC,326,327,0 block_hint,DefineNamedOwnIC,243,244,1 -block_hint,DefineNamedOwnIC,28,29,0 -block_hint,DefineNamedOwnIC,149,150,0 -block_hint,DefineNamedOwnIC,245,246,1 +block_hint,DefineNamedOwnIC,93,94,0 +block_hint,DefineNamedOwnIC,17,18,0 +block_hint,DefineNamedOwnIC,350,351,0 +block_hint,DefineNamedOwnIC,282,283,1 +block_hint,DefineNamedOwnIC,157,158,1 +block_hint,DefineNamedOwnIC,159,160,1 +block_hint,DefineNamedOwnIC,254,255,1 block_hint,DefineNamedOwnIC,32,33,0 -block_hint,KeyedStoreIC,391,392,1 +block_hint,DefineNamedOwnIC,246,247,1 +block_hint,DefineNamedOwnIC,22,23,0 +block_hint,DefineNamedOwnIC,149,150,1 +block_hint,DefineNamedOwnIC,352,353,0 +block_hint,DefineNamedOwnIC,280,281,0 +block_hint,DefineNamedOwnIC,151,152,0 +block_hint,DefineNamedOwnIC,153,154,1 +block_hint,DefineNamedOwnIC,248,249,1 +block_hint,DefineNamedOwnIC,26,27,0 +block_hint,DefineNamedOwnIC,155,156,0 +block_hint,DefineNamedOwnIC,250,251,1 +block_hint,DefineNamedOwnIC,30,31,0 +block_hint,KeyedStoreIC,401,402,1 +block_hint,KeyedStoreIC,173,174,0 block_hint,KeyedStoreIC,169,170,0 -block_hint,KeyedStoreIC,165,166,0 -block_hint,KeyedStoreIC,233,234,0 -block_hint,KeyedStoreIC,167,168,1 -block_hint,KeyedStoreIC,85,86,1 -block_hint,KeyedStoreIC,89,90,1 -block_hint,KeyedStoreIC,388,389,1 -block_hint,KeyedStoreIC,105,106,0 -block_hint,KeyedStoreIC,24,25,0 -block_hint,KeyedStoreIC,420,421,0 -block_hint,KeyedStoreIC,175,176,1 -block_hint,KeyedStoreIC,422,423,0 +block_hint,KeyedStoreIC,239,240,0 +block_hint,KeyedStoreIC,171,172,1 +block_hint,KeyedStoreIC,83,84,1 +block_hint,KeyedStoreIC,87,88,1 +block_hint,KeyedStoreIC,398,399,1 +block_hint,KeyedStoreIC,109,110,0 +block_hint,KeyedStoreIC,22,23,0 +block_hint,KeyedStoreIC,432,433,0 +block_hint,KeyedStoreIC,181,182,1 +block_hint,KeyedStoreIC,434,435,0 +block_hint,KeyedStoreIC,351,352,0 +block_hint,KeyedStoreIC,298,299,1 +block_hint,KeyedStoreIC,31,32,0 +block_hint,KeyedStoreIC,272,273,0 +block_hint,KeyedStoreIC,355,356,0 +block_hint,KeyedStoreIC,195,196,1 +block_hint,KeyedStoreIC,260,261,1 +block_hint,KeyedStoreIC,436,437,1 +block_hint,KeyedStoreIC,329,330,0 +block_hint,KeyedStoreIC,137,138,1 +block_hint,KeyedStoreIC,45,46,1 +block_hint,KeyedStoreIC,197,198,0 +block_hint,KeyedStoreIC,47,48,0 +block_hint,KeyedStoreIC,215,216,0 +block_hint,KeyedStoreIC,361,362,1 +block_hint,KeyedStoreIC,363,364,0 +block_hint,KeyedStoreIC,221,222,1 +block_hint,KeyedStoreIC,223,224,0 block_hint,KeyedStoreIC,345,346,0 -block_hint,KeyedStoreIC,292,293,1 -block_hint,KeyedStoreIC,33,34,0 -block_hint,KeyedStoreIC,268,269,0 -block_hint,KeyedStoreIC,349,350,0 -block_hint,KeyedStoreIC,189,190,1 -block_hint,KeyedStoreIC,256,257,1 -block_hint,KeyedStoreIC,424,425,1 -block_hint,KeyedStoreIC,323,324,0 -block_hint,KeyedStoreIC,133,134,1 -block_hint,KeyedStoreIC,47,48,1 -block_hint,KeyedStoreIC,191,192,0 -block_hint,KeyedStoreIC,49,50,0 -block_hint,KeyedStoreIC,209,210,0 -block_hint,KeyedStoreIC,355,356,1 -block_hint,KeyedStoreIC,357,358,0 -block_hint,KeyedStoreIC,215,216,1 
-block_hint,KeyedStoreIC,217,218,0 -block_hint,KeyedStoreIC,339,340,0 -block_hint,KeyedStoreIC,361,362,0 -block_hint,KeyedStoreIC,426,427,0 -block_hint,KeyedStoreIC,359,360,1 -block_hint,KeyedStoreIC,225,226,1 -block_hint,KeyedStoreIC,227,228,0 -block_hint,KeyedStoreIC,229,230,0 +block_hint,KeyedStoreIC,367,368,0 +block_hint,KeyedStoreIC,438,439,0 +block_hint,KeyedStoreIC,365,366,1 block_hint,KeyedStoreIC,231,232,1 -block_hint,KeyedStoreIC,441,442,0 -block_hint,KeyedStoreIC,418,419,1 -block_hint,KeyedStoreIC,274,275,0 -block_hint,KeyedStoreIC,369,370,1 -block_hint,KeyedStoreIC,95,96,0 -block_hint,KeyedStoreIC,160,161,0 +block_hint,KeyedStoreIC,233,234,0 +block_hint,KeyedStoreIC,235,236,0 +block_hint,KeyedStoreIC,237,238,1 +block_hint,KeyedStoreIC,453,454,0 +block_hint,KeyedStoreIC,430,431,1 +block_hint,KeyedStoreIC,278,279,0 +block_hint,KeyedStoreIC,377,378,1 +block_hint,KeyedStoreIC,97,98,0 +block_hint,KeyedStoreIC,164,165,0 block_hint,KeyedStoreICTrampoline,3,4,1 -block_hint,DefineKeyedOwnIC,383,384,1 -block_hint,DefineKeyedOwnIC,168,169,0 -block_hint,DefineKeyedOwnIC,164,165,1 +block_hint,DefineKeyedOwnIC,392,393,1 +block_hint,DefineKeyedOwnIC,174,175,0 +block_hint,DefineKeyedOwnIC,170,171,1 block_hint,StoreInArrayLiteralIC,30,31,1 block_hint,StoreInArrayLiteralIC,19,20,0 block_hint,StoreInArrayLiteralIC,23,24,0 @@ -1619,29 +1696,29 @@ block_hint,StoreInArrayLiteralIC,14,15,1 block_hint,StoreInArrayLiteralIC,16,17,1 block_hint,StoreInArrayLiteralIC,8,9,1 block_hint,StoreInArrayLiteralIC,4,5,1 -block_hint,LoadGlobalIC,61,62,0 -block_hint,LoadGlobalIC,15,16,1 -block_hint,LoadGlobalIC,17,18,1 -block_hint,LoadGlobalIC,19,20,1 -block_hint,LoadGlobalIC,188,189,0 -block_hint,LoadGlobalIC,13,14,0 -block_hint,LoadGlobalIC,109,110,1 -block_hint,LoadGlobalICInsideTypeof,61,62,0 -block_hint,LoadGlobalICInsideTypeof,190,191,1 -block_hint,LoadGlobalICInsideTypeof,13,14,0 -block_hint,LoadGlobalICInsideTypeof,109,110,0 -block_hint,LoadGlobalICInsideTypeof,21,22,1 -block_hint,LoadGlobalICInsideTypeof,23,24,1 -block_hint,LoadGlobalICInsideTypeof,249,250,1 -block_hint,LoadGlobalICInsideTypeof,205,206,0 -block_hint,LoadGlobalICInsideTypeof,59,60,0 -block_hint,LoadGlobalICInsideTypeof,217,218,0 -block_hint,LoadGlobalICInsideTypeof,111,112,1 -block_hint,LoadGlobalICInsideTypeof,25,26,1 -block_hint,LoadGlobalICInsideTypeof,226,227,1 -block_hint,LoadGlobalICInsideTypeof,196,197,0 -block_hint,LoadGlobalICInsideTypeof,44,45,0 -block_hint,LoadGlobalICInsideTypeof,42,43,1 +block_hint,LoadGlobalIC,60,61,0 +block_hint,LoadGlobalIC,14,15,1 +block_hint,LoadGlobalIC,16,17,1 +block_hint,LoadGlobalIC,18,19,1 +block_hint,LoadGlobalIC,191,192,0 +block_hint,LoadGlobalIC,12,13,0 +block_hint,LoadGlobalIC,111,112,1 +block_hint,LoadGlobalICInsideTypeof,60,61,0 +block_hint,LoadGlobalICInsideTypeof,193,194,1 +block_hint,LoadGlobalICInsideTypeof,12,13,0 +block_hint,LoadGlobalICInsideTypeof,111,112,0 +block_hint,LoadGlobalICInsideTypeof,20,21,1 +block_hint,LoadGlobalICInsideTypeof,22,23,1 +block_hint,LoadGlobalICInsideTypeof,254,255,1 +block_hint,LoadGlobalICInsideTypeof,208,209,0 +block_hint,LoadGlobalICInsideTypeof,58,59,0 +block_hint,LoadGlobalICInsideTypeof,220,221,0 +block_hint,LoadGlobalICInsideTypeof,113,114,1 +block_hint,LoadGlobalICInsideTypeof,24,25,1 +block_hint,LoadGlobalICInsideTypeof,229,230,1 +block_hint,LoadGlobalICInsideTypeof,199,200,0 +block_hint,LoadGlobalICInsideTypeof,43,44,0 +block_hint,LoadGlobalICInsideTypeof,41,42,1 block_hint,LoadGlobalICTrampoline,3,4,1 block_hint,LoadGlobalICInsideTypeofTrampoline,3,4,1 
block_hint,LookupGlobalICBaseline,3,4,1 @@ -1650,50 +1727,54 @@ block_hint,LookupGlobalICBaseline,5,6,1 block_hint,LookupGlobalICBaseline,11,12,1 block_hint,LookupGlobalICBaseline,7,8,1 block_hint,LookupGlobalICBaseline,9,10,0 -block_hint,KeyedHasIC,251,252,1 +block_hint,KeyedHasIC,261,262,1 +block_hint,KeyedHasIC,125,126,0 block_hint,KeyedHasIC,117,118,0 -block_hint,KeyedHasIC,109,110,0 -block_hint,KeyedHasIC,233,234,0 -block_hint,KeyedHasIC,157,158,0 -block_hint,KeyedHasIC,81,82,0 -block_hint,KeyedHasIC,111,112,1 -block_hint,KeyedHasIC,159,160,0 -block_hint,KeyedHasIC,115,116,1 -block_hint,KeyedHasIC,83,84,1 -block_hint,KeyedHasIC,193,194,0 -block_hint,KeyedHasIC,215,216,0 -block_hint,KeyedHasIC,273,274,0 -block_hint,KeyedHasIC,271,272,0 -block_hint,KeyedHasIC,153,154,1 -block_hint,KeyedHasIC,63,64,0 -block_hint,KeyedHasIC_Megamorphic,133,134,1 -block_hint,KeyedHasIC_Megamorphic,135,136,1 -block_hint,KeyedHasIC_Megamorphic,253,254,0 -block_hint,KeyedHasIC_Megamorphic,207,208,1 -block_hint,KeyedHasIC_Megamorphic,245,246,0 -block_hint,KeyedHasIC_Megamorphic,93,94,0 -block_hint,KeyedHasIC_Megamorphic,228,229,1 -block_hint,KeyedHasIC_Megamorphic,119,120,1 +block_hint,KeyedHasIC,239,240,0 +block_hint,KeyedHasIC,165,166,0 +block_hint,KeyedHasIC,77,78,0 +block_hint,KeyedHasIC,119,120,1 +block_hint,KeyedHasIC,167,168,0 +block_hint,KeyedHasIC,123,124,1 +block_hint,KeyedHasIC,79,80,1 +block_hint,KeyedHasIC,197,198,0 +block_hint,KeyedHasIC,221,222,0 +block_hint,KeyedHasIC,283,284,0 +block_hint,KeyedHasIC,281,282,0 +block_hint,KeyedHasIC,161,162,1 +block_hint,KeyedHasIC,61,62,0 block_hint,KeyedHasIC_Megamorphic,137,138,1 -block_hint,KeyedHasIC_Megamorphic,195,196,0 -block_hint,KeyedHasIC_Megamorphic,197,198,0 +block_hint,KeyedHasIC_Megamorphic,139,140,1 +block_hint,KeyedHasIC_Megamorphic,263,264,0 +block_hint,KeyedHasIC_Megamorphic,211,212,1 +block_hint,KeyedHasIC_Megamorphic,254,255,0 block_hint,KeyedHasIC_Megamorphic,97,98,0 -block_hint,KeyedHasIC_Megamorphic,95,96,0 -block_hint,KeyedHasIC_Megamorphic,241,242,0 -block_hint,KeyedHasIC_Megamorphic,232,233,0 -block_hint,KeyedHasIC_Megamorphic,249,250,0 +block_hint,KeyedHasIC_Megamorphic,234,235,1 +block_hint,KeyedHasIC_Megamorphic,123,124,1 +block_hint,KeyedHasIC_Megamorphic,141,142,1 +block_hint,KeyedHasIC_Megamorphic,199,200,0 block_hint,KeyedHasIC_Megamorphic,201,202,0 -block_hint,KeyedHasIC_Megamorphic,47,48,0 -block_hint,KeyedHasIC_Megamorphic,61,62,0 -block_hint,KeyedHasIC_Megamorphic,103,104,1 -block_hint,KeyedHasIC_Megamorphic,258,259,0 +block_hint,KeyedHasIC_Megamorphic,101,102,0 +block_hint,KeyedHasIC_Megamorphic,99,100,0 +block_hint,KeyedHasIC_Megamorphic,250,251,0 +block_hint,KeyedHasIC_Megamorphic,270,271,0 +block_hint,KeyedHasIC_Megamorphic,106,107,0 +block_hint,KeyedHasIC_Megamorphic,277,278,0 +block_hint,KeyedHasIC_Megamorphic,282,283,0 +block_hint,KeyedHasIC_Megamorphic,268,269,0 +block_hint,KeyedHasIC_Megamorphic,203,204,0 +block_hint,KeyedHasIC_Megamorphic,44,45,0 +block_hint,KeyedHasIC_Megamorphic,63,64,0 +block_hint,KeyedHasIC_Megamorphic,239,240,1 +block_hint,KeyedHasIC_Megamorphic,48,49,0 +block_hint,KeyedHasIC_Megamorphic,272,273,0 +block_hint,KeyedHasIC_Megamorphic,228,229,0 +block_hint,KeyedHasIC_Megamorphic,87,88,0 +block_hint,KeyedHasIC_Megamorphic,155,156,0 +block_hint,KeyedHasIC_Megamorphic,196,197,0 +block_hint,KeyedHasIC_Megamorphic,59,60,0 block_hint,KeyedHasIC_Megamorphic,222,223,0 -block_hint,KeyedHasIC_Megamorphic,83,84,0 -block_hint,KeyedHasIC_Megamorphic,151,152,0 -block_hint,KeyedHasIC_Megamorphic,192,193,0 
-block_hint,KeyedHasIC_Megamorphic,58,59,0 -block_hint,KeyedHasIC_Megamorphic,216,217,0 -block_hint,KeyedHasIC_Megamorphic,56,57,1 +block_hint,KeyedHasIC_Megamorphic,57,58,1 block_hint,IterableToList,42,43,1 block_hint,IterableToList,44,45,1 block_hint,IterableToList,46,47,1 @@ -1754,21 +1835,21 @@ block_hint,FindOrderedHashMapEntry,22,23,0 block_hint,FindOrderedHashMapEntry,68,69,0 block_hint,FindOrderedHashMapEntry,58,59,1 block_hint,FindOrderedHashMapEntry,60,61,1 -block_hint,MapConstructor,323,324,1 -block_hint,MapConstructor,243,244,1 -block_hint,MapConstructor,100,101,0 +block_hint,MapConstructor,328,329,1 +block_hint,MapConstructor,248,249,1 +block_hint,MapConstructor,105,106,0 block_hint,MapConstructor,13,14,1 -block_hint,MapConstructor,265,266,1 -block_hint,MapConstructor,205,206,1 -block_hint,MapConstructor,83,84,0 -block_hint,MapConstructor,85,86,1 -block_hint,MapConstructor,267,268,1 -block_hint,MapConstructor,300,301,0 -block_hint,MapConstructor,314,315,0 -block_hint,MapConstructor,215,216,0 -block_hint,MapConstructor,104,105,0 -block_hint,MapConstructor,233,234,1 -block_hint,MapConstructor,98,99,1 +block_hint,MapConstructor,270,271,1 +block_hint,MapConstructor,211,212,1 +block_hint,MapConstructor,86,87,0 +block_hint,MapConstructor,88,89,1 +block_hint,MapConstructor,272,273,1 +block_hint,MapConstructor,308,309,0 +block_hint,MapConstructor,319,320,0 +block_hint,MapConstructor,220,221,0 +block_hint,MapConstructor,109,110,0 +block_hint,MapConstructor,238,239,1 +block_hint,MapConstructor,103,104,1 block_hint,MapPrototypeSet,98,99,1 block_hint,MapPrototypeSet,62,63,1 block_hint,MapPrototypeSet,64,65,1 @@ -1842,91 +1923,91 @@ block_hint,MapIteratorPrototypeNext,15,16,1 block_hint,MapIteratorPrototypeNext,17,18,1 block_hint,MapIteratorPrototypeNext,25,26,1 block_hint,SameValueNumbersOnly,4,5,1 -block_hint,Add_Baseline,32,33,0 -block_hint,Add_Baseline,21,22,0 -block_hint,Add_Baseline,8,9,1 -block_hint,Add_Baseline,58,59,0 -block_hint,Add_Baseline,35,36,1 -block_hint,Add_Baseline,47,48,0 -block_hint,Add_Baseline,17,18,1 -block_hint,Add_Baseline,53,54,1 -block_hint,Add_Baseline,19,20,1 -block_hint,Add_Baseline,26,27,1 -block_hint,Add_Baseline,10,11,1 -block_hint,AddSmi_Baseline,32,33,0 -block_hint,AddSmi_Baseline,21,22,0 -block_hint,AddSmi_Baseline,8,9,1 -block_hint,AddSmi_Baseline,49,50,1 -block_hint,AddSmi_Baseline,26,27,1 -block_hint,AddSmi_Baseline,10,11,1 -block_hint,Subtract_Baseline,21,22,0 -block_hint,Subtract_Baseline,8,9,1 -block_hint,Subtract_Baseline,46,47,1 -block_hint,Subtract_Baseline,56,57,1 -block_hint,Subtract_Baseline,54,55,0 -block_hint,Subtract_Baseline,42,43,0 -block_hint,Subtract_Baseline,48,49,1 -block_hint,Subtract_Baseline,17,18,1 +block_hint,Add_Baseline,39,40,0 +block_hint,Add_Baseline,25,26,0 +block_hint,Add_Baseline,9,10,1 +block_hint,Add_Baseline,84,85,0 +block_hint,Add_Baseline,46,47,1 +block_hint,Add_Baseline,56,57,0 +block_hint,Add_Baseline,20,21,1 +block_hint,Add_Baseline,64,65,1 +block_hint,Add_Baseline,23,24,1 +block_hint,Add_Baseline,31,32,1 +block_hint,Add_Baseline,11,12,1 +block_hint,AddSmi_Baseline,39,40,0 +block_hint,AddSmi_Baseline,25,26,0 +block_hint,AddSmi_Baseline,9,10,1 +block_hint,AddSmi_Baseline,60,61,1 +block_hint,AddSmi_Baseline,31,32,1 +block_hint,AddSmi_Baseline,11,12,1 +block_hint,Subtract_Baseline,31,32,0 +block_hint,Subtract_Baseline,11,12,1 +block_hint,Subtract_Baseline,60,61,1 +block_hint,Subtract_Baseline,82,83,1 +block_hint,Subtract_Baseline,76,77,0 +block_hint,Subtract_Baseline,53,54,0 +block_hint,Subtract_Baseline,62,63,1 
block_hint,Subtract_Baseline,23,24,1 -block_hint,Subtract_Baseline,10,11,1 -block_hint,SubtractSmi_Baseline,21,22,0 -block_hint,SubtractSmi_Baseline,8,9,1 -block_hint,SubtractSmi_Baseline,38,39,1 -block_hint,SubtractSmi_Baseline,23,24,1 -block_hint,SubtractSmi_Baseline,10,11,1 -block_hint,Multiply_Baseline,62,63,0 -block_hint,Multiply_Baseline,40,41,0 -block_hint,Multiply_Baseline,48,49,0 -block_hint,Multiply_Baseline,54,55,1 -block_hint,Multiply_Baseline,50,51,1 -block_hint,Multiply_Baseline,6,7,1 -block_hint,Multiply_Baseline,42,43,1 -block_hint,Multiply_Baseline,60,61,1 -block_hint,Multiply_Baseline,44,45,1 -block_hint,Multiply_Baseline,19,20,1 -block_hint,Multiply_Baseline,8,9,1 -block_hint,MultiplySmi_Baseline,54,55,0 -block_hint,MultiplySmi_Baseline,40,41,0 -block_hint,MultiplySmi_Baseline,42,43,0 -block_hint,MultiplySmi_Baseline,44,45,1 -block_hint,MultiplySmi_Baseline,17,18,0 -block_hint,MultiplySmi_Baseline,6,7,1 -block_hint,MultiplySmi_Baseline,33,34,1 -block_hint,MultiplySmi_Baseline,19,20,1 -block_hint,MultiplySmi_Baseline,8,9,1 -block_hint,Divide_Baseline,52,53,0 -block_hint,Divide_Baseline,54,55,0 -block_hint,Divide_Baseline,41,42,0 -block_hint,Divide_Baseline,26,27,1 -block_hint,Divide_Baseline,6,7,1 -block_hint,Divide_Baseline,45,46,1 -block_hint,Divide_Baseline,60,61,1 -block_hint,Divide_Baseline,47,48,1 -block_hint,Divide_Baseline,33,34,0 -block_hint,Divide_Baseline,14,15,1 -block_hint,Divide_Baseline,20,21,1 -block_hint,Divide_Baseline,8,9,1 -block_hint,DivideSmi_Baseline,46,47,0 -block_hint,DivideSmi_Baseline,54,55,0 -block_hint,DivideSmi_Baseline,48,49,0 -block_hint,DivideSmi_Baseline,41,42,0 -block_hint,DivideSmi_Baseline,26,27,1 -block_hint,DivideSmi_Baseline,6,7,1 -block_hint,DivideSmi_Baseline,35,36,1 -block_hint,DivideSmi_Baseline,20,21,1 -block_hint,DivideSmi_Baseline,8,9,1 -block_hint,Modulus_Baseline,61,62,0 -block_hint,Modulus_Baseline,57,58,0 -block_hint,Modulus_Baseline,43,44,1 -block_hint,Modulus_Baseline,38,39,1 -block_hint,Modulus_Baseline,17,18,0 +block_hint,Subtract_Baseline,33,34,1 +block_hint,Subtract_Baseline,13,14,1 +block_hint,SubtractSmi_Baseline,31,32,0 +block_hint,SubtractSmi_Baseline,11,12,1 +block_hint,SubtractSmi_Baseline,51,52,1 +block_hint,SubtractSmi_Baseline,33,34,1 +block_hint,SubtractSmi_Baseline,13,14,1 +block_hint,Multiply_Baseline,100,101,0 +block_hint,Multiply_Baseline,61,62,0 +block_hint,Multiply_Baseline,77,78,0 +block_hint,Multiply_Baseline,87,88,1 +block_hint,Multiply_Baseline,79,80,1 +block_hint,Multiply_Baseline,13,14,1 +block_hint,Multiply_Baseline,63,64,1 +block_hint,Multiply_Baseline,93,94,1 +block_hint,Multiply_Baseline,65,66,1 +block_hint,Multiply_Baseline,34,35,1 +block_hint,Multiply_Baseline,15,16,1 +block_hint,MultiplySmi_Baseline,92,93,0 +block_hint,MultiplySmi_Baseline,61,62,0 +block_hint,MultiplySmi_Baseline,71,72,0 +block_hint,MultiplySmi_Baseline,73,74,1 +block_hint,MultiplySmi_Baseline,32,33,0 +block_hint,MultiplySmi_Baseline,13,14,1 +block_hint,MultiplySmi_Baseline,51,52,1 +block_hint,MultiplySmi_Baseline,34,35,1 +block_hint,MultiplySmi_Baseline,15,16,1 +block_hint,Divide_Baseline,69,70,0 +block_hint,Divide_Baseline,71,72,0 +block_hint,Divide_Baseline,50,51,0 +block_hint,Divide_Baseline,31,32,1 +block_hint,Divide_Baseline,10,11,1 +block_hint,Divide_Baseline,54,55,1 +block_hint,Divide_Baseline,79,80,1 +block_hint,Divide_Baseline,56,57,1 +block_hint,Divide_Baseline,39,40,0 +block_hint,Divide_Baseline,19,20,1 +block_hint,Divide_Baseline,25,26,1 +block_hint,Divide_Baseline,12,13,1 
+block_hint,DivideSmi_Baseline,63,64,0 +block_hint,DivideSmi_Baseline,76,77,0 +block_hint,DivideSmi_Baseline,65,66,0 +block_hint,DivideSmi_Baseline,50,51,0 +block_hint,DivideSmi_Baseline,31,32,1 +block_hint,DivideSmi_Baseline,10,11,1 +block_hint,DivideSmi_Baseline,41,42,1 +block_hint,DivideSmi_Baseline,25,26,1 +block_hint,DivideSmi_Baseline,12,13,1 +block_hint,Modulus_Baseline,76,77,0 +block_hint,Modulus_Baseline,72,73,0 +block_hint,Modulus_Baseline,55,56,1 +block_hint,Modulus_Baseline,50,51,1 +block_hint,Modulus_Baseline,18,19,0 block_hint,Modulus_Baseline,6,7,1 -block_hint,ModulusSmi_Baseline,43,44,1 -block_hint,ModulusSmi_Baseline,38,39,1 -block_hint,ModulusSmi_Baseline,17,18,0 +block_hint,ModulusSmi_Baseline,55,56,1 +block_hint,ModulusSmi_Baseline,50,51,1 +block_hint,ModulusSmi_Baseline,18,19,0 block_hint,ModulusSmi_Baseline,6,7,1 -block_hint,ModulusSmi_Baseline,32,33,1 -block_hint,ModulusSmi_Baseline,19,20,1 +block_hint,ModulusSmi_Baseline,40,41,1 +block_hint,ModulusSmi_Baseline,20,21,1 block_hint,ModulusSmi_Baseline,8,9,1 block_hint,BitwiseAnd_Baseline,35,36,0 block_hint,BitwiseAnd_Baseline,23,24,1 @@ -1984,31 +2065,29 @@ block_hint,ShiftRightSmi_Baseline,20,21,0 block_hint,ShiftRightSmi_Baseline,9,10,1 block_hint,ShiftRightLogical_Baseline,25,26,1 block_hint,ShiftRightLogical_Baseline,10,11,0 -block_hint,ShiftRightLogical_Baseline,46,47,0 -block_hint,ShiftRightLogical_Baseline,29,30,0 block_hint,ShiftRightLogical_Baseline,14,15,1 block_hint,ShiftRightLogicalSmi_Baseline,35,36,1 block_hint,ShiftRightLogicalSmi_Baseline,25,26,1 block_hint,ShiftRightLogicalSmi_Baseline,33,34,0 block_hint,ShiftRightLogicalSmi_Baseline,23,24,0 block_hint,ShiftRightLogicalSmi_Baseline,9,10,1 -block_hint,Add_WithFeedback,49,50,1 -block_hint,Add_WithFeedback,60,61,0 -block_hint,Add_WithFeedback,58,59,0 -block_hint,Add_WithFeedback,45,46,1 -block_hint,Add_WithFeedback,35,36,1 -block_hint,Add_WithFeedback,28,29,0 -block_hint,Add_WithFeedback,19,20,1 -block_hint,Subtract_WithFeedback,52,53,1 -block_hint,Subtract_WithFeedback,56,57,0 -block_hint,Subtract_WithFeedback,54,55,0 -block_hint,Subtract_WithFeedback,42,43,0 -block_hint,Subtract_WithFeedback,17,18,1 -block_hint,Modulus_WithFeedback,61,62,0 -block_hint,Modulus_WithFeedback,57,58,0 -block_hint,Modulus_WithFeedback,43,44,1 -block_hint,Modulus_WithFeedback,38,39,1 -block_hint,Modulus_WithFeedback,17,18,0 +block_hint,Add_WithFeedback,60,61,1 +block_hint,Add_WithFeedback,86,87,0 +block_hint,Add_WithFeedback,84,85,0 +block_hint,Add_WithFeedback,54,55,1 +block_hint,Add_WithFeedback,46,47,1 +block_hint,Add_WithFeedback,33,34,0 +block_hint,Add_WithFeedback,23,24,1 +block_hint,Subtract_WithFeedback,74,75,1 +block_hint,Subtract_WithFeedback,82,83,0 +block_hint,Subtract_WithFeedback,76,77,0 +block_hint,Subtract_WithFeedback,53,54,0 +block_hint,Subtract_WithFeedback,23,24,1 +block_hint,Modulus_WithFeedback,76,77,0 +block_hint,Modulus_WithFeedback,72,73,0 +block_hint,Modulus_WithFeedback,55,56,1 +block_hint,Modulus_WithFeedback,50,51,1 +block_hint,Modulus_WithFeedback,18,19,0 block_hint,Modulus_WithFeedback,6,7,1 block_hint,BitwiseOr_WithFeedback,6,7,1 block_hint,BitwiseOr_WithFeedback,35,36,0 @@ -2213,43 +2292,43 @@ block_hint,ObjectCreate,13,14,1 block_hint,ObjectCreate,15,16,1 block_hint,ObjectCreate,20,21,0 block_hint,ObjectCreate,61,62,1 -block_hint,ObjectGetOwnPropertyDescriptor,493,494,1 -block_hint,ObjectGetOwnPropertyDescriptor,490,491,0 -block_hint,ObjectGetOwnPropertyDescriptor,487,488,0 -block_hint,ObjectGetOwnPropertyDescriptor,479,480,1 
-block_hint,ObjectGetOwnPropertyDescriptor,466,467,1 -block_hint,ObjectGetOwnPropertyDescriptor,384,385,0 -block_hint,ObjectGetOwnPropertyDescriptor,444,445,1 +block_hint,ObjectGetOwnPropertyDescriptor,519,520,1 +block_hint,ObjectGetOwnPropertyDescriptor,516,517,0 +block_hint,ObjectGetOwnPropertyDescriptor,513,514,0 +block_hint,ObjectGetOwnPropertyDescriptor,505,506,1 +block_hint,ObjectGetOwnPropertyDescriptor,492,493,1 +block_hint,ObjectGetOwnPropertyDescriptor,408,409,0 +block_hint,ObjectGetOwnPropertyDescriptor,470,471,1 +block_hint,ObjectGetOwnPropertyDescriptor,488,489,0 +block_hint,ObjectGetOwnPropertyDescriptor,434,435,0 +block_hint,ObjectGetOwnPropertyDescriptor,467,468,1 +block_hint,ObjectGetOwnPropertyDescriptor,410,411,1 block_hint,ObjectGetOwnPropertyDescriptor,462,463,0 -block_hint,ObjectGetOwnPropertyDescriptor,410,411,0 -block_hint,ObjectGetOwnPropertyDescriptor,441,442,1 -block_hint,ObjectGetOwnPropertyDescriptor,386,387,1 +block_hint,ObjectGetOwnPropertyDescriptor,464,465,0 block_hint,ObjectGetOwnPropertyDescriptor,436,437,0 -block_hint,ObjectGetOwnPropertyDescriptor,438,439,0 -block_hint,ObjectGetOwnPropertyDescriptor,412,413,0 -block_hint,ObjectGetOwnPropertyDescriptor,382,383,0 -block_hint,ObjectGetOwnPropertyDescriptor,312,313,0 -block_hint,ObjectGetOwnPropertyDescriptor,184,185,1 -block_hint,ObjectGetOwnPropertyDescriptor,134,135,1 -block_hint,ObjectGetOwnPropertyDescriptor,140,141,0 -block_hint,ObjectGetOwnPropertyDescriptor,473,474,0 -block_hint,ObjectGetOwnPropertyDescriptor,481,482,1 -block_hint,ObjectGetOwnPropertyDescriptor,469,470,0 -block_hint,ObjectGetOwnPropertyDescriptor,402,403,0 -block_hint,ObjectGetOwnPropertyDescriptor,310,311,0 +block_hint,ObjectGetOwnPropertyDescriptor,406,407,0 +block_hint,ObjectGetOwnPropertyDescriptor,331,332,0 +block_hint,ObjectGetOwnPropertyDescriptor,197,198,1 +block_hint,ObjectGetOwnPropertyDescriptor,307,308,1 +block_hint,ObjectGetOwnPropertyDescriptor,138,139,0 +block_hint,ObjectGetOwnPropertyDescriptor,499,500,0 +block_hint,ObjectGetOwnPropertyDescriptor,507,508,1 +block_hint,ObjectGetOwnPropertyDescriptor,495,496,0 +block_hint,ObjectGetOwnPropertyDescriptor,426,427,0 +block_hint,ObjectGetOwnPropertyDescriptor,329,330,0 block_hint,ObjectGetOwnPropertyDescriptor,31,32,1 -block_hint,ObjectGetOwnPropertyDescriptor,340,341,1 +block_hint,ObjectGetOwnPropertyDescriptor,361,362,1 block_hint,ObjectGetOwnPropertyDescriptor,150,151,0 -block_hint,ObjectGetOwnPropertyDescriptor,448,449,0 -block_hint,ObjectGetOwnPropertyDescriptor,367,368,0 -block_hint,ObjectGetOwnPropertyDescriptor,249,250,0 -block_hint,ObjectGetOwnPropertyDescriptor,245,246,0 -block_hint,ObjectGetOwnPropertyDescriptor,265,266,0 -block_hint,ObjectGetOwnPropertyDescriptor,267,268,1 +block_hint,ObjectGetOwnPropertyDescriptor,474,475,0 +block_hint,ObjectGetOwnPropertyDescriptor,390,391,0 +block_hint,ObjectGetOwnPropertyDescriptor,264,265,0 +block_hint,ObjectGetOwnPropertyDescriptor,260,261,0 +block_hint,ObjectGetOwnPropertyDescriptor,282,283,0 +block_hint,ObjectGetOwnPropertyDescriptor,284,285,1 block_hint,ObjectGetOwnPropertyDescriptor,36,37,1 -block_hint,ObjectGetOwnPropertyDescriptor,344,345,1 -block_hint,ObjectGetOwnPropertyDescriptor,174,175,0 -block_hint,ObjectGetOwnPropertyDescriptor,253,254,1 +block_hint,ObjectGetOwnPropertyDescriptor,365,366,1 +block_hint,ObjectGetOwnPropertyDescriptor,186,187,0 +block_hint,ObjectGetOwnPropertyDescriptor,268,269,1 block_hint,ObjectKeys,32,33,1 block_hint,ObjectKeys,27,28,1 block_hint,ObjectKeys,23,24,1 @@ -2260,29 +2339,33 @@ 
block_hint,ObjectKeys,21,22,1 block_hint,ObjectKeys,9,10,0 block_hint,ObjectKeys,7,8,1 block_hint,ObjectKeys,14,15,1 -block_hint,ObjectPrototypeHasOwnProperty,212,213,1 -block_hint,ObjectPrototypeHasOwnProperty,190,191,1 -block_hint,ObjectPrototypeHasOwnProperty,206,207,1 -block_hint,ObjectPrototypeHasOwnProperty,223,224,0 -block_hint,ObjectPrototypeHasOwnProperty,203,204,0 -block_hint,ObjectPrototypeHasOwnProperty,194,195,1 -block_hint,ObjectPrototypeHasOwnProperty,156,157,1 -block_hint,ObjectPrototypeHasOwnProperty,217,218,0 +block_hint,ObjectPrototypeHasOwnProperty,230,231,1 +block_hint,ObjectPrototypeHasOwnProperty,205,206,1 +block_hint,ObjectPrototypeHasOwnProperty,222,223,1 +block_hint,ObjectPrototypeHasOwnProperty,241,242,0 block_hint,ObjectPrototypeHasOwnProperty,219,220,0 -block_hint,ObjectPrototypeHasOwnProperty,215,216,0 -block_hint,ObjectPrototypeHasOwnProperty,210,211,0 -block_hint,ObjectPrototypeHasOwnProperty,183,184,1 -block_hint,ObjectPrototypeHasOwnProperty,131,132,0 -block_hint,ObjectPrototypeHasOwnProperty,196,197,0 -block_hint,ObjectPrototypeHasOwnProperty,33,34,1 -block_hint,ObjectPrototypeHasOwnProperty,72,73,0 -block_hint,ObjectPrototypeHasOwnProperty,37,38,1 -block_hint,ObjectPrototypeHasOwnProperty,50,51,0 -block_hint,ObjectPrototypeHasOwnProperty,39,40,0 -block_hint,ObjectPrototypeHasOwnProperty,136,137,1 -block_hint,ObjectPrototypeHasOwnProperty,164,165,0 -block_hint,ObjectPrototypeHasOwnProperty,169,170,1 -block_hint,ObjectPrototypeHasOwnProperty,54,55,0 +block_hint,ObjectPrototypeHasOwnProperty,209,210,1 +block_hint,ObjectPrototypeHasOwnProperty,163,164,1 +block_hint,ObjectPrototypeHasOwnProperty,235,236,0 +block_hint,ObjectPrototypeHasOwnProperty,237,238,0 +block_hint,ObjectPrototypeHasOwnProperty,233,234,0 +block_hint,ObjectPrototypeHasOwnProperty,228,229,0 +block_hint,ObjectPrototypeHasOwnProperty,192,193,1 +block_hint,ObjectPrototypeHasOwnProperty,137,138,0 +block_hint,ObjectPrototypeHasOwnProperty,211,212,0 +block_hint,ObjectPrototypeHasOwnProperty,175,176,1 +block_hint,ObjectPrototypeHasOwnProperty,141,142,0 +block_hint,ObjectPrototypeHasOwnProperty,226,227,0 +block_hint,ObjectPrototypeHasOwnProperty,76,77,0 +block_hint,ObjectPrototypeHasOwnProperty,203,204,0 +block_hint,ObjectPrototypeHasOwnProperty,34,35,1 +block_hint,ObjectPrototypeHasOwnProperty,52,53,0 +block_hint,ObjectPrototypeHasOwnProperty,36,37,0 +block_hint,ObjectPrototypeHasOwnProperty,197,198,1 +block_hint,ObjectPrototypeHasOwnProperty,40,41,0 +block_hint,ObjectPrototypeHasOwnProperty,171,172,0 +block_hint,ObjectPrototypeHasOwnProperty,178,179,1 +block_hint,ObjectPrototypeHasOwnProperty,58,59,0 block_hint,ObjectToString,42,43,0 block_hint,ObjectToString,57,58,0 block_hint,ObjectToString,65,66,0 @@ -2307,6 +2390,7 @@ block_hint,InstanceOf_Baseline,14,15,1 block_hint,ForInEnumerate,34,35,1 block_hint,ForInEnumerate,36,37,0 block_hint,ForInEnumerate,30,31,0 +block_hint,ForInEnumerate,22,23,1 block_hint,ForInEnumerate,32,33,1 block_hint,ForInEnumerate,5,6,1 block_hint,ForInEnumerate,38,39,1 @@ -2314,34 +2398,38 @@ block_hint,ForInEnumerate,9,10,1 block_hint,ForInPrepare,7,8,1 block_hint,ForInPrepare,12,13,1 block_hint,ForInPrepare,5,6,1 -block_hint,ForInFilter,226,227,1 -block_hint,ForInFilter,228,229,1 -block_hint,ForInFilter,219,220,0 -block_hint,ForInFilter,115,116,1 -block_hint,ForInFilter,210,211,0 -block_hint,ForInFilter,60,61,0 -block_hint,ForInFilter,125,126,1 -block_hint,ForInFilter,214,215,1 -block_hint,ForInFilter,101,102,0 +block_hint,ForInFilter,234,235,1 
+block_hint,ForInFilter,236,237,1 +block_hint,ForInFilter,227,228,0 +block_hint,ForInFilter,117,118,1 +block_hint,ForInFilter,217,218,0 +block_hint,ForInFilter,62,63,0 +block_hint,ForInFilter,129,130,1 +block_hint,ForInFilter,221,222,1 block_hint,ForInFilter,103,104,0 +block_hint,ForInFilter,105,106,0 +block_hint,ForInFilter,66,67,0 block_hint,ForInFilter,64,65,0 -block_hint,ForInFilter,62,63,0 -block_hint,ForInFilter,241,242,0 -block_hint,ForInFilter,105,106,1 -block_hint,ForInFilter,39,40,1 -block_hint,ForInFilter,217,218,0 +block_hint,ForInFilter,270,271,0 +block_hint,ForInFilter,225,226,1 +block_hint,ForInFilter,109,110,1 +block_hint,ForInFilter,71,72,0 +block_hint,ForInFilter,266,267,0 +block_hint,ForInFilter,264,265,0 +block_hint,ForInFilter,251,252,0 block_hint,ForInFilter,107,108,1 -block_hint,ForInFilter,43,44,1 -block_hint,ForInFilter,196,197,0 -block_hint,ForInFilter,45,46,0 -block_hint,ForInFilter,70,71,1 -block_hint,ForInFilter,111,112,0 -block_hint,ForInFilter,127,128,0 -block_hint,ForInFilter,37,38,0 -block_hint,ForInFilter,238,239,0 -block_hint,ForInFilter,243,244,1 -block_hint,ForInFilter,184,185,0 -block_hint,ForInFilter,34,35,1 +block_hint,ForInFilter,40,41,1 +block_hint,ForInFilter,201,202,0 +block_hint,ForInFilter,42,43,0 +block_hint,ForInFilter,144,145,1 +block_hint,ForInFilter,46,47,0 +block_hint,ForInFilter,113,114,0 +block_hint,ForInFilter,131,132,0 +block_hint,ForInFilter,36,37,0 +block_hint,ForInFilter,248,249,0 +block_hint,ForInFilter,255,256,1 +block_hint,ForInFilter,189,190,0 +block_hint,ForInFilter,33,34,1 block_hint,RegExpConstructor,55,56,1 block_hint,RegExpConstructor,7,8,1 block_hint,RegExpConstructor,131,132,1 @@ -2375,42 +2463,42 @@ block_hint,FindOrderedHashSetEntry,42,43,1 block_hint,FindOrderedHashSetEntry,68,69,0 block_hint,FindOrderedHashSetEntry,58,59,1 block_hint,FindOrderedHashSetEntry,60,61,1 -block_hint,SetConstructor,193,194,1 -block_hint,SetConstructor,71,72,0 +block_hint,SetConstructor,202,203,1 +block_hint,SetConstructor,74,75,0 block_hint,SetConstructor,11,12,1 -block_hint,SetConstructor,168,169,1 -block_hint,SetConstructor,130,131,1 -block_hint,SetConstructor,54,55,0 -block_hint,SetConstructor,56,57,1 -block_hint,SetConstructor,207,208,1 -block_hint,SetConstructor,199,200,0 -block_hint,SetConstructor,76,77,1 +block_hint,SetConstructor,172,173,1 +block_hint,SetConstructor,135,136,1 +block_hint,SetConstructor,56,57,0 +block_hint,SetConstructor,58,59,1 +block_hint,SetConstructor,218,219,1 +block_hint,SetConstructor,210,211,0 +block_hint,SetConstructor,79,80,1 block_hint,SetConstructor,23,24,1 -block_hint,SetConstructor,211,212,1 -block_hint,SetConstructor,203,204,0 -block_hint,SetConstructor,144,145,1 +block_hint,SetConstructor,222,223,1 +block_hint,SetConstructor,214,215,0 +block_hint,SetConstructor,150,151,1 block_hint,SetConstructor,25,26,1 -block_hint,SetConstructor,172,173,1 -block_hint,SetConstructor,137,138,1 -block_hint,SetConstructor,80,81,1 -block_hint,SetConstructor,82,83,1 -block_hint,SetConstructor,84,85,1 -block_hint,SetConstructor,86,87,1 -block_hint,SetConstructor,88,89,1 -block_hint,SetConstructor,90,91,1 -block_hint,SetConstructor,32,33,1 -block_hint,SetConstructor,92,93,1 -block_hint,SetConstructor,140,141,1 +block_hint,SetConstructor,178,179,1 +block_hint,SetConstructor,143,144,1 +block_hint,SetConstructor,83,84,1 +block_hint,SetConstructor,85,86,1 +block_hint,SetConstructor,87,88,1 +block_hint,SetConstructor,89,90,1 +block_hint,SetConstructor,91,92,1 +block_hint,SetConstructor,93,94,1 +block_hint,SetConstructor,34,35,1 
+block_hint,SetConstructor,95,96,1 block_hint,SetConstructor,146,147,1 +block_hint,SetConstructor,152,153,1 +block_hint,SetConstructor,190,191,0 block_hint,SetConstructor,183,184,0 -block_hint,SetConstructor,176,177,0 -block_hint,SetConstructor,148,149,0 -block_hint,SetConstructor,102,103,0 -block_hint,SetConstructor,132,133,1 +block_hint,SetConstructor,154,155,0 +block_hint,SetConstructor,105,106,0 +block_hint,SetConstructor,137,138,1 block_hint,SetConstructor,27,28,1 -block_hint,SetConstructor,60,61,1 -block_hint,SetConstructor,159,160,0 -block_hint,SetConstructor,64,65,1 +block_hint,SetConstructor,62,63,1 +block_hint,SetConstructor,176,177,0 +block_hint,SetConstructor,66,67,1 block_hint,SetPrototypeHas,10,11,1 block_hint,SetPrototypeHas,5,6,1 block_hint,SetPrototypeHas,7,8,1 @@ -2565,35 +2653,36 @@ block_hint,TypedArrayPrototypeLength,52,53,0 block_hint,TypedArrayPrototypeLength,44,45,0 block_hint,TypedArrayPrototypeLength,28,29,0 block_hint,TypedArrayPrototypeLength,19,20,0 -block_hint,WeakMapConstructor,346,347,1 -block_hint,WeakMapConstructor,266,267,1 -block_hint,WeakMapConstructor,115,116,0 -block_hint,WeakMapConstructor,14,15,1 -block_hint,WeakMapConstructor,288,289,1 -block_hint,WeakMapConstructor,226,227,1 -block_hint,WeakMapConstructor,90,91,0 -block_hint,WeakMapConstructor,92,93,1 -block_hint,WeakMapConstructor,290,291,1 -block_hint,WeakMapConstructor,323,324,0 -block_hint,WeakMapConstructor,337,338,0 -block_hint,WeakMapConstructor,236,237,0 +block_hint,WeakMapConstructor,351,352,1 +block_hint,WeakMapConstructor,271,272,1 block_hint,WeakMapConstructor,119,120,0 -block_hint,WeakMapConstructor,238,239,0 -block_hint,WeakMapConstructor,106,107,0 -block_hint,WeakMapConstructor,240,241,1 -block_hint,WeakMapConstructor,207,208,1 +block_hint,WeakMapConstructor,14,15,1 +block_hint,WeakMapConstructor,293,294,1 +block_hint,WeakMapConstructor,230,231,1 +block_hint,WeakMapConstructor,93,94,0 +block_hint,WeakMapConstructor,95,96,1 +block_hint,WeakMapConstructor,295,296,1 +block_hint,WeakMapConstructor,331,332,0 +block_hint,WeakMapConstructor,342,343,0 +block_hint,WeakMapConstructor,239,240,0 +block_hint,WeakMapConstructor,123,124,0 +block_hint,WeakMapConstructor,241,242,0 +block_hint,WeakMapConstructor,109,110,0 +block_hint,WeakMapConstructor,243,244,1 +block_hint,WeakMapConstructor,211,212,1 block_hint,WeakMapConstructor,28,29,1 block_hint,WeakMapConstructor,30,31,1 block_hint,WeakMapConstructor,32,33,1 -block_hint,WeakMapConstructor,95,96,0 -block_hint,WeakMapConstructor,113,114,1 -block_hint,WeakMapLookupHashIndex,10,11,1 -block_hint,WeakMapLookupHashIndex,12,13,1 -block_hint,WeakMapLookupHashIndex,14,15,0 -block_hint,WeakMapLookupHashIndex,16,17,0 +block_hint,WeakMapConstructor,98,99,0 +block_hint,WeakMapConstructor,117,118,1 +block_hint,WeakMapLookupHashIndex,9,10,1 +block_hint,WeakMapLookupHashIndex,31,32,1 +block_hint,WeakMapLookupHashIndex,11,12,0 +block_hint,WeakMapLookupHashIndex,13,14,0 block_hint,WeakMapLookupHashIndex,25,26,1 -block_hint,WeakMapLookupHashIndex,21,22,0 -block_hint,WeakMapLookupHashIndex,18,19,0 +block_hint,WeakMapLookupHashIndex,33,34,1 +block_hint,WeakMapLookupHashIndex,27,28,0 +block_hint,WeakMapLookupHashIndex,23,24,0 block_hint,WeakMapGet,12,13,1 block_hint,WeakMapGet,7,8,1 block_hint,WeakMapGet,9,10,1 @@ -2601,31 +2690,32 @@ block_hint,WeakMapGet,3,4,1 block_hint,WeakMapPrototypeHas,10,11,1 block_hint,WeakMapPrototypeHas,5,6,1 block_hint,WeakMapPrototypeHas,7,8,1 -block_hint,WeakMapPrototypeSet,17,18,1 -block_hint,WeakMapPrototypeSet,6,7,1 
-block_hint,WeakMapPrototypeSet,8,9,1 -block_hint,WeakMapPrototypeSet,10,11,1 -block_hint,WeakMapPrototypeSet,12,13,1 -block_hint,WeakMapPrototypeSet,14,15,0 -block_hint,WeakMapPrototypeSet,4,5,0 -block_hint,WeakCollectionSet,18,19,0 -block_hint,WeakCollectionSet,6,7,1 -block_hint,WeakCollectionSet,14,15,0 +block_hint,WeakMapPrototypeSet,24,25,1 +block_hint,WeakMapPrototypeSet,5,6,1 +block_hint,WeakMapPrototypeSet,7,8,1 +block_hint,WeakMapPrototypeSet,13,14,1 +block_hint,WeakMapPrototypeSet,22,23,1 +block_hint,WeakMapPrototypeSet,15,16,0 +block_hint,WeakMapPrototypeSet,9,10,0 +block_hint,WeakCollectionSet,17,18,1 +block_hint,WeakCollectionSet,20,21,0 +block_hint,WeakCollectionSet,7,8,1 +block_hint,WeakCollectionSet,13,14,0 block_hint,AsyncGeneratorResolve,9,10,1 block_hint,AsyncGeneratorResolve,3,4,1 block_hint,AsyncGeneratorResolve,11,12,0 block_hint,AsyncGeneratorResolve,7,8,0 -block_hint,AsyncGeneratorYield,24,25,1 -block_hint,AsyncGeneratorYield,19,20,0 -block_hint,AsyncGeneratorYield,6,7,1 -block_hint,AsyncGeneratorYield,42,43,1 -block_hint,AsyncGeneratorYield,37,38,0 -block_hint,AsyncGeneratorYield,28,29,1 -block_hint,AsyncGeneratorYield,8,9,1 -block_hint,AsyncGeneratorYield,10,11,1 -block_hint,AsyncGeneratorYield,12,13,1 -block_hint,AsyncGeneratorYield,14,15,1 -block_hint,AsyncGeneratorYield,22,23,0 +block_hint,AsyncGeneratorYieldWithAwait,24,25,1 +block_hint,AsyncGeneratorYieldWithAwait,19,20,0 +block_hint,AsyncGeneratorYieldWithAwait,6,7,1 +block_hint,AsyncGeneratorYieldWithAwait,42,43,1 +block_hint,AsyncGeneratorYieldWithAwait,37,38,0 +block_hint,AsyncGeneratorYieldWithAwait,28,29,1 +block_hint,AsyncGeneratorYieldWithAwait,8,9,1 +block_hint,AsyncGeneratorYieldWithAwait,10,11,1 +block_hint,AsyncGeneratorYieldWithAwait,12,13,1 +block_hint,AsyncGeneratorYieldWithAwait,14,15,1 +block_hint,AsyncGeneratorYieldWithAwait,22,23,0 block_hint,AsyncGeneratorResumeNext,18,19,0 block_hint,AsyncGeneratorResumeNext,14,15,0 block_hint,AsyncGeneratorPrototypeNext,27,28,1 @@ -2652,8 +2742,8 @@ block_hint,AsyncGeneratorAwaitUncaught,22,23,0 block_hint,AsyncGeneratorAwaitResolveClosure,8,9,1 block_hint,AsyncGeneratorAwaitResolveClosure,2,3,1 block_hint,AsyncGeneratorAwaitResolveClosure,6,7,0 -block_hint,AsyncGeneratorYieldResolveClosure,5,6,1 -block_hint,AsyncGeneratorYieldResolveClosure,2,3,1 +block_hint,AsyncGeneratorYieldWithAwaitResolveClosure,5,6,1 +block_hint,AsyncGeneratorYieldWithAwaitResolveClosure,2,3,1 block_hint,StringAdd_CheckNone,19,20,1 block_hint,StringAdd_CheckNone,58,59,0 block_hint,StringAdd_CheckNone,78,79,1 @@ -2699,108 +2789,115 @@ block_hint,SubString,139,140,0 block_hint,SubString,103,104,1 block_hint,SubString,34,35,1 block_hint,SubString,31,32,0 -block_hint,GetProperty,53,54,1 -block_hint,GetProperty,98,99,0 -block_hint,GetProperty,168,169,1 -block_hint,GetProperty,195,196,0 -block_hint,GetProperty,158,159,1 -block_hint,GetProperty,130,131,1 -block_hint,GetProperty,57,58,1 -block_hint,GetProperty,136,137,0 -block_hint,GetProperty,138,139,0 -block_hint,GetProperty,107,108,0 -block_hint,GetProperty,59,60,0 -block_hint,GetProperty,160,161,0 -block_hint,GetProperty,179,180,0 -block_hint,GetProperty,140,141,1 -block_hint,GetProperty,199,200,0 -block_hint,GetProperty,38,39,1 -block_hint,GetProperty,207,208,0 -block_hint,GetProperty,40,41,0 -block_hint,GetProperty,144,145,0 -block_hint,GetProperty,110,111,1 -block_hint,GetProperty,146,147,1 -block_hint,GetProperty,150,151,0 -block_hint,GetProperty,154,155,1 -block_hint,GetProperty,148,149,0 -block_hint,GetProperty,46,47,0 
-block_hint,GetProperty,214,215,0 -block_hint,GetProperty,186,187,1 -block_hint,GetProperty,89,90,0 -block_hint,GetProperty,91,92,0 -block_hint,GetProperty,93,94,0 -block_hint,GetProperty,156,157,0 -block_hint,GetProperty,95,96,1 -block_hint,GetProperty,193,194,0 -block_hint,GetProperty,211,212,0 -block_hint,GetProperty,216,217,1 -block_hint,GetProperty,191,192,0 -block_hint,GetProperty,189,190,0 -block_hint,GetProperty,23,24,0 +block_hint,GetProperty,56,57,1 +block_hint,GetProperty,101,102,0 block_hint,GetProperty,175,176,1 -block_hint,GetProperty,101,102,1 -block_hint,GetPropertyWithReceiver,55,56,1 -block_hint,GetPropertyWithReceiver,57,58,1 -block_hint,GetPropertyWithReceiver,193,194,0 -block_hint,GetPropertyWithReceiver,167,168,1 -block_hint,GetPropertyWithReceiver,201,202,0 -block_hint,GetPropertyWithReceiver,109,110,0 -block_hint,GetPropertyWithReceiver,155,156,1 -block_hint,GetPropertyWithReceiver,135,136,1 -block_hint,GetPropertyWithReceiver,59,60,1 -block_hint,GetPropertyWithReceiver,141,142,0 -block_hint,GetPropertyWithReceiver,143,144,0 -block_hint,GetPropertyWithReceiver,111,112,0 -block_hint,GetPropertyWithReceiver,61,62,0 -block_hint,GetPropertyWithReceiver,157,158,0 -block_hint,GetPropertyWithReceiver,145,146,1 -block_hint,GetPropertyWithReceiver,206,207,0 -block_hint,GetPropertyWithReceiver,147,148,1 -block_hint,GetPropertyWithReceiver,41,42,1 -block_hint,GetPropertyWithReceiver,219,220,0 -block_hint,GetPropertyWithReceiver,43,44,0 -block_hint,GetPropertyWithReceiver,176,177,0 -block_hint,GetPropertyWithReceiver,35,36,0 -block_hint,GetPropertyWithReceiver,216,217,1 -block_hint,GetPropertyWithReceiver,195,196,0 +block_hint,GetProperty,205,206,0 +block_hint,GetProperty,165,166,1 +block_hint,GetProperty,133,134,1 +block_hint,GetProperty,60,61,1 +block_hint,GetProperty,139,140,0 +block_hint,GetProperty,141,142,0 +block_hint,GetProperty,110,111,0 +block_hint,GetProperty,62,63,0 +block_hint,GetProperty,167,168,0 +block_hint,GetProperty,220,221,0 +block_hint,GetProperty,210,211,1 +block_hint,GetProperty,112,113,0 +block_hint,GetProperty,231,232,0 +block_hint,GetProperty,222,223,0 +block_hint,GetProperty,218,219,0 +block_hint,GetProperty,35,36,1 +block_hint,GetProperty,224,225,0 +block_hint,GetProperty,37,38,0 +block_hint,GetProperty,147,148,0 +block_hint,GetProperty,187,188,1 +block_hint,GetProperty,41,42,0 +block_hint,GetProperty,149,150,1 +block_hint,GetProperty,157,158,0 +block_hint,GetProperty,161,162,1 +block_hint,GetProperty,151,152,0 +block_hint,GetProperty,47,48,0 +block_hint,GetProperty,233,234,0 +block_hint,GetProperty,196,197,1 +block_hint,GetProperty,92,93,0 +block_hint,GetProperty,94,95,0 +block_hint,GetProperty,96,97,0 +block_hint,GetProperty,163,164,0 +block_hint,GetProperty,98,99,1 +block_hint,GetProperty,203,204,0 +block_hint,GetProperty,228,229,0 +block_hint,GetProperty,235,236,1 +block_hint,GetProperty,201,202,0 +block_hint,GetProperty,199,200,0 +block_hint,GetProperty,22,23,0 +block_hint,GetProperty,182,183,1 +block_hint,GetProperty,104,105,1 +block_hint,GetPropertyWithReceiver,58,59,1 +block_hint,GetPropertyWithReceiver,60,61,1 +block_hint,GetPropertyWithReceiver,203,204,0 block_hint,GetPropertyWithReceiver,174,175,1 -block_hint,GetPropertyWithReceiver,105,106,1 -block_hint,SetProperty,375,376,1 -block_hint,SetProperty,377,378,0 -block_hint,SetProperty,1159,1160,0 -block_hint,SetProperty,915,916,1 -block_hint,SetProperty,1020,1021,1 -block_hint,SetProperty,1022,1023,0 -block_hint,SetProperty,728,729,0 -block_hint,SetProperty,909,910,1 
-block_hint,SetProperty,409,410,0 -block_hint,SetProperty,411,412,0 -block_hint,SetProperty,253,254,1 +block_hint,GetPropertyWithReceiver,211,212,0 +block_hint,GetPropertyWithReceiver,112,113,0 +block_hint,GetPropertyWithReceiver,162,163,1 +block_hint,GetPropertyWithReceiver,138,139,1 +block_hint,GetPropertyWithReceiver,62,63,1 +block_hint,GetPropertyWithReceiver,144,145,0 +block_hint,GetPropertyWithReceiver,146,147,0 +block_hint,GetPropertyWithReceiver,114,115,0 +block_hint,GetPropertyWithReceiver,64,65,0 +block_hint,GetPropertyWithReceiver,164,165,0 +block_hint,GetPropertyWithReceiver,217,218,1 +block_hint,GetPropertyWithReceiver,117,118,0 +block_hint,GetPropertyWithReceiver,238,239,0 +block_hint,GetPropertyWithReceiver,234,235,0 +block_hint,GetPropertyWithReceiver,225,226,0 +block_hint,GetPropertyWithReceiver,148,149,1 +block_hint,GetPropertyWithReceiver,38,39,1 +block_hint,GetPropertyWithReceiver,236,237,0 +block_hint,GetPropertyWithReceiver,40,41,0 +block_hint,GetPropertyWithReceiver,183,184,0 +block_hint,GetPropertyWithReceiver,34,35,0 +block_hint,GetPropertyWithReceiver,231,232,1 +block_hint,GetPropertyWithReceiver,205,206,0 +block_hint,GetPropertyWithReceiver,181,182,1 +block_hint,GetPropertyWithReceiver,108,109,1 +block_hint,SetProperty,379,380,1 +block_hint,SetProperty,381,382,0 +block_hint,SetProperty,1201,1202,0 +block_hint,SetProperty,925,926,1 +block_hint,SetProperty,1034,1035,1 +block_hint,SetProperty,1036,1037,0 +block_hint,SetProperty,733,734,0 +block_hint,SetProperty,919,920,1 block_hint,SetProperty,413,414,0 -block_hint,SetProperty,625,626,1 -block_hint,SetProperty,93,94,1 -block_hint,SetProperty,95,96,1 -block_hint,SetProperty,1061,1062,0 -block_hint,SetProperty,800,801,1 -block_hint,SetProperty,802,803,1 -block_hint,SetProperty,804,805,0 -block_hint,SetProperty,105,106,1 -block_hint,SetProperty,109,110,1 -block_hint,SetProperty,425,426,1 -block_hint,SetProperty,111,112,1 -block_hint,SetProperty,107,108,1 -block_hint,CreateDataProperty,317,318,1 -block_hint,CreateDataProperty,319,320,0 -block_hint,CreateDataProperty,955,956,0 -block_hint,CreateDataProperty,772,773,1 -block_hint,CreateDataProperty,860,861,1 -block_hint,CreateDataProperty,536,537,1 -block_hint,CreateDataProperty,638,639,0 -block_hint,CreateDataProperty,640,641,1 -block_hint,CreateDataProperty,886,887,1 -block_hint,CreateDataProperty,331,332,0 +block_hint,SetProperty,415,416,0 +block_hint,SetProperty,256,257,1 +block_hint,SetProperty,417,418,0 +block_hint,SetProperty,630,631,1 +block_hint,SetProperty,92,93,1 +block_hint,SetProperty,94,95,1 +block_hint,SetProperty,1089,1090,0 +block_hint,SetProperty,808,809,1 +block_hint,SetProperty,810,811,1 +block_hint,SetProperty,812,813,0 +block_hint,SetProperty,104,105,1 +block_hint,SetProperty,108,109,1 +block_hint,SetProperty,429,430,1 +block_hint,SetProperty,110,111,1 +block_hint,SetProperty,106,107,1 +block_hint,CreateDataProperty,319,320,1 +block_hint,CreateDataProperty,321,322,0 +block_hint,CreateDataProperty,978,979,0 +block_hint,CreateDataProperty,779,780,1 +block_hint,CreateDataProperty,868,869,1 +block_hint,CreateDataProperty,539,540,1 +block_hint,CreateDataProperty,645,646,0 +block_hint,CreateDataProperty,647,648,1 +block_hint,CreateDataProperty,903,904,1 +block_hint,CreateDataProperty,333,334,0 block_hint,CreateDataProperty,55,56,1 -block_hint,CreateDataProperty,540,541,1 +block_hint,CreateDataProperty,543,544,1 block_hint,CreateDataProperty,57,58,1 block_hint,ArrayPrototypeConcat,79,80,1 block_hint,ArrayPrototypeConcat,54,55,0 @@ -3730,7 +3827,6 @@ 
block_hint,FunctionPrototypeHasInstance,19,20,1 block_hint,FunctionPrototypeHasInstance,33,34,1 block_hint,FunctionPrototypeHasInstance,23,24,0 block_hint,FunctionPrototypeHasInstance,13,14,0 -block_hint,FunctionPrototypeHasInstance,31,32,0 block_hint,FunctionPrototypeHasInstance,25,26,0 block_hint,FunctionPrototypeHasInstance,27,28,0 block_hint,FastFunctionPrototypeBind,91,92,1 @@ -3984,16 +4080,17 @@ block_hint,PerformPromiseThen,20,21,1 block_hint,PerformPromiseThen,115,116,1 block_hint,PromiseFulfillReactionJob,22,23,0 block_hint,PromiseFulfillReactionJob,2,3,1 -block_hint,ResolvePromise,27,28,0 block_hint,ResolvePromise,29,30,0 -block_hint,ResolvePromise,15,16,1 -block_hint,ResolvePromise,45,46,0 block_hint,ResolvePromise,31,32,0 +block_hint,ResolvePromise,15,16,1 +block_hint,ResolvePromise,47,48,0 +block_hint,ResolvePromise,33,34,0 block_hint,ResolvePromise,6,7,1 block_hint,ResolvePromise,17,18,0 -block_hint,ResolvePromise,51,52,1 -block_hint,ResolvePromise,47,48,0 -block_hint,ResolvePromise,21,22,0 +block_hint,ResolvePromise,19,20,1 +block_hint,ResolvePromise,53,54,1 +block_hint,ResolvePromise,49,50,0 +block_hint,ResolvePromise,23,24,0 block_hint,ProxyConstructor,30,31,1 block_hint,ProxyConstructor,10,11,0 block_hint,ProxyConstructor,22,23,1 @@ -4004,29 +4101,31 @@ block_hint,ProxyConstructor,7,8,1 block_hint,ProxyConstructor,17,18,1 block_hint,ProxyConstructor,5,6,1 block_hint,ProxyConstructor,12,13,1 -block_hint,ProxyGetProperty,139,140,1 -block_hint,ProxyGetProperty,33,34,0 -block_hint,ProxyGetProperty,11,12,0 -block_hint,ProxyGetProperty,82,83,0 -block_hint,ProxyGetProperty,84,85,0 -block_hint,ProxyGetProperty,78,79,1 -block_hint,ProxyGetProperty,80,81,1 -block_hint,ProxyGetProperty,160,161,1 -block_hint,ProxyGetProperty,163,164,0 -block_hint,ProxyGetProperty,108,109,0 -block_hint,ProxyGetProperty,37,38,1 -block_hint,ProxyGetProperty,21,22,1 -block_hint,ProxyGetProperty,27,28,0 -block_hint,ProxyGetProperty,29,30,1 -block_hint,ProxyGetProperty,189,190,1 -block_hint,ProxyGetProperty,179,180,0 -block_hint,ProxyGetProperty,69,70,1 -block_hint,ProxyGetProperty,151,152,0 -block_hint,ProxyGetProperty,170,171,1 -block_hint,ProxyGetProperty,121,122,1 block_hint,ProxyGetProperty,153,154,1 -block_hint,ProxyGetProperty,155,156,0 -block_hint,ProxyGetProperty,53,54,0 +block_hint,ProxyGetProperty,34,35,0 +block_hint,ProxyGetProperty,10,11,0 +block_hint,ProxyGetProperty,89,90,0 +block_hint,ProxyGetProperty,91,92,0 +block_hint,ProxyGetProperty,85,86,1 +block_hint,ProxyGetProperty,87,88,1 +block_hint,ProxyGetProperty,176,177,1 +block_hint,ProxyGetProperty,180,181,0 +block_hint,ProxyGetProperty,118,119,0 +block_hint,ProxyGetProperty,40,41,1 +block_hint,ProxyGetProperty,114,115,1 +block_hint,ProxyGetProperty,24,25,0 +block_hint,ProxyGetProperty,26,27,1 +block_hint,ProxyGetProperty,208,209,1 +block_hint,ProxyGetProperty,198,199,0 +block_hint,ProxyGetProperty,149,150,1 +block_hint,ProxyGetProperty,28,29,0 +block_hint,ProxyGetProperty,48,49,1 +block_hint,ProxyGetProperty,167,168,0 +block_hint,ProxyGetProperty,187,188,1 +block_hint,ProxyGetProperty,131,132,1 +block_hint,ProxyGetProperty,169,170,1 +block_hint,ProxyGetProperty,171,172,0 +block_hint,ProxyGetProperty,60,61,0 block_hint,ReflectGet,20,21,1 block_hint,ReflectGet,15,16,0 block_hint,ReflectGet,5,6,1 @@ -4036,162 +4135,164 @@ block_hint,ReflectGet,9,10,0 block_hint,ReflectHas,8,9,1 block_hint,ReflectHas,5,6,1 block_hint,ReflectHas,3,4,0 -block_hint,RegExpPrototypeExec,200,201,1 -block_hint,RegExpPrototypeExec,128,129,1 
-block_hint,RegExpPrototypeExec,130,131,1 block_hint,RegExpPrototypeExec,202,203,1 -block_hint,RegExpPrototypeExec,164,165,1 +block_hint,RegExpPrototypeExec,130,131,1 +block_hint,RegExpPrototypeExec,132,133,1 +block_hint,RegExpPrototypeExec,204,205,1 +block_hint,RegExpPrototypeExec,166,167,1 block_hint,RegExpPrototypeExec,16,17,1 -block_hint,RegExpPrototypeExec,146,147,1 -block_hint,RegExpPrototypeExec,148,149,0 +block_hint,RegExpPrototypeExec,148,149,1 block_hint,RegExpPrototypeExec,150,151,0 -block_hint,RegExpPrototypeExec,206,207,0 block_hint,RegExpPrototypeExec,152,153,0 +block_hint,RegExpPrototypeExec,208,209,0 +block_hint,RegExpPrototypeExec,154,155,0 block_hint,RegExpPrototypeExec,18,19,1 -block_hint,RegExpPrototypeExec,183,184,0 -block_hint,RegExpPrototypeExec,132,133,0 -block_hint,RegExpPrototypeExec,157,158,0 -block_hint,RegExpPrototypeExec,234,235,0 -block_hint,RegExpPrototypeExec,225,226,1 -block_hint,RegExpPrototypeExec,210,211,1 -block_hint,RegExpPrototypeExec,169,170,1 +block_hint,RegExpPrototypeExec,185,186,0 +block_hint,RegExpPrototypeExec,134,135,0 block_hint,RegExpPrototypeExec,159,160,0 +block_hint,RegExpPrototypeExec,236,237,0 +block_hint,RegExpPrototypeExec,227,228,1 +block_hint,RegExpPrototypeExec,212,213,1 +block_hint,RegExpPrototypeExec,171,172,1 +block_hint,RegExpPrototypeExec,161,162,0 block_hint,RegExpPrototypeExec,73,74,0 block_hint,RegExpPrototypeExec,24,25,1 -block_hint,RegExpPrototypeExec,136,137,1 -block_hint,RegExpPrototypeExec,26,27,1 -block_hint,RegExpPrototypeExec,188,189,0 block_hint,RegExpPrototypeExec,138,139,1 -block_hint,RegExpPrototypeExec,240,241,1 -block_hint,RegExpPrototypeExec,212,213,0 -block_hint,RegExpPrototypeExec,177,178,1 +block_hint,RegExpPrototypeExec,26,27,1 +block_hint,RegExpPrototypeExec,190,191,0 +block_hint,RegExpPrototypeExec,140,141,1 +block_hint,RegExpPrototypeExec,242,243,1 +block_hint,RegExpPrototypeExec,214,215,0 +block_hint,RegExpPrototypeExec,179,180,1 block_hint,RegExpPrototypeExec,77,78,0 block_hint,RegExpPrototypeExec,34,35,1 -block_hint,RegExpPrototypeExec,142,143,1 -block_hint,RegExpPrototypeExec,114,115,1 -block_hint,RegExpPrototypeExec,154,155,1 -block_hint,RegExpMatchFast,251,252,0 -block_hint,RegExpMatchFast,292,293,1 +block_hint,RegExpPrototypeExec,144,145,1 +block_hint,RegExpPrototypeExec,116,117,1 +block_hint,RegExpPrototypeExec,156,157,1 +block_hint,RegExpMatchFast,363,364,0 +block_hint,RegExpMatchFast,293,294,1 block_hint,RegExpMatchFast,34,35,1 -block_hint,RegExpMatchFast,328,329,0 -block_hint,RegExpMatchFast,237,238,0 -block_hint,RegExpMatchFast,286,287,0 -block_hint,RegExpMatchFast,449,450,0 -block_hint,RegExpMatchFast,392,393,1 -block_hint,RegExpMatchFast,294,295,1 -block_hint,RegExpMatchFast,288,289,0 +block_hint,RegExpMatchFast,331,332,0 +block_hint,RegExpMatchFast,240,241,0 +block_hint,RegExpMatchFast,287,288,0 +block_hint,RegExpMatchFast,454,455,0 +block_hint,RegExpMatchFast,397,398,1 +block_hint,RegExpMatchFast,295,296,1 +block_hint,RegExpMatchFast,289,290,0 block_hint,RegExpMatchFast,127,128,0 -block_hint,RegExpMatchFast,239,240,1 -block_hint,RegExpMatchFast,241,242,1 +block_hint,RegExpMatchFast,242,243,1 +block_hint,RegExpMatchFast,244,245,1 block_hint,RegExpMatchFast,42,43,1 -block_hint,RegExpMatchFast,333,334,0 -block_hint,RegExpMatchFast,243,244,1 -block_hint,RegExpMatchFast,457,458,1 -block_hint,RegExpMatchFast,394,395,0 -block_hint,RegExpMatchFast,322,323,1 +block_hint,RegExpMatchFast,336,337,0 +block_hint,RegExpMatchFast,246,247,1 +block_hint,RegExpMatchFast,462,463,1 
+block_hint,RegExpMatchFast,399,400,0 +block_hint,RegExpMatchFast,325,326,1 block_hint,RegExpMatchFast,131,132,0 block_hint,RegExpMatchFast,50,51,1 -block_hint,RegExpMatchFast,247,248,1 -block_hint,RegExpMatchFast,183,184,1 -block_hint,RegExpMatchFast,262,263,1 +block_hint,RegExpMatchFast,250,251,1 +block_hint,RegExpMatchFast,186,187,1 +block_hint,RegExpMatchFast,263,264,1 +block_hint,RegExpMatchFast,301,302,0 block_hint,RegExpMatchFast,84,85,1 block_hint,RegExpMatchFast,86,87,1 -block_hint,RegExpMatchFast,302,303,0 -block_hint,RegExpMatchFast,347,348,0 -block_hint,RegExpMatchFast,378,379,0 -block_hint,RegExpMatchFast,300,301,0 +block_hint,RegExpMatchFast,305,306,0 +block_hint,RegExpMatchFast,350,351,0 +block_hint,RegExpMatchFast,383,384,0 +block_hint,RegExpMatchFast,303,304,0 block_hint,RegExpMatchFast,88,89,1 -block_hint,RegExpMatchFast,342,343,0 -block_hint,RegExpMatchFast,253,254,0 -block_hint,RegExpMatchFast,278,279,0 -block_hint,RegExpMatchFast,193,194,1 -block_hint,RegExpMatchFast,451,452,0 -block_hint,RegExpMatchFast,437,438,1 -block_hint,RegExpMatchFast,390,391,1 -block_hint,RegExpMatchFast,304,305,1 -block_hint,RegExpMatchFast,280,281,0 +block_hint,RegExpMatchFast,345,346,0 +block_hint,RegExpMatchFast,254,255,0 +block_hint,RegExpMatchFast,279,280,0 +block_hint,RegExpMatchFast,196,197,1 +block_hint,RegExpMatchFast,456,457,0 +block_hint,RegExpMatchFast,442,443,1 +block_hint,RegExpMatchFast,395,396,1 +block_hint,RegExpMatchFast,307,308,1 +block_hint,RegExpMatchFast,281,282,0 block_hint,RegExpMatchFast,115,116,0 -block_hint,RegExpMatchFast,344,345,0 -block_hint,RegExpMatchFast,255,256,0 +block_hint,RegExpMatchFast,347,348,0 +block_hint,RegExpMatchFast,256,257,0 block_hint,RegExpMatchFast,94,95,1 -block_hint,RegExpMatchFast,382,383,1 -block_hint,RegExpMatchFast,306,307,0 -block_hint,RegExpMatchFast,178,179,1 -block_hint,RegExpMatchFast,176,177,1 -block_hint,RegExpMatchFast,308,309,0 -block_hint,RegExpMatchFast,180,181,0 +block_hint,RegExpMatchFast,387,388,1 +block_hint,RegExpMatchFast,309,310,0 +block_hint,RegExpMatchFast,181,182,1 +block_hint,RegExpMatchFast,179,180,1 +block_hint,RegExpMatchFast,311,312,0 +block_hint,RegExpMatchFast,183,184,0 block_hint,RegExpMatchFast,102,103,0 block_hint,RegExpMatchFast,104,105,0 -block_hint,RegExpMatchFast,201,202,1 -block_hint,RegExpMatchFast,319,320,0 +block_hint,RegExpMatchFast,204,205,1 +block_hint,RegExpMatchFast,322,323,0 block_hint,RegExpMatchFast,106,107,1 -block_hint,RegExpMatchFast,190,191,1 -block_hint,RegExpMatchFast,349,350,0 +block_hint,RegExpMatchFast,193,194,1 +block_hint,RegExpMatchFast,352,353,0 block_hint,RegExpMatchFast,96,97,1 -block_hint,RegExpMatchFast,172,173,1 -block_hint,RegExpMatchFast,170,171,1 -block_hint,RegExpMatchFast,174,175,0 +block_hint,RegExpMatchFast,175,176,1 +block_hint,RegExpMatchFast,173,174,1 +block_hint,RegExpMatchFast,177,178,0 block_hint,RegExpMatchFast,98,99,0 block_hint,RegExpMatchFast,100,101,0 -block_hint,RegExpMatchFast,221,222,1 -block_hint,RegExpMatchFast,311,312,0 -block_hint,RegExpMatchFast,223,224,0 -block_hint,RegExpReplace,258,259,1 -block_hint,RegExpReplace,296,297,1 -block_hint,RegExpReplace,248,249,1 -block_hint,RegExpReplace,148,149,0 +block_hint,RegExpMatchFast,224,225,1 +block_hint,RegExpMatchFast,314,315,0 +block_hint,RegExpMatchFast,226,227,0 +block_hint,RegExpReplace,261,262,1 +block_hint,RegExpReplace,299,300,1 +block_hint,RegExpReplace,251,252,1 +block_hint,RegExpReplace,149,150,0 block_hint,RegExpReplace,22,23,1 -block_hint,RegExpReplace,206,207,1 
-block_hint,RegExpReplace,150,151,0 +block_hint,RegExpReplace,209,210,1 +block_hint,RegExpReplace,151,152,0 block_hint,RegExpReplace,24,25,1 -block_hint,RegExpReplace,208,209,1 -block_hint,RegExpReplace,210,211,1 -block_hint,RegExpReplace,171,172,1 -block_hint,RegExpReplace,254,255,0 +block_hint,RegExpReplace,211,212,1 +block_hint,RegExpReplace,213,214,1 +block_hint,RegExpReplace,172,173,1 +block_hint,RegExpReplace,179,180,0 +block_hint,RegExpReplace,257,258,0 block_hint,RegExpReplace,50,51,1 -block_hint,RegExpReplace,226,227,0 -block_hint,RegExpReplace,162,163,0 -block_hint,RegExpReplace,180,181,0 -block_hint,RegExpReplace,108,109,1 -block_hint,RegExpReplace,372,373,0 -block_hint,RegExpReplace,356,357,1 -block_hint,RegExpReplace,290,291,1 -block_hint,RegExpReplace,200,201,1 -block_hint,RegExpReplace,182,183,0 -block_hint,RegExpReplace,80,81,0 +block_hint,RegExpReplace,229,230,0 +block_hint,RegExpReplace,163,164,0 +block_hint,RegExpReplace,183,184,0 +block_hint,RegExpReplace,109,110,1 +block_hint,RegExpReplace,375,376,0 +block_hint,RegExpReplace,359,360,1 +block_hint,RegExpReplace,293,294,1 +block_hint,RegExpReplace,203,204,1 +block_hint,RegExpReplace,185,186,0 +block_hint,RegExpReplace,81,82,0 block_hint,RegExpReplace,56,57,1 block_hint,RegExpReplace,58,59,1 block_hint,RegExpReplace,60,61,1 -block_hint,RegExpReplace,166,167,0 +block_hint,RegExpReplace,167,168,0 block_hint,RegExpReplace,62,63,1 -block_hint,RegExpReplace,230,231,1 -block_hint,RegExpReplace,168,169,0 +block_hint,RegExpReplace,233,234,1 +block_hint,RegExpReplace,169,170,0 block_hint,RegExpReplace,64,65,1 -block_hint,RegExpReplace,377,378,1 -block_hint,RegExpReplace,368,369,1 -block_hint,RegExpReplace,323,324,0 -block_hint,RegExpReplace,282,283,0 -block_hint,RegExpReplace,215,216,0 -block_hint,RegExpReplace,99,100,1 +block_hint,RegExpReplace,380,381,1 +block_hint,RegExpReplace,371,372,1 +block_hint,RegExpReplace,326,327,0 +block_hint,RegExpReplace,285,286,0 +block_hint,RegExpReplace,218,219,0 +block_hint,RegExpReplace,100,101,1 block_hint,RegExpReplace,26,27,1 block_hint,RegExpReplace,28,29,1 -block_hint,RegExpReplace,101,102,1 +block_hint,RegExpReplace,102,103,1 block_hint,RegExpReplace,30,31,0 block_hint,RegExpReplace,32,33,1 block_hint,RegExpReplace,34,35,1 block_hint,RegExpReplace,72,73,1 block_hint,RegExpReplace,44,45,1 -block_hint,RegExpReplace,160,161,1 +block_hint,RegExpReplace,161,162,1 block_hint,RegExpReplace,46,47,1 block_hint,RegExpReplace,48,49,1 -block_hint,RegExpReplace,233,234,1 -block_hint,RegExpReplace,175,176,1 -block_hint,RegExpReplace,152,153,1 +block_hint,RegExpReplace,236,237,1 +block_hint,RegExpReplace,176,177,1 +block_hint,RegExpReplace,153,154,1 block_hint,RegExpReplace,36,37,1 -block_hint,RegExpReplace,154,155,1 +block_hint,RegExpReplace,155,156,1 block_hint,RegExpReplace,40,41,0 -block_hint,RegExpReplace,251,252,1 -block_hint,RegExpReplace,193,194,1 +block_hint,RegExpReplace,254,255,1 +block_hint,RegExpReplace,196,197,1 block_hint,RegExpReplace,42,43,1 block_hint,RegExpSearchFast,50,51,1 block_hint,RegExpSearchFast,6,7,1 @@ -4529,106 +4630,120 @@ block_hint,SymbolPrototypeToString,9,10,1 block_hint,SymbolPrototypeToString,11,12,1 block_hint,SymbolPrototypeToString,5,6,0 block_hint,SymbolPrototypeToString,7,8,1 -block_hint,CreateTypedArray,567,568,0 -block_hint,CreateTypedArray,593,594,0 -block_hint,CreateTypedArray,540,541,0 -block_hint,CreateTypedArray,451,452,0 -block_hint,CreateTypedArray,331,332,1 -block_hint,CreateTypedArray,333,334,1 -block_hint,CreateTypedArray,635,636,0 
-block_hint,CreateTypedArray,487,488,1 -block_hint,CreateTypedArray,485,486,1 -block_hint,CreateTypedArray,382,383,1 -block_hint,CreateTypedArray,546,547,0 -block_hint,CreateTypedArray,616,617,0 -block_hint,CreateTypedArray,544,545,0 -block_hint,CreateTypedArray,455,456,0 -block_hint,CreateTypedArray,393,394,0 -block_hint,CreateTypedArray,395,396,0 -block_hint,CreateTypedArray,385,386,0 -block_hint,CreateTypedArray,104,105,1 -block_hint,CreateTypedArray,106,107,1 -block_hint,CreateTypedArray,645,646,1 -block_hint,CreateTypedArray,596,597,0 -block_hint,CreateTypedArray,643,644,1 -block_hint,CreateTypedArray,613,614,1 -block_hint,CreateTypedArray,489,490,0 -block_hint,CreateTypedArray,523,524,1 -block_hint,CreateTypedArray,361,362,0 -block_hint,CreateTypedArray,236,237,0 -block_hint,CreateTypedArray,299,300,0 -block_hint,CreateTypedArray,279,280,1 -block_hint,CreateTypedArray,281,282,1 +block_hint,CreateTypedArray,610,611,0 +block_hint,CreateTypedArray,638,639,0 +block_hint,CreateTypedArray,576,577,0 +block_hint,CreateTypedArray,485,486,0 +block_hint,CreateTypedArray,356,357,1 +block_hint,CreateTypedArray,358,359,1 +block_hint,CreateTypedArray,677,678,0 +block_hint,CreateTypedArray,520,521,1 +block_hint,CreateTypedArray,518,519,1 +block_hint,CreateTypedArray,407,408,1 +block_hint,CreateTypedArray,586,587,0 +block_hint,CreateTypedArray,662,663,0 +block_hint,CreateTypedArray,584,585,0 block_hint,CreateTypedArray,491,492,0 -block_hint,CreateTypedArray,525,526,1 -block_hint,CreateTypedArray,363,364,0 -block_hint,CreateTypedArray,252,253,0 -block_hint,CreateTypedArray,301,302,0 -block_hint,CreateTypedArray,479,480,0 -block_hint,CreateTypedArray,481,482,0 -block_hint,CreateTypedArray,629,630,0 -block_hint,CreateTypedArray,496,497,1 -block_hint,CreateTypedArray,494,495,1 -block_hint,CreateTypedArray,397,398,1 -block_hint,CreateTypedArray,504,505,0 -block_hint,CreateTypedArray,498,499,0 -block_hint,CreateTypedArray,400,401,0 -block_hint,CreateTypedArray,152,153,1 -block_hint,CreateTypedArray,340,341,0 -block_hint,CreateTypedArray,154,155,1 -block_hint,CreateTypedArray,650,651,1 -block_hint,CreateTypedArray,603,604,0 -block_hint,CreateTypedArray,648,649,1 -block_hint,CreateTypedArray,619,620,1 -block_hint,CreateTypedArray,500,501,0 -block_hint,CreateTypedArray,519,520,1 -block_hint,CreateTypedArray,357,358,0 -block_hint,CreateTypedArray,204,205,0 -block_hint,CreateTypedArray,622,623,0 -block_hint,CreateTypedArray,166,167,1 -block_hint,CreateTypedArray,289,290,1 -block_hint,CreateTypedArray,291,292,1 -block_hint,CreateTypedArray,502,503,0 -block_hint,CreateTypedArray,521,522,1 -block_hint,CreateTypedArray,359,360,0 -block_hint,CreateTypedArray,220,221,0 -block_hint,CreateTypedArray,624,625,0 -block_hint,CreateTypedArray,511,512,0 -block_hint,CreateTypedArray,506,507,0 -block_hint,CreateTypedArray,462,463,0 -block_hint,CreateTypedArray,346,347,0 -block_hint,CreateTypedArray,416,417,1 -block_hint,CreateTypedArray,350,351,1 -block_hint,CreateTypedArray,465,466,0 -block_hint,CreateTypedArray,348,349,1 -block_hint,CreateTypedArray,418,419,0 -block_hint,CreateTypedArray,654,655,0 -block_hint,CreateTypedArray,605,606,0 +block_hint,CreateTypedArray,424,425,0 +block_hint,CreateTypedArray,426,427,0 +block_hint,CreateTypedArray,410,411,0 +block_hint,CreateTypedArray,105,106,1 +block_hint,CreateTypedArray,107,108,1 +block_hint,CreateTypedArray,412,413,1 +block_hint,CreateTypedArray,109,110,1 +block_hint,CreateTypedArray,111,112,1 +block_hint,CreateTypedArray,641,642,0 +block_hint,CreateTypedArray,683,684,1 
+block_hint,CreateTypedArray,660,661,1 +block_hint,CreateTypedArray,522,523,0 +block_hint,CreateTypedArray,558,559,1 +block_hint,CreateTypedArray,384,385,0 +block_hint,CreateTypedArray,261,262,0 +block_hint,CreateTypedArray,416,417,0 +block_hint,CreateTypedArray,123,124,1 +block_hint,CreateTypedArray,125,126,1 +block_hint,CreateTypedArray,305,306,1 +block_hint,CreateTypedArray,307,308,1 +block_hint,CreateTypedArray,525,526,0 +block_hint,CreateTypedArray,560,561,1 +block_hint,CreateTypedArray,386,387,0 +block_hint,CreateTypedArray,277,278,0 +block_hint,CreateTypedArray,420,421,0 +block_hint,CreateTypedArray,137,138,1 +block_hint,CreateTypedArray,139,140,1 +block_hint,CreateTypedArray,512,513,0 +block_hint,CreateTypedArray,514,515,0 +block_hint,CreateTypedArray,671,672,0 +block_hint,CreateTypedArray,531,532,1 block_hint,CreateTypedArray,529,530,1 -block_hint,CreateTypedArray,527,528,1 -block_hint,CreateTypedArray,431,432,1 -block_hint,CreateTypedArray,609,610,0 -block_hint,CreateTypedArray,537,538,0 -block_hint,CreateTypedArray,445,446,0 -block_hint,CreateTypedArray,314,315,1 -block_hint,CreateTypedArray,607,608,0 +block_hint,CreateTypedArray,428,429,1 +block_hint,CreateTypedArray,541,542,0 +block_hint,CreateTypedArray,533,534,0 +block_hint,CreateTypedArray,431,432,0 +block_hint,CreateTypedArray,165,166,1 +block_hint,CreateTypedArray,365,366,0 +block_hint,CreateTypedArray,167,168,1 +block_hint,CreateTypedArray,433,434,1 +block_hint,CreateTypedArray,169,170,1 +block_hint,CreateTypedArray,171,172,1 +block_hint,CreateTypedArray,648,649,0 +block_hint,CreateTypedArray,686,687,1 +block_hint,CreateTypedArray,665,666,1 block_hint,CreateTypedArray,535,536,0 +block_hint,CreateTypedArray,554,555,1 +block_hint,CreateTypedArray,380,381,0 +block_hint,CreateTypedArray,229,230,0 +block_hint,CreateTypedArray,437,438,0 +block_hint,CreateTypedArray,183,184,1 +block_hint,CreateTypedArray,185,186,1 +block_hint,CreateTypedArray,187,188,1 +block_hint,CreateTypedArray,318,319,1 +block_hint,CreateTypedArray,320,321,1 +block_hint,CreateTypedArray,538,539,0 +block_hint,CreateTypedArray,556,557,1 +block_hint,CreateTypedArray,382,383,0 +block_hint,CreateTypedArray,245,246,0 block_hint,CreateTypedArray,441,442,0 -block_hint,CreateTypedArray,265,266,0 -block_hint,CreateTypedArray,588,589,0 -block_hint,CreateTypedArray,321,322,0 -block_hint,CreateTypedArray,323,324,0 +block_hint,CreateTypedArray,199,200,1 +block_hint,CreateTypedArray,201,202,1 +block_hint,CreateTypedArray,548,549,0 +block_hint,CreateTypedArray,543,544,0 +block_hint,CreateTypedArray,500,501,0 block_hint,CreateTypedArray,371,372,0 -block_hint,CreateTypedArray,373,374,0 -block_hint,CreateTypedArray,316,317,1 -block_hint,CreateTypedArray,326,327,0 -block_hint,CreateTypedArray,319,320,0 -block_hint,CreateTypedArray,473,474,0 -block_hint,CreateTypedArray,517,518,1 -block_hint,CreateTypedArray,355,356,0 -block_hint,CreateTypedArray,188,189,0 -block_hint,CreateTypedArray,449,450,0 -block_hint,CreateTypedArray,273,274,0 +block_hint,CreateTypedArray,453,454,1 +block_hint,CreateTypedArray,375,376,1 +block_hint,CreateTypedArray,503,504,0 +block_hint,CreateTypedArray,373,374,1 +block_hint,CreateTypedArray,455,456,0 +block_hint,CreateTypedArray,688,689,0 +block_hint,CreateTypedArray,650,651,0 +block_hint,CreateTypedArray,564,565,1 +block_hint,CreateTypedArray,562,563,1 +block_hint,CreateTypedArray,466,467,1 +block_hint,CreateTypedArray,656,657,0 +block_hint,CreateTypedArray,574,575,0 +block_hint,CreateTypedArray,481,482,0 +block_hint,CreateTypedArray,340,341,1 
+block_hint,CreateTypedArray,654,655,0 +block_hint,CreateTypedArray,572,573,0 +block_hint,CreateTypedArray,477,478,0 +block_hint,CreateTypedArray,290,291,0 +block_hint,CreateTypedArray,634,635,0 +block_hint,CreateTypedArray,347,348,0 +block_hint,CreateTypedArray,349,350,0 +block_hint,CreateTypedArray,396,397,0 +block_hint,CreateTypedArray,398,399,0 +block_hint,CreateTypedArray,342,343,1 +block_hint,CreateTypedArray,352,353,0 +block_hint,CreateTypedArray,345,346,0 +block_hint,CreateTypedArray,507,508,0 +block_hint,CreateTypedArray,552,553,1 +block_hint,CreateTypedArray,378,379,0 +block_hint,CreateTypedArray,213,214,0 +block_hint,CreateTypedArray,567,568,0 +block_hint,CreateTypedArray,391,392,0 +block_hint,CreateTypedArray,60,61,1 +block_hint,CreateTypedArray,62,63,1 block_hint,TypedArrayFrom,156,157,1 block_hint,TypedArrayFrom,140,141,0 block_hint,TypedArrayFrom,124,125,1 @@ -4866,6 +4981,11 @@ block_hint,MergeAt,182,183,1 block_hint,MergeAt,85,86,1 block_hint,MergeAt,87,88,1 block_hint,MergeAt,89,90,1 +block_hint,MergeAt,147,148,0 +block_hint,MergeAt,91,92,1 +block_hint,MergeAt,93,94,1 +block_hint,MergeAt,95,96,1 +block_hint,MergeAt,107,108,1 block_hint,MergeAt,194,195,1 block_hint,MergeAt,97,98,1 block_hint,MergeAt,99,100,1 @@ -5060,82 +5180,82 @@ block_hint,TestTypeOfHandler,23,24,0 block_hint,TestTypeOfHandler,31,32,1 block_hint,TestTypeOfHandler,50,51,0 block_hint,TestTypeOfHandler,35,36,0 -block_hint,LdaGlobalHandler,8,9,1 -block_hint,LdaGlobalHandler,10,11,1 -block_hint,LdaGlobalHandler,12,13,1 -block_hint,LdaGlobalHandler,14,15,1 -block_hint,LdaGlobalHandler,180,181,0 -block_hint,LdaGlobalHandler,104,105,0 -block_hint,LdaGlobalHandler,108,109,1 +block_hint,LdaGlobalHandler,7,8,1 +block_hint,LdaGlobalHandler,9,10,1 +block_hint,LdaGlobalHandler,11,12,1 +block_hint,LdaGlobalHandler,13,14,1 +block_hint,LdaGlobalHandler,183,184,0 +block_hint,LdaGlobalHandler,105,106,0 +block_hint,LdaGlobalHandler,109,110,1 block_hint,StaContextSlotHandler,5,6,1 block_hint,StaCurrentContextSlotHandler,2,3,1 -block_hint,LdaLookupGlobalSlotHandler,14,15,1 -block_hint,LdaLookupGlobalSlotHandler,124,125,0 -block_hint,LdaLookupGlobalSlotHandler,16,17,1 -block_hint,GetNamedPropertyHandler,362,363,1 -block_hint,GetNamedPropertyHandler,210,211,0 -block_hint,GetNamedPropertyHandler,75,76,0 -block_hint,GetNamedPropertyHandler,37,38,1 -block_hint,GetNamedPropertyHandler,303,304,0 -block_hint,GetNamedPropertyHandler,329,330,0 -block_hint,GetNamedPropertyHandler,212,213,1 -block_hint,GetNamedPropertyHandler,112,113,0 -block_hint,GetNamedPropertyHandler,214,215,0 -block_hint,GetNamedPropertyHandler,286,287,1 -block_hint,GetNamedPropertyHandler,39,40,0 -block_hint,GetNamedPropertyHandler,92,93,1 -block_hint,GetNamedPropertyHandler,337,338,0 -block_hint,GetNamedPropertyHandler,236,237,0 -block_hint,GetNamedPropertyHandler,148,149,0 -block_hint,GetNamedPropertyHandler,59,60,1 -block_hint,GetNamedPropertyHandler,85,86,0 -block_hint,GetNamedPropertyHandler,27,28,1 -block_hint,GetNamedPropertyHandler,138,139,0 -block_hint,GetNamedPropertyHandler,63,64,0 -block_hint,GetNamedPropertyHandler,293,294,1 -block_hint,GetNamedPropertyHandler,96,97,0 -block_hint,GetNamedPropertyHandler,242,243,1 +block_hint,LdaLookupGlobalSlotHandler,13,14,1 +block_hint,LdaLookupGlobalSlotHandler,125,126,0 +block_hint,LdaLookupGlobalSlotHandler,15,16,1 +block_hint,GetNamedPropertyHandler,372,373,1 +block_hint,GetNamedPropertyHandler,216,217,0 +block_hint,GetNamedPropertyHandler,77,78,0 +block_hint,GetNamedPropertyHandler,35,36,1 
+block_hint,GetNamedPropertyHandler,313,314,0 +block_hint,GetNamedPropertyHandler,339,340,0 +block_hint,GetNamedPropertyHandler,218,219,1 +block_hint,GetNamedPropertyHandler,290,291,0 +block_hint,GetNamedPropertyHandler,220,221,0 +block_hint,GetNamedPropertyHandler,294,295,1 +block_hint,GetNamedPropertyHandler,98,99,1 +block_hint,GetNamedPropertyHandler,347,348,0 +block_hint,GetNamedPropertyHandler,242,243,0 +block_hint,GetNamedPropertyHandler,154,155,0 +block_hint,GetNamedPropertyHandler,120,121,1 +block_hint,GetNamedPropertyHandler,49,50,0 +block_hint,GetNamedPropertyHandler,87,88,0 +block_hint,GetNamedPropertyHandler,25,26,1 +block_hint,GetNamedPropertyHandler,144,145,0 +block_hint,GetNamedPropertyHandler,65,66,0 +block_hint,GetNamedPropertyHandler,303,304,1 +block_hint,GetNamedPropertyHandler,102,103,0 +block_hint,GetNamedPropertyHandler,248,249,1 +block_hint,GetNamedPropertyHandler,250,251,1 block_hint,GetNamedPropertyHandler,244,245,1 -block_hint,GetNamedPropertyHandler,238,239,1 -block_hint,GetNamedPropertyHandler,240,241,1 -block_hint,GetNamedPropertyHandler,158,159,1 -block_hint,AddHandler,53,54,0 -block_hint,AddHandler,37,38,0 -block_hint,AddHandler,28,29,1 -block_hint,AddHandler,80,81,0 -block_hint,AddHandler,60,61,1 -block_hint,AddHandler,40,41,1 -block_hint,AddHandler,74,75,1 -block_hint,AddHandler,43,44,1 -block_hint,AddHandler,56,57,1 -block_hint,AddHandler,22,23,1 -block_hint,SubHandler,35,36,0 -block_hint,SubHandler,23,24,1 -block_hint,SubHandler,64,65,1 -block_hint,SubHandler,75,76,1 -block_hint,SubHandler,66,67,1 -block_hint,SubHandler,45,46,1 -block_hint,SubHandler,19,20,1 -block_hint,MulHandler,69,70,1 -block_hint,MulHandler,65,66,1 -block_hint,MulHandler,17,18,1 -block_hint,MulHandler,75,76,1 +block_hint,GetNamedPropertyHandler,246,247,1 +block_hint,GetNamedPropertyHandler,164,165,1 +block_hint,AddHandler,72,73,0 +block_hint,AddHandler,45,46,0 +block_hint,AddHandler,32,33,1 +block_hint,AddHandler,118,119,0 +block_hint,AddHandler,81,82,1 +block_hint,AddHandler,48,49,1 +block_hint,AddHandler,103,104,1 +block_hint,AddHandler,52,53,1 +block_hint,AddHandler,75,76,1 +block_hint,AddHandler,24,25,1 +block_hint,SubHandler,42,43,0 +block_hint,SubHandler,27,28,1 +block_hint,SubHandler,78,79,1 +block_hint,SubHandler,98,99,1 +block_hint,SubHandler,80,81,1 +block_hint,SubHandler,56,57,1 +block_hint,SubHandler,21,22,1 +block_hint,MulHandler,106,107,1 +block_hint,MulHandler,98,99,1 +block_hint,MulHandler,30,31,1 +block_hint,MulHandler,112,113,1 +block_hint,MulHandler,91,92,1 block_hint,MulHandler,59,60,1 -block_hint,MulHandler,39,40,1 -block_hint,MulHandler,12,13,1 -block_hint,DivHandler,65,66,0 -block_hint,DivHandler,54,55,0 -block_hint,DivHandler,37,38,1 -block_hint,DivHandler,15,16,1 -block_hint,DivHandler,73,74,1 -block_hint,DivHandler,60,61,1 -block_hint,DivHandler,40,41,1 -block_hint,DivHandler,10,11,1 -block_hint,ModHandler,74,75,0 -block_hint,ModHandler,70,71,0 -block_hint,ModHandler,56,57,1 -block_hint,ModHandler,51,52,1 -block_hint,ModHandler,28,29,0 +block_hint,MulHandler,23,24,1 +block_hint,DivHandler,85,86,0 +block_hint,DivHandler,66,67,0 +block_hint,DivHandler,43,44,1 +block_hint,DivHandler,23,24,1 +block_hint,DivHandler,95,96,1 +block_hint,DivHandler,72,73,1 +block_hint,DivHandler,46,47,1 +block_hint,DivHandler,17,18,1 +block_hint,ModHandler,89,90,0 +block_hint,ModHandler,85,86,0 +block_hint,ModHandler,68,69,1 +block_hint,ModHandler,63,64,1 +block_hint,ModHandler,29,30,0 block_hint,ModHandler,15,16,1 block_hint,BitwiseOrHandler,42,43,0 
block_hint,BitwiseOrHandler,30,31,1 @@ -5421,37 +5541,37 @@ block_hint,LdaImmutableContextSlotWideHandler,3,4,1 block_hint,LdaImmutableContextSlotWideHandler,9,10,0 block_hint,LdaImmutableContextSlotWideHandler,5,6,1 block_hint,LdaImmutableCurrentContextSlotWideHandler,2,3,1 -block_hint,LdaGlobalWideHandler,257,258,0 -block_hint,LdaGlobalWideHandler,108,109,1 +block_hint,LdaGlobalWideHandler,262,263,0 +block_hint,LdaGlobalWideHandler,110,111,1 block_hint,StaGlobalWideHandler,3,4,0 block_hint,StaCurrentContextSlotWideHandler,2,3,1 -block_hint,GetNamedPropertyWideHandler,323,324,0 -block_hint,GetNamedPropertyWideHandler,138,139,1 +block_hint,GetNamedPropertyWideHandler,331,332,0 +block_hint,GetNamedPropertyWideHandler,140,141,1 block_hint,GetKeyedPropertyWideHandler,3,4,0 block_hint,SetNamedPropertyWideHandler,3,4,0 block_hint,DefineNamedOwnPropertyWideHandler,3,4,0 block_hint,SetKeyedPropertyWideHandler,3,4,0 block_hint,DefineKeyedOwnPropertyWideHandler,3,4,0 block_hint,StaInArrayLiteralWideHandler,3,4,0 -block_hint,AddWideHandler,82,83,0 -block_hint,AddWideHandler,49,50,0 -block_hint,AddWideHandler,35,36,0 -block_hint,AddWideHandler,78,79,0 -block_hint,AddWideHandler,64,65,1 -block_hint,AddWideHandler,45,46,1 -block_hint,AddWideHandler,27,28,1 -block_hint,AddWideHandler,43,44,1 -block_hint,AddWideHandler,16,17,1 -block_hint,SubWideHandler,75,76,0 -block_hint,SubWideHandler,53,54,0 -block_hint,SubWideHandler,33,34,0 -block_hint,SubWideHandler,13,14,1 -block_hint,MulWideHandler,80,81,0 -block_hint,MulWideHandler,73,74,1 -block_hint,MulWideHandler,59,60,1 -block_hint,MulWideHandler,57,58,1 -block_hint,MulWideHandler,30,31,1 -block_hint,MulWideHandler,10,11,1 +block_hint,AddWideHandler,120,121,0 +block_hint,AddWideHandler,60,61,0 +block_hint,AddWideHandler,42,43,0 +block_hint,AddWideHandler,107,108,0 +block_hint,AddWideHandler,76,77,1 +block_hint,AddWideHandler,53,54,1 +block_hint,AddWideHandler,31,32,1 +block_hint,AddWideHandler,51,52,1 +block_hint,AddWideHandler,18,19,1 +block_hint,SubWideHandler,108,109,0 +block_hint,SubWideHandler,65,66,0 +block_hint,SubWideHandler,40,41,0 +block_hint,SubWideHandler,15,16,1 +block_hint,MulWideHandler,128,129,0 +block_hint,MulWideHandler,106,107,1 +block_hint,MulWideHandler,83,84,1 +block_hint,MulWideHandler,81,82,1 +block_hint,MulWideHandler,43,44,1 +block_hint,MulWideHandler,19,20,1 block_hint,BitwiseOrWideHandler,28,29,0 block_hint,BitwiseOrWideHandler,20,21,1 block_hint,AddSmiWideHandler,25,26,0 @@ -5544,8 +5664,8 @@ block_hint,ForInNextWideHandler,11,12,0 block_hint,ForInNextWideHandler,2,3,1 block_hint,ForInNextWideHandler,4,5,0 block_hint,ForInNextWideHandler,9,10,1 -block_hint,LdaGlobalExtraWideHandler,257,258,0 -block_hint,LdaGlobalExtraWideHandler,108,109,1 +block_hint,LdaGlobalExtraWideHandler,262,263,0 +block_hint,LdaGlobalExtraWideHandler,110,111,1 block_hint,AddSmiExtraWideHandler,33,34,1 block_hint,AddSmiExtraWideHandler,23,24,0 block_hint,AddSmiExtraWideHandler,28,29,1 @@ -5563,720 +5683,720 @@ block_hint,BitwiseAndSmiExtraWideHandler,29,30,0 block_hint,BitwiseAndSmiExtraWideHandler,18,19,1 block_hint,CallUndefinedReceiver1ExtraWideHandler,68,69,0 block_hint,CallUndefinedReceiver1ExtraWideHandler,19,20,0 -builtin_hash,RecordWriteSaveFP,925153714 -builtin_hash,RecordWriteIgnoreFP,925153714 -builtin_hash,EphemeronKeyBarrierSaveFP,576191782 -builtin_hash,AdaptorWithBuiltinExitFrame,354449226 -builtin_hash,Call_ReceiverIsNullOrUndefined_Baseline_Compact,184201450 -builtin_hash,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,-622040840 
-builtin_hash,Call_ReceiverIsAny_Baseline_Compact,-622040840 -builtin_hash,CallProxy,431038095 -builtin_hash,CallWithSpread,114885228 -builtin_hash,CallWithSpread_Baseline,-764009345 -builtin_hash,CallWithArrayLike,-906554997 -builtin_hash,ConstructWithSpread,-608478575 -builtin_hash,ConstructWithSpread_Baseline,633800479 -builtin_hash,Construct_Baseline,-357225391 -builtin_hash,FastNewObject,-181970843 -builtin_hash,FastNewClosure,-175349459 -builtin_hash,StringEqual,706401832 -builtin_hash,StringGreaterThan,-613733034 -builtin_hash,StringGreaterThanOrEqual,-355204059 -builtin_hash,StringLessThan,-355204059 -builtin_hash,StringLessThanOrEqual,-613733034 -builtin_hash,StringSubstring,293726114 -builtin_hash,OrderedHashTableHealIndex,320211480 -builtin_hash,CompileLazy,-48838662 -builtin_hash,CompileLazyDeoptimizedCode,1029930506 -builtin_hash,InstantiateAsmJs,-487292373 -builtin_hash,AllocateInYoungGeneration,669322182 -builtin_hash,AllocateRegularInYoungGeneration,-197165802 -builtin_hash,AllocateRegularInOldGeneration,-197165802 -builtin_hash,CopyFastSmiOrObjectElements,-974669866 -builtin_hash,GrowFastDoubleElements,633375032 -builtin_hash,GrowFastSmiOrObjectElements,55014026 -builtin_hash,ToNumber,1056900683 -builtin_hash,ToNumber_Baseline,-255970856 -builtin_hash,ToNumeric_Baseline,-150297448 -builtin_hash,ToNumberConvertBigInt,-115864776 -builtin_hash,Typeof,455002258 -builtin_hash,KeyedLoadIC_PolymorphicName,-147363616 -builtin_hash,KeyedStoreIC_Megamorphic,-766812467 -builtin_hash,DefineKeyedOwnIC_Megamorphic,-98359717 -builtin_hash,LoadGlobalIC_NoFeedback,-751542491 -builtin_hash,LoadIC_FunctionPrototype,-456945758 -builtin_hash,LoadIC_StringLength,708277321 -builtin_hash,LoadIC_StringWrapperLength,813299235 -builtin_hash,LoadIC_NoFeedback,203476084 -builtin_hash,StoreIC_NoFeedback,-1067800910 -builtin_hash,DefineNamedOwnIC_NoFeedback,-679825540 -builtin_hash,KeyedLoadIC_SloppyArguments,-451779601 -builtin_hash,StoreFastElementIC_Standard,-723267640 -builtin_hash,StoreFastElementIC_GrowNoTransitionHandleCOW,-981655461 -builtin_hash,StoreFastElementIC_NoTransitionHandleCOW,516590767 -builtin_hash,ElementsTransitionAndStore_Standard,-764902427 -builtin_hash,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,403149715 -builtin_hash,ElementsTransitionAndStore_NoTransitionHandleCOW,995918993 -builtin_hash,KeyedHasIC_PolymorphicName,-165442496 -builtin_hash,EnqueueMicrotask,7595399 -builtin_hash,RunMicrotasks,-338691087 -builtin_hash,HasProperty,508450008 -builtin_hash,DeleteProperty,-838129578 -builtin_hash,SetDataProperties,-464632524 -builtin_hash,ReturnReceiver,312434521 -builtin_hash,ArrayConstructor,-319898279 -builtin_hash,ArrayConstructorImpl,412703549 -builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DontOverride,850144068 -builtin_hash,ArrayNoArgumentConstructor_HoleySmi_DontOverride,850144068 -builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DisableAllocationSites,1070673895 -builtin_hash,ArrayNoArgumentConstructor_Packed_DisableAllocationSites,1070673895 -builtin_hash,ArrayNoArgumentConstructor_Holey_DisableAllocationSites,1070673895 -builtin_hash,ArrayNoArgumentConstructor_PackedDouble_DisableAllocationSites,-336123958 -builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DontOverride,227053212 -builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,873893 -builtin_hash,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,873893 -builtin_hash,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,-847176062 
-builtin_hash,ArrayIncludesSmi,536907331 -builtin_hash,ArrayIncludesSmiOrObject,1001204729 -builtin_hash,ArrayIncludes,561561276 -builtin_hash,ArrayIndexOfSmi,-663968987 -builtin_hash,ArrayIndexOfSmiOrObject,-876114200 -builtin_hash,ArrayIndexOf,-253950356 -builtin_hash,ArrayPrototypePop,-292344596 -builtin_hash,ArrayPrototypePush,-675294835 -builtin_hash,CloneFastJSArray,-172971657 -builtin_hash,CloneFastJSArrayFillingHoles,-284975194 -builtin_hash,ExtractFastJSArray,-471066963 -builtin_hash,ArrayPrototypeEntries,829416546 -builtin_hash,ArrayPrototypeKeys,162456650 -builtin_hash,ArrayPrototypeValues,829416546 -builtin_hash,ArrayIteratorPrototypeNext,1046841582 -builtin_hash,AsyncFunctionEnter,608034084 -builtin_hash,AsyncFunctionResolve,-196767509 -builtin_hash,AsyncFunctionAwaitCaught,861277429 -builtin_hash,AsyncFunctionAwaitUncaught,861277429 -builtin_hash,AsyncFunctionAwaitResolveClosure,-342974575 -builtin_hash,DatePrototypeGetDate,799986656 -builtin_hash,DatePrototypeGetDay,799986656 -builtin_hash,DatePrototypeGetFullYear,799986656 -builtin_hash,DatePrototypeGetHours,799986656 -builtin_hash,DatePrototypeGetMilliseconds,-1015935702 -builtin_hash,DatePrototypeGetMinutes,799986656 -builtin_hash,DatePrototypeGetMonth,799986656 -builtin_hash,DatePrototypeGetSeconds,799986656 -builtin_hash,DatePrototypeGetTime,604056264 -builtin_hash,DatePrototypeGetTimezoneOffset,-1015935702 -builtin_hash,DatePrototypeValueOf,604056264 -builtin_hash,DatePrototypeToPrimitive,83647064 -builtin_hash,CreateIterResultObject,-837586211 -builtin_hash,CreateGeneratorObject,-731927520 -builtin_hash,GeneratorPrototypeNext,24056276 -builtin_hash,GeneratorPrototypeReturn,704697656 -builtin_hash,SuspendGeneratorBaseline,-772427084 -builtin_hash,ResumeGeneratorBaseline,-1024670400 -builtin_hash,GlobalIsFinite,98040433 -builtin_hash,GlobalIsNaN,-131926158 -builtin_hash,LoadIC,758731508 -builtin_hash,LoadIC_Megamorphic,485927514 -builtin_hash,LoadIC_Noninlined,-900550790 -builtin_hash,LoadICTrampoline,279586909 -builtin_hash,LoadICBaseline,378721791 -builtin_hash,LoadICTrampoline_Megamorphic,279586909 -builtin_hash,LoadSuperIC,-700275115 -builtin_hash,LoadSuperICBaseline,-779741774 -builtin_hash,KeyedLoadIC,-932282799 -builtin_hash,KeyedLoadIC_Megamorphic,-52322939 -builtin_hash,KeyedLoadICTrampoline,279586909 -builtin_hash,KeyedLoadICBaseline,378721791 -builtin_hash,KeyedLoadICTrampoline_Megamorphic,279586909 -builtin_hash,StoreGlobalIC,-593770272 -builtin_hash,StoreGlobalICTrampoline,279586909 -builtin_hash,StoreGlobalICBaseline,378721791 -builtin_hash,StoreIC,-725198793 -builtin_hash,StoreICTrampoline,853716338 -builtin_hash,StoreICBaseline,-779741774 -builtin_hash,DefineNamedOwnIC,407070464 -builtin_hash,DefineNamedOwnICBaseline,-779741774 -builtin_hash,KeyedStoreIC,-781537170 -builtin_hash,KeyedStoreICTrampoline,853716338 -builtin_hash,KeyedStoreICBaseline,-779741774 -builtin_hash,DefineKeyedOwnIC,-368740640 -builtin_hash,StoreInArrayLiteralIC,870478079 -builtin_hash,StoreInArrayLiteralICBaseline,-779741774 -builtin_hash,LoadGlobalIC,-629815841 -builtin_hash,LoadGlobalICInsideTypeof,-681620395 -builtin_hash,LoadGlobalICTrampoline,742678434 -builtin_hash,LoadGlobalICBaseline,-376594265 -builtin_hash,LoadGlobalICInsideTypeofTrampoline,742678434 -builtin_hash,LoadGlobalICInsideTypeofBaseline,-376594265 -builtin_hash,LookupGlobalICBaseline,318821975 -builtin_hash,LookupGlobalICInsideTypeofBaseline,318821975 -builtin_hash,KeyedHasIC,-1007842062 -builtin_hash,KeyedHasICBaseline,378721791 
-builtin_hash,KeyedHasIC_Megamorphic,508450008 -builtin_hash,IterableToList,444610848 -builtin_hash,IterableToListWithSymbolLookup,-721120479 -builtin_hash,IterableToListMayPreserveHoles,907505763 -builtin_hash,FindOrderedHashMapEntry,-772719645 -builtin_hash,MapConstructor,328612162 -builtin_hash,MapPrototypeSet,-740571272 -builtin_hash,MapPrototypeDelete,815676314 -builtin_hash,MapPrototypeGet,-99464800 -builtin_hash,MapPrototypeHas,417921745 -builtin_hash,MapPrototypeEntries,-250935376 -builtin_hash,MapPrototypeGetSize,1035731846 -builtin_hash,MapPrototypeForEach,-11074084 -builtin_hash,MapPrototypeKeys,-250935376 -builtin_hash,MapPrototypeValues,-250935376 -builtin_hash,MapIteratorPrototypeNext,-513700577 -builtin_hash,MapIteratorToList,788737747 -builtin_hash,SameValueNumbersOnly,-715209 -builtin_hash,Add_Baseline,239982805 -builtin_hash,AddSmi_Baseline,890570719 -builtin_hash,Subtract_Baseline,984085983 -builtin_hash,SubtractSmi_Baseline,745275517 -builtin_hash,Multiply_Baseline,363197242 -builtin_hash,MultiplySmi_Baseline,-291668426 -builtin_hash,Divide_Baseline,41207474 -builtin_hash,DivideSmi_Baseline,-228110709 -builtin_hash,Modulus_Baseline,895869986 -builtin_hash,ModulusSmi_Baseline,1052037814 -builtin_hash,Exponentiate_Baseline,525497385 -builtin_hash,BitwiseAnd_Baseline,834157251 -builtin_hash,BitwiseAndSmi_Baseline,-224843182 -builtin_hash,BitwiseOr_Baseline,-1000862579 -builtin_hash,BitwiseOrSmi_Baseline,-175921720 -builtin_hash,BitwiseXor_Baseline,451693864 -builtin_hash,BitwiseXorSmi_Baseline,1072796926 -builtin_hash,ShiftLeft_Baseline,-1059182958 -builtin_hash,ShiftLeftSmi_Baseline,180208819 -builtin_hash,ShiftRight_Baseline,8908966 -builtin_hash,ShiftRightSmi_Baseline,-553470248 -builtin_hash,ShiftRightLogical_Baseline,-771447674 -builtin_hash,ShiftRightLogicalSmi_Baseline,524787408 -builtin_hash,Add_WithFeedback,-726025304 -builtin_hash,Subtract_WithFeedback,82634219 -builtin_hash,Modulus_WithFeedback,-14646246 -builtin_hash,BitwiseOr_WithFeedback,-345564281 -builtin_hash,Equal_Baseline,-851425691 -builtin_hash,StrictEqual_Baseline,609264081 -builtin_hash,LessThan_Baseline,-250082751 -builtin_hash,GreaterThan_Baseline,442646997 -builtin_hash,LessThanOrEqual_Baseline,278491596 -builtin_hash,GreaterThanOrEqual_Baseline,-526555932 -builtin_hash,Equal_WithFeedback,470211465 -builtin_hash,StrictEqual_WithFeedback,-720033793 -builtin_hash,LessThan_WithFeedback,555433500 -builtin_hash,GreaterThan_WithFeedback,-13794951 -builtin_hash,GreaterThanOrEqual_WithFeedback,38912741 -builtin_hash,BitwiseNot_Baseline,-63143721 -builtin_hash,Decrement_Baseline,-491402242 -builtin_hash,Increment_Baseline,379465907 -builtin_hash,Negate_Baseline,984080569 -builtin_hash,ObjectAssign,72200033 -builtin_hash,ObjectCreate,-611788064 -builtin_hash,ObjectEntries,-669410235 -builtin_hash,ObjectGetOwnPropertyDescriptor,671274362 -builtin_hash,ObjectGetOwnPropertyNames,12658690 -builtin_hash,ObjectIs,-340387257 -builtin_hash,ObjectKeys,-12925872 -builtin_hash,ObjectPrototypeHasOwnProperty,156674900 -builtin_hash,ObjectToString,-633754100 -builtin_hash,InstanceOf_WithFeedback,411449225 -builtin_hash,InstanceOf_Baseline,486598742 -builtin_hash,ForInEnumerate,-1069244956 -builtin_hash,ForInPrepare,362202805 -builtin_hash,ForInFilter,-306829682 -builtin_hash,RegExpConstructor,-10164395 -builtin_hash,RegExpExecAtom,-1073379799 -builtin_hash,RegExpExecInternal,891768236 -builtin_hash,FindOrderedHashSetEntry,705655709 -builtin_hash,SetConstructor,-575472839 -builtin_hash,SetPrototypeHas,417921745 
-builtin_hash,SetPrototypeAdd,-541161428 -builtin_hash,SetPrototypeDelete,851985022 -builtin_hash,SetPrototypeEntries,-250935376 -builtin_hash,SetPrototypeGetSize,1035731846 -builtin_hash,SetPrototypeForEach,-130012968 -builtin_hash,SetPrototypeValues,-250935376 -builtin_hash,SetIteratorPrototypeNext,-958421279 -builtin_hash,SetOrSetIteratorToList,144945576 -builtin_hash,StringFromCharCode,-225080952 -builtin_hash,StringPrototypeReplace,-894056994 -builtin_hash,StringPrototypeSplit,52285085 -builtin_hash,TypedArrayConstructor,536725208 -builtin_hash,TypedArrayPrototypeByteLength,667117445 -builtin_hash,TypedArrayPrototypeLength,423335186 -builtin_hash,WeakMapConstructor,211573517 -builtin_hash,WeakMapLookupHashIndex,449726121 -builtin_hash,WeakMapGet,-388873308 -builtin_hash,WeakMapPrototypeHas,-407398312 -builtin_hash,WeakMapPrototypeSet,795326571 -builtin_hash,WeakSetConstructor,-100066523 -builtin_hash,WeakSetPrototypeHas,-407398312 -builtin_hash,WeakSetPrototypeAdd,-116381667 -builtin_hash,WeakCollectionSet,-319283680 -builtin_hash,AsyncGeneratorResolve,69898511 -builtin_hash,AsyncGeneratorYield,444077127 -builtin_hash,AsyncGeneratorResumeNext,-495744369 -builtin_hash,AsyncGeneratorPrototypeNext,95683089 -builtin_hash,AsyncGeneratorAwaitUncaught,-291001587 -builtin_hash,AsyncGeneratorAwaitResolveClosure,1023878839 -builtin_hash,AsyncGeneratorYieldResolveClosure,783335474 -builtin_hash,StringAdd_CheckNone,313254412 -builtin_hash,SubString,992908207 -builtin_hash,GetProperty,-151131763 -builtin_hash,GetPropertyWithReceiver,-145418645 -builtin_hash,SetProperty,877836229 -builtin_hash,CreateDataProperty,-606450676 -builtin_hash,ArrayPrototypeConcat,841497445 -builtin_hash,ArrayEvery,-992635840 -builtin_hash,ArrayFilterLoopLazyDeoptContinuation,923422735 -builtin_hash,ArrayFilterLoopContinuation,-871404088 -builtin_hash,ArrayFilter,135589144 -builtin_hash,ArrayPrototypeFind,-52246620 -builtin_hash,ArrayForEachLoopLazyDeoptContinuation,-87111692 -builtin_hash,ArrayForEachLoopContinuation,-432571379 -builtin_hash,ArrayForEach,1063323906 -builtin_hash,ArrayFrom,403114949 -builtin_hash,ArrayIsArray,421541474 -builtin_hash,LoadJoinElement_FastSmiOrObjectElements_0,-931014688 -builtin_hash,LoadJoinElement_FastDoubleElements_0,668207359 -builtin_hash,JoinStackPush,1045985488 -builtin_hash,JoinStackPop,294613228 -builtin_hash,ArrayPrototypeJoin,-591790002 -builtin_hash,ArrayPrototypeToString,254788114 -builtin_hash,ArrayPrototypeLastIndexOf,-623388518 -builtin_hash,ArrayMapLoopLazyDeoptContinuation,527144862 -builtin_hash,ArrayMapLoopContinuation,-248361443 -builtin_hash,ArrayMap,-977984343 -builtin_hash,ArrayReduceLoopLazyDeoptContinuation,641128240 -builtin_hash,ArrayReduceLoopContinuation,601284012 -builtin_hash,ArrayReduce,-918238503 -builtin_hash,ArrayPrototypeReverse,-50777013 -builtin_hash,ArrayPrototypeShift,763658583 -builtin_hash,ArrayPrototypeSlice,132491105 -builtin_hash,ArraySome,46741703 -builtin_hash,ArrayPrototypeSplice,532432423 -builtin_hash,ArrayPrototypeUnshift,285400235 -builtin_hash,ArrayBufferPrototypeGetByteLength,904301424 -builtin_hash,ArrayBufferIsView,-1007526052 -builtin_hash,ToInteger,488762220 -builtin_hash,FastCreateDataProperty,-709822671 -builtin_hash,BooleanConstructor,-990386327 -builtin_hash,BooleanPrototypeToString,-439668236 -builtin_hash,ToString,-945769269 -builtin_hash,StringPrototypeToString,-494656392 -builtin_hash,StringPrototypeValueOf,-494656392 -builtin_hash,StringPrototypeCharAt,-1053868658 -builtin_hash,StringPrototypeCharCodeAt,228833317 
-builtin_hash,StringPrototypeCodePointAt,-570719484 -builtin_hash,StringPrototypeConcat,408689273 -builtin_hash,StringConstructor,259187035 -builtin_hash,StringAddConvertLeft,-940497411 -builtin_hash,StringAddConvertRight,262934773 -builtin_hash,StringCharAt,40686160 -builtin_hash,FastNewClosureBaseline,274645735 -builtin_hash,FastNewFunctionContextFunction,-198047187 -builtin_hash,CreateRegExpLiteral,-338674408 -builtin_hash,CreateShallowArrayLiteral,-564919299 -builtin_hash,CreateEmptyArrayLiteral,643763718 -builtin_hash,CreateShallowObjectLiteral,1034845855 -builtin_hash,ObjectConstructor,-400745906 -builtin_hash,CreateEmptyLiteralObject,-942901622 -builtin_hash,NumberConstructor,287129549 -builtin_hash,StringToNumber,831842981 -builtin_hash,NonNumberToNumber,-93280944 -builtin_hash,NonNumberToNumeric,773024939 -builtin_hash,ToNumeric,-142083210 -builtin_hash,NumberToString,882645046 -builtin_hash,ToBoolean,303836147 -builtin_hash,ToBooleanForBaselineJump,-629829779 -builtin_hash,ToLength,824030765 -builtin_hash,ToName,1027768228 -builtin_hash,ToObject,71126311 -builtin_hash,NonPrimitiveToPrimitive_Default,847969156 -builtin_hash,NonPrimitiveToPrimitive_Number,847969156 -builtin_hash,NonPrimitiveToPrimitive_String,847969156 -builtin_hash,OrdinaryToPrimitive_Number,424204202 -builtin_hash,OrdinaryToPrimitive_String,424204202 -builtin_hash,DataViewPrototypeGetByteLength,480775435 -builtin_hash,DataViewPrototypeGetFloat64,1003024607 -builtin_hash,DataViewPrototypeSetUint32,229410881 -builtin_hash,DataViewPrototypeSetFloat64,-938786777 -builtin_hash,FunctionPrototypeHasInstance,505391038 -builtin_hash,FastFunctionPrototypeBind,-508566393 -builtin_hash,ForInNext,-1021384217 -builtin_hash,GetIteratorWithFeedback,51240002 -builtin_hash,GetIteratorBaseline,-216059576 -builtin_hash,CallIteratorWithFeedback,581894736 -builtin_hash,MathAbs,-187735218 -builtin_hash,MathCeil,-338453010 -builtin_hash,MathFloor,506568824 -builtin_hash,MathRound,-849255534 -builtin_hash,MathPow,-187693750 -builtin_hash,MathMax,897170464 -builtin_hash,MathMin,213295587 -builtin_hash,MathAsin,788026272 -builtin_hash,MathAtan2,-181481861 -builtin_hash,MathCos,-681373097 -builtin_hash,MathExp,-410231589 -builtin_hash,MathFround,415902182 -builtin_hash,MathImul,-715703335 -builtin_hash,MathLog,941930077 -builtin_hash,MathSin,855471515 -builtin_hash,MathSign,539035638 -builtin_hash,MathSqrt,-650867412 -builtin_hash,MathTan,85134160 -builtin_hash,MathTanh,-321274619 -builtin_hash,MathRandom,827257341 -builtin_hash,NumberPrototypeToString,-583893270 -builtin_hash,NumberIsInteger,795757762 -builtin_hash,NumberIsNaN,-189048659 -builtin_hash,NumberParseFloat,-439771973 -builtin_hash,ParseInt,-192026072 -builtin_hash,NumberParseInt,82296249 -builtin_hash,Add,325215303 -builtin_hash,Subtract,52083078 -builtin_hash,Multiply,-317381366 -builtin_hash,Divide,750482944 -builtin_hash,Modulus,328987036 -builtin_hash,CreateObjectWithoutProperties,596134857 -builtin_hash,ObjectIsExtensible,-930811057 -builtin_hash,ObjectPreventExtensions,858037175 -builtin_hash,ObjectGetPrototypeOf,-202287704 -builtin_hash,ObjectSetPrototypeOf,-524999648 -builtin_hash,ObjectPrototypeToString,169720373 -builtin_hash,ObjectPrototypeValueOf,-28430309 -builtin_hash,FulfillPromise,21355213 -builtin_hash,NewPromiseCapability,-218729781 -builtin_hash,PromiseCapabilityDefaultResolve,336354486 -builtin_hash,PerformPromiseThen,278993520 -builtin_hash,PromiseAll,-190868023 -builtin_hash,PromiseAllResolveElementClosure,-452068448 
-builtin_hash,PromiseConstructor,-93257640 -builtin_hash,PromisePrototypeCatch,-895785401 -builtin_hash,PromiseFulfillReactionJob,-249420439 -builtin_hash,PromiseResolveTrampoline,-927707015 -builtin_hash,PromiseResolve,557472834 -builtin_hash,ResolvePromise,-983952394 -builtin_hash,PromisePrototypeThen,866384223 -builtin_hash,PromiseResolveThenableJob,-536493053 -builtin_hash,ProxyConstructor,-754644599 -builtin_hash,ProxyGetProperty,213230890 -builtin_hash,ProxyIsExtensible,-484311481 -builtin_hash,ProxyPreventExtensions,297316441 -builtin_hash,ReflectGet,-1068630269 -builtin_hash,ReflectHas,-927707015 -builtin_hash,RegExpPrototypeExec,-1008377217 -builtin_hash,RegExpMatchFast,470663647 -builtin_hash,RegExpReplace,-181940551 -builtin_hash,RegExpPrototypeReplace,39784472 -builtin_hash,RegExpSearchFast,-745723781 -builtin_hash,RegExpPrototypeSourceGetter,-712979884 -builtin_hash,RegExpSplit,881747508 -builtin_hash,RegExpPrototypeTest,-1044390149 -builtin_hash,RegExpPrototypeTestFast,381723041 -builtin_hash,RegExpPrototypeGlobalGetter,-937075195 -builtin_hash,RegExpPrototypeIgnoreCaseGetter,-369470981 -builtin_hash,RegExpPrototypeMultilineGetter,-760493777 -builtin_hash,RegExpPrototypeHasIndicesGetter,260570818 -builtin_hash,RegExpPrototypeDotAllGetter,260570818 -builtin_hash,RegExpPrototypeStickyGetter,-823365333 -builtin_hash,RegExpPrototypeUnicodeGetter,-823365333 -builtin_hash,RegExpPrototypeFlagsGetter,-334954319 -builtin_hash,StringPrototypeEndsWith,-178713286 -builtin_hash,StringPrototypeIncludes,482244051 -builtin_hash,StringPrototypeIndexOf,-130883228 -builtin_hash,StringPrototypeIterator,287346997 -builtin_hash,StringIteratorPrototypeNext,-1032793009 -builtin_hash,StringPrototypeMatch,-984631220 -builtin_hash,StringPrototypeSearch,-984631220 -builtin_hash,StringRepeat,407848752 -builtin_hash,StringPrototypeSlice,538867513 -builtin_hash,StringPrototypeStartsWith,-490931141 -builtin_hash,StringPrototypeSubstr,-1042470347 -builtin_hash,StringPrototypeSubstring,283811647 -builtin_hash,StringPrototypeTrim,509891784 -builtin_hash,SymbolPrototypeToString,477000612 -builtin_hash,CreateTypedArray,255685378 -builtin_hash,TypedArrayFrom,-527855119 -builtin_hash,TypedArrayPrototypeSet,-1042496168 -builtin_hash,TypedArrayPrototypeSubArray,-873399896 -builtin_hash,NewSloppyArgumentsElements,840136606 -builtin_hash,NewStrictArgumentsElements,-701611224 -builtin_hash,NewRestArgumentsElements,450207936 -builtin_hash,FastNewSloppyArguments,427487705 -builtin_hash,FastNewStrictArguments,940012797 -builtin_hash,FastNewRestArguments,408066435 -builtin_hash,StringSlowFlatten,200237548 -builtin_hash,StringIndexOf,506557026 -builtin_hash,Load_FastSmiElements_0,653973042 -builtin_hash,Load_FastObjectElements_0,653973042 -builtin_hash,Store_FastSmiElements_0,418112357 -builtin_hash,Store_FastObjectElements_0,-800846888 -builtin_hash,SortCompareDefault,304655245 -builtin_hash,SortCompareUserFn,-618954695 -builtin_hash,Copy,144972358 -builtin_hash,MergeAt,425921460 -builtin_hash,GallopLeft,-851568101 -builtin_hash,GallopRight,-1027593577 -builtin_hash,ArrayTimSort,-833804402 -builtin_hash,ArrayPrototypeSort,-985590788 -builtin_hash,StringFastLocaleCompare,-984642925 -builtin_hash,WasmInt32ToHeapNumber,-560703743 -builtin_hash,WasmTaggedNonSmiToInt32,-977713892 -builtin_hash,WasmTriggerTierUp,248716051 -builtin_hash,WasmStackGuard,534090893 -builtin_hash,CanUseSameAccessor_FastSmiElements_0,302023388 -builtin_hash,CanUseSameAccessor_FastObjectElements_0,302023388 
-builtin_hash,StringPrototypeToLowerCaseIntl,525980093 -builtin_hash,StringToLowerCaseIntl,-870512333 -builtin_hash,WideHandler,964339339 -builtin_hash,ExtraWideHandler,964339339 -builtin_hash,LdarHandler,-1051441899 -builtin_hash,LdaZeroHandler,-361600616 -builtin_hash,LdaSmiHandler,-168991530 -builtin_hash,LdaUndefinedHandler,-638450171 -builtin_hash,LdaNullHandler,-638450171 -builtin_hash,LdaTheHoleHandler,-638450171 -builtin_hash,LdaTrueHandler,-1021965256 -builtin_hash,LdaFalseHandler,-1021965256 -builtin_hash,LdaConstantHandler,356409883 -builtin_hash,LdaContextSlotHandler,943881063 -builtin_hash,LdaImmutableContextSlotHandler,943881063 -builtin_hash,LdaCurrentContextSlotHandler,-382503010 -builtin_hash,LdaImmutableCurrentContextSlotHandler,-382503010 -builtin_hash,StarHandler,87231384 -builtin_hash,MovHandler,83265050 -builtin_hash,PushContextHandler,-240552696 -builtin_hash,PopContextHandler,112490181 -builtin_hash,TestReferenceEqualHandler,-82232472 -builtin_hash,TestUndetectableHandler,-510593553 -builtin_hash,TestNullHandler,68435121 -builtin_hash,TestUndefinedHandler,68435121 -builtin_hash,TestTypeOfHandler,112039968 -builtin_hash,LdaGlobalHandler,-710429991 -builtin_hash,LdaGlobalInsideTypeofHandler,449865173 -builtin_hash,StaGlobalHandler,431419910 -builtin_hash,StaContextSlotHandler,-317790092 -builtin_hash,StaCurrentContextSlotHandler,-425140012 -builtin_hash,LdaLookupGlobalSlotHandler,-213340191 -builtin_hash,LdaLookupGlobalSlotInsideTypeofHandler,841482473 -builtin_hash,StaLookupSlotHandler,712075677 -builtin_hash,GetNamedPropertyHandler,-731343419 -builtin_hash,GetNamedPropertyFromSuperHandler,-238764505 -builtin_hash,GetKeyedPropertyHandler,533315167 -builtin_hash,SetNamedPropertyHandler,-228269698 -builtin_hash,DefineNamedOwnPropertyHandler,-228269698 -builtin_hash,SetKeyedPropertyHandler,-286024195 -builtin_hash,DefineKeyedOwnPropertyHandler,-286024195 -builtin_hash,StaInArrayLiteralHandler,-286024195 -builtin_hash,DefineKeyedOwnPropertyInLiteralHandler,816417950 -builtin_hash,AddHandler,436623080 -builtin_hash,SubHandler,828723569 -builtin_hash,MulHandler,-316424498 -builtin_hash,DivHandler,515954740 -builtin_hash,ModHandler,-366411552 -builtin_hash,ExpHandler,841779190 -builtin_hash,BitwiseOrHandler,-654281787 -builtin_hash,BitwiseXorHandler,-535984936 -builtin_hash,BitwiseAndHandler,-787882211 -builtin_hash,ShiftLeftHandler,-843937923 -builtin_hash,ShiftRightHandler,212674128 -builtin_hash,ShiftRightLogicalHandler,46895009 -builtin_hash,AddSmiHandler,445675964 -builtin_hash,SubSmiHandler,888105299 -builtin_hash,MulSmiHandler,75879268 -builtin_hash,DivSmiHandler,-1048906096 -builtin_hash,ModSmiHandler,844716305 -builtin_hash,BitwiseOrSmiHandler,950867671 -builtin_hash,BitwiseXorSmiHandler,667017295 -builtin_hash,BitwiseAndSmiHandler,16303015 -builtin_hash,ShiftLeftSmiHandler,-243955085 -builtin_hash,ShiftRightSmiHandler,247609496 -builtin_hash,ShiftRightLogicalSmiHandler,-992904191 -builtin_hash,IncHandler,-790806177 -builtin_hash,DecHandler,395099716 -builtin_hash,NegateHandler,773570671 -builtin_hash,BitwiseNotHandler,-30220634 -builtin_hash,ToBooleanLogicalNotHandler,-99917552 -builtin_hash,LogicalNotHandler,-229756855 -builtin_hash,TypeOfHandler,872105570 -builtin_hash,DeletePropertyStrictHandler,986048046 -builtin_hash,DeletePropertySloppyHandler,-712291966 -builtin_hash,GetSuperConstructorHandler,728738893 -builtin_hash,CallAnyReceiverHandler,-851436429 -builtin_hash,CallPropertyHandler,-851436429 -builtin_hash,CallProperty0Handler,250813033 
-builtin_hash,CallProperty1Handler,-121823545 -builtin_hash,CallProperty2Handler,370258853 -builtin_hash,CallUndefinedReceiverHandler,-986912746 -builtin_hash,CallUndefinedReceiver0Handler,-202732647 -builtin_hash,CallUndefinedReceiver1Handler,565224727 -builtin_hash,CallUndefinedReceiver2Handler,387553770 -builtin_hash,CallWithSpreadHandler,-851436429 -builtin_hash,CallRuntimeHandler,-170619637 -builtin_hash,CallJSRuntimeHandler,409704029 -builtin_hash,InvokeIntrinsicHandler,715377671 -builtin_hash,ConstructHandler,-131706767 -builtin_hash,ConstructWithSpreadHandler,364942391 -builtin_hash,TestEqualHandler,-837108372 -builtin_hash,TestEqualStrictHandler,894153483 -builtin_hash,TestLessThanHandler,-203533412 -builtin_hash,TestGreaterThanHandler,-62684313 -builtin_hash,TestLessThanOrEqualHandler,200710478 -builtin_hash,TestGreaterThanOrEqualHandler,-469101728 -builtin_hash,TestInstanceOfHandler,-340407472 -builtin_hash,TestInHandler,-632047176 -builtin_hash,ToNameHandler,751533885 -builtin_hash,ToNumberHandler,242489604 -builtin_hash,ToNumericHandler,839640143 -builtin_hash,ToObjectHandler,751533885 -builtin_hash,ToStringHandler,-825940301 -builtin_hash,CreateRegExpLiteralHandler,774985654 -builtin_hash,CreateArrayLiteralHandler,440763826 -builtin_hash,CreateArrayFromIterableHandler,-751073497 -builtin_hash,CreateEmptyArrayLiteralHandler,825438818 -builtin_hash,CreateObjectLiteralHandler,-380204898 -builtin_hash,CreateEmptyObjectLiteralHandler,-480083872 -builtin_hash,CreateClosureHandler,877654061 -builtin_hash,CreateBlockContextHandler,88550400 -builtin_hash,CreateCatchContextHandler,-353230330 -builtin_hash,CreateFunctionContextHandler,159733033 -builtin_hash,CreateMappedArgumentsHandler,427483474 -builtin_hash,CreateUnmappedArgumentsHandler,-700097820 -builtin_hash,CreateRestParameterHandler,-443936485 -builtin_hash,JumpLoopHandler,-343051033 -builtin_hash,JumpHandler,930291898 -builtin_hash,JumpConstantHandler,30595851 -builtin_hash,JumpIfUndefinedConstantHandler,643859045 -builtin_hash,JumpIfNotUndefinedConstantHandler,-579153610 -builtin_hash,JumpIfUndefinedOrNullConstantHandler,407521228 -builtin_hash,JumpIfTrueConstantHandler,643859045 -builtin_hash,JumpIfFalseConstantHandler,643859045 -builtin_hash,JumpIfToBooleanTrueConstantHandler,696724860 -builtin_hash,JumpIfToBooleanFalseConstantHandler,-179848636 -builtin_hash,JumpIfToBooleanTrueHandler,690028724 -builtin_hash,JumpIfToBooleanFalseHandler,-185041964 -builtin_hash,JumpIfTrueHandler,-843949956 -builtin_hash,JumpIfFalseHandler,-843949956 -builtin_hash,JumpIfNullHandler,-843949956 -builtin_hash,JumpIfNotNullHandler,1010847017 -builtin_hash,JumpIfUndefinedHandler,-843949956 -builtin_hash,JumpIfNotUndefinedHandler,1010847017 -builtin_hash,JumpIfUndefinedOrNullHandler,-517586474 -builtin_hash,JumpIfJSReceiverHandler,-9297690 -builtin_hash,SwitchOnSmiNoFeedbackHandler,-716989037 -builtin_hash,ForInEnumerateHandler,61176991 -builtin_hash,ForInPrepareHandler,748762980 -builtin_hash,ForInContinueHandler,-714020166 -builtin_hash,ForInNextHandler,1029822774 -builtin_hash,ForInStepHandler,776836572 -builtin_hash,SetPendingMessageHandler,-975756024 -builtin_hash,ThrowHandler,644164630 -builtin_hash,ReThrowHandler,644164630 -builtin_hash,ReturnHandler,258099816 -builtin_hash,ThrowReferenceErrorIfHoleHandler,-638014566 -builtin_hash,ThrowSuperNotCalledIfHoleHandler,-464286245 -builtin_hash,ThrowSuperAlreadyCalledIfNotHoleHandler,398738172 -builtin_hash,ThrowIfNotSuperConstructorHandler,405467231 
-builtin_hash,SwitchOnGeneratorStateHandler,-1026917452 -builtin_hash,SuspendGeneratorHandler,-1054903459 -builtin_hash,ResumeGeneratorHandler,31675678 -builtin_hash,GetIteratorHandler,-711157277 -builtin_hash,ShortStarHandler,-568713113 -builtin_hash,LdarWideHandler,756831261 -builtin_hash,LdaSmiWideHandler,-782553530 -builtin_hash,LdaConstantWideHandler,-1037025209 -builtin_hash,LdaContextSlotWideHandler,1063635436 -builtin_hash,LdaImmutableContextSlotWideHandler,1063635436 -builtin_hash,LdaImmutableCurrentContextSlotWideHandler,847312619 -builtin_hash,StarWideHandler,40106091 -builtin_hash,MovWideHandler,-754864487 -builtin_hash,PushContextWideHandler,-948084621 -builtin_hash,PopContextWideHandler,245106357 -builtin_hash,TestReferenceEqualWideHandler,335478736 -builtin_hash,LdaGlobalWideHandler,-1010718236 -builtin_hash,LdaGlobalInsideTypeofWideHandler,-149141203 -builtin_hash,StaGlobalWideHandler,723033359 -builtin_hash,StaContextSlotWideHandler,731461442 -builtin_hash,StaCurrentContextSlotWideHandler,824213038 -builtin_hash,LdaLookupGlobalSlotWideHandler,-311844471 -builtin_hash,GetNamedPropertyWideHandler,-251017711 -builtin_hash,GetKeyedPropertyWideHandler,-256173219 -builtin_hash,SetNamedPropertyWideHandler,137651840 -builtin_hash,DefineNamedOwnPropertyWideHandler,137651840 -builtin_hash,SetKeyedPropertyWideHandler,-232580858 -builtin_hash,DefineKeyedOwnPropertyWideHandler,-232580858 -builtin_hash,StaInArrayLiteralWideHandler,-232580858 -builtin_hash,AddWideHandler,40447064 -builtin_hash,SubWideHandler,563692284 -builtin_hash,MulWideHandler,42508558 -builtin_hash,DivWideHandler,-844098316 -builtin_hash,BitwiseOrWideHandler,-328979788 -builtin_hash,BitwiseAndWideHandler,-469583662 -builtin_hash,ShiftLeftWideHandler,-954698376 -builtin_hash,AddSmiWideHandler,683295664 -builtin_hash,SubSmiWideHandler,-413733930 -builtin_hash,MulSmiWideHandler,-838301803 -builtin_hash,DivSmiWideHandler,527308360 -builtin_hash,ModSmiWideHandler,1034309589 -builtin_hash,BitwiseOrSmiWideHandler,492037335 -builtin_hash,BitwiseXorSmiWideHandler,-42936112 -builtin_hash,BitwiseAndSmiWideHandler,-477429885 -builtin_hash,ShiftLeftSmiWideHandler,672571321 -builtin_hash,ShiftRightSmiWideHandler,-67772744 -builtin_hash,ShiftRightLogicalSmiWideHandler,127757354 -builtin_hash,IncWideHandler,-922639629 -builtin_hash,DecWideHandler,-907897874 -builtin_hash,NegateWideHandler,-988497568 -builtin_hash,CallPropertyWideHandler,-698600283 -builtin_hash,CallProperty0WideHandler,1057782406 -builtin_hash,CallProperty1WideHandler,166979495 -builtin_hash,CallProperty2WideHandler,-58239499 -builtin_hash,CallUndefinedReceiverWideHandler,-121494034 -builtin_hash,CallUndefinedReceiver0WideHandler,-36365251 -builtin_hash,CallUndefinedReceiver1WideHandler,-1052313758 -builtin_hash,CallUndefinedReceiver2WideHandler,-469182582 -builtin_hash,CallWithSpreadWideHandler,-698600283 -builtin_hash,ConstructWideHandler,411540742 -builtin_hash,TestEqualWideHandler,-902995058 -builtin_hash,TestEqualStrictWideHandler,852452310 -builtin_hash,TestLessThanWideHandler,814869973 -builtin_hash,TestGreaterThanWideHandler,18887871 -builtin_hash,TestLessThanOrEqualWideHandler,34488528 -builtin_hash,TestGreaterThanOrEqualWideHandler,702527286 -builtin_hash,TestInstanceOfWideHandler,577442592 -builtin_hash,TestInWideHandler,469604978 -builtin_hash,ToNumericWideHandler,-933767737 -builtin_hash,CreateRegExpLiteralWideHandler,-286224018 -builtin_hash,CreateArrayLiteralWideHandler,925645732 -builtin_hash,CreateEmptyArrayLiteralWideHandler,556344123 
-builtin_hash,CreateObjectLiteralWideHandler,-878578517 -builtin_hash,CreateClosureWideHandler,-102315205 -builtin_hash,CreateBlockContextWideHandler,392513921 -builtin_hash,CreateFunctionContextWideHandler,179338975 -builtin_hash,JumpLoopWideHandler,-949183832 -builtin_hash,JumpWideHandler,930291898 -builtin_hash,JumpIfToBooleanTrueWideHandler,-205748918 -builtin_hash,JumpIfToBooleanFalseWideHandler,1069905826 -builtin_hash,JumpIfTrueWideHandler,948634550 -builtin_hash,JumpIfFalseWideHandler,948634550 -builtin_hash,SwitchOnSmiNoFeedbackWideHandler,633155127 -builtin_hash,ForInPrepareWideHandler,-215451327 -builtin_hash,ForInNextWideHandler,372934797 -builtin_hash,ThrowReferenceErrorIfHoleWideHandler,298664482 -builtin_hash,GetIteratorWideHandler,43434708 -builtin_hash,LdaSmiExtraWideHandler,-892533764 -builtin_hash,LdaGlobalExtraWideHandler,661412585 -builtin_hash,AddSmiExtraWideHandler,-692969189 -builtin_hash,SubSmiExtraWideHandler,435863200 -builtin_hash,MulSmiExtraWideHandler,1072184980 -builtin_hash,DivSmiExtraWideHandler,-704989643 -builtin_hash,BitwiseOrSmiExtraWideHandler,-957712250 -builtin_hash,BitwiseXorSmiExtraWideHandler,44753591 -builtin_hash,BitwiseAndSmiExtraWideHandler,-563032786 -builtin_hash,CallUndefinedReceiverExtraWideHandler,-267336492 -builtin_hash,CallUndefinedReceiver1ExtraWideHandler,93448265 -builtin_hash,CallUndefinedReceiver2ExtraWideHandler,11673012 +builtin_hash,RecordWriteSaveFP,-613048523 +builtin_hash,RecordWriteIgnoreFP,-613048523 +builtin_hash,EphemeronKeyBarrierSaveFP,-874028499 +builtin_hash,AdaptorWithBuiltinExitFrame,-50443338 +builtin_hash,Call_ReceiverIsNullOrUndefined_Baseline_Compact,277963652 +builtin_hash,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,739975018 +builtin_hash,Call_ReceiverIsAny_Baseline_Compact,739975018 +builtin_hash,CallProxy,265720458 +builtin_hash,CallWithSpread,690518666 +builtin_hash,CallWithSpread_Baseline,14944167 +builtin_hash,CallWithArrayLike,-463192950 +builtin_hash,ConstructWithSpread,1026222363 +builtin_hash,ConstructWithSpread_Baseline,-954447059 +builtin_hash,Construct_Baseline,242132798 +builtin_hash,FastNewObject,812115752 +builtin_hash,FastNewClosure,-22842529 +builtin_hash,StringEqual,552928703 +builtin_hash,StringGreaterThan,814990350 +builtin_hash,StringGreaterThanOrEqual,-931415038 +builtin_hash,StringLessThan,-931415038 +builtin_hash,StringLessThanOrEqual,814990350 +builtin_hash,StringSubstring,679034293 +builtin_hash,OrderedHashTableHealIndex,-480837431 +builtin_hash,CompileLazy,-913572652 +builtin_hash,CompileLazyDeoptimizedCode,797435305 +builtin_hash,InstantiateAsmJs,-824208537 +builtin_hash,AllocateInYoungGeneration,-589367571 +builtin_hash,AllocateRegularInYoungGeneration,549206964 +builtin_hash,AllocateRegularInOldGeneration,549206964 +builtin_hash,CopyFastSmiOrObjectElements,-664166620 +builtin_hash,GrowFastDoubleElements,-794207344 +builtin_hash,GrowFastSmiOrObjectElements,-727031326 +builtin_hash,ToNumber,87194511 +builtin_hash,ToNumber_Baseline,-245107362 +builtin_hash,ToNumeric_Baseline,765738096 +builtin_hash,ToNumberConvertBigInt,-809735249 +builtin_hash,Typeof,554300746 +builtin_hash,KeyedLoadIC_PolymorphicName,808866465 +builtin_hash,KeyedStoreIC_Megamorphic,355428822 +builtin_hash,DefineKeyedOwnIC_Megamorphic,-254774567 +builtin_hash,LoadGlobalIC_NoFeedback,567497889 +builtin_hash,LoadIC_FunctionPrototype,440547932 +builtin_hash,LoadIC_StringLength,631981109 +builtin_hash,LoadIC_StringWrapperLength,957410129 +builtin_hash,LoadIC_NoFeedback,-673925088 
+builtin_hash,StoreIC_NoFeedback,599149807 +builtin_hash,DefineNamedOwnIC_NoFeedback,-684443605 +builtin_hash,KeyedLoadIC_SloppyArguments,732273933 +builtin_hash,StoreFastElementIC_Standard,-310030150 +builtin_hash,StoreFastElementIC_GrowNoTransitionHandleCOW,-894353505 +builtin_hash,StoreFastElementIC_NoTransitionHandleCOW,-684092303 +builtin_hash,ElementsTransitionAndStore_Standard,-313637466 +builtin_hash,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,887654385 +builtin_hash,ElementsTransitionAndStore_NoTransitionHandleCOW,-730942180 +builtin_hash,KeyedHasIC_PolymorphicName,-900991969 +builtin_hash,EnqueueMicrotask,-201594324 +builtin_hash,RunMicrotasks,226014440 +builtin_hash,HasProperty,-179991880 +builtin_hash,DeleteProperty,-417791504 +builtin_hash,SetDataProperties,-676389083 +builtin_hash,ReturnReceiver,-253986889 +builtin_hash,ArrayConstructor,-132723945 +builtin_hash,ArrayConstructorImpl,-940010648 +builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DontOverride,-419508170 +builtin_hash,ArrayNoArgumentConstructor_HoleySmi_DontOverride,-419508170 +builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DisableAllocationSites,605372040 +builtin_hash,ArrayNoArgumentConstructor_Packed_DisableAllocationSites,605372040 +builtin_hash,ArrayNoArgumentConstructor_Holey_DisableAllocationSites,605372040 +builtin_hash,ArrayNoArgumentConstructor_PackedDouble_DisableAllocationSites,-118459699 +builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DontOverride,-533026482 +builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,276667194 +builtin_hash,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,276667194 +builtin_hash,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,-533601049 +builtin_hash,ArrayIncludesSmi,-836179006 +builtin_hash,ArrayIncludesSmiOrObject,162670788 +builtin_hash,ArrayIncludes,508610041 +builtin_hash,ArrayIndexOfSmi,-144958716 +builtin_hash,ArrayIndexOfSmiOrObject,-560665373 +builtin_hash,ArrayIndexOf,659016893 +builtin_hash,ArrayPrototypePop,-672159034 +builtin_hash,ArrayPrototypePush,-828516926 +builtin_hash,CloneFastJSArray,330962956 +builtin_hash,CloneFastJSArrayFillingHoles,-114093580 +builtin_hash,ExtractFastJSArray,-899029625 +builtin_hash,ArrayPrototypeEntries,-846534049 +builtin_hash,ArrayPrototypeKeys,-432117890 +builtin_hash,ArrayPrototypeValues,-846534049 +builtin_hash,ArrayIteratorPrototypeNext,167355436 +builtin_hash,AsyncFunctionEnter,-860415031 +builtin_hash,AsyncFunctionResolve,910332485 +builtin_hash,AsyncFunctionAwaitCaught,-619125883 +builtin_hash,AsyncFunctionAwaitUncaught,-619125883 +builtin_hash,AsyncFunctionAwaitResolveClosure,-441313583 +builtin_hash,DatePrototypeGetDate,596885245 +builtin_hash,DatePrototypeGetDay,596885245 +builtin_hash,DatePrototypeGetFullYear,596885245 +builtin_hash,DatePrototypeGetHours,596885245 +builtin_hash,DatePrototypeGetMilliseconds,-147735130 +builtin_hash,DatePrototypeGetMinutes,596885245 +builtin_hash,DatePrototypeGetMonth,596885245 +builtin_hash,DatePrototypeGetSeconds,596885245 +builtin_hash,DatePrototypeGetTime,842589209 +builtin_hash,DatePrototypeGetTimezoneOffset,-147735130 +builtin_hash,DatePrototypeValueOf,842589209 +builtin_hash,DatePrototypeToPrimitive,-469261030 +builtin_hash,CreateIterResultObject,-236239497 +builtin_hash,CreateGeneratorObject,-989601020 +builtin_hash,GeneratorPrototypeNext,-532167070 +builtin_hash,GeneratorPrototypeReturn,204056688 +builtin_hash,SuspendGeneratorBaseline,-716242694 +builtin_hash,ResumeGeneratorBaseline,600643426 
+builtin_hash,GlobalIsFinite,-28742852 +builtin_hash,GlobalIsNaN,-414427038 +builtin_hash,LoadIC,-1028921753 +builtin_hash,LoadIC_Megamorphic,604208967 +builtin_hash,LoadIC_Noninlined,-411987614 +builtin_hash,LoadICTrampoline,800274028 +builtin_hash,LoadICBaseline,470944725 +builtin_hash,LoadICTrampoline_Megamorphic,800274028 +builtin_hash,LoadSuperIC,-145652312 +builtin_hash,LoadSuperICBaseline,-463763660 +builtin_hash,KeyedLoadIC,-400473566 +builtin_hash,KeyedLoadIC_Megamorphic,41817838 +builtin_hash,KeyedLoadICTrampoline,800274028 +builtin_hash,KeyedLoadICBaseline,470944725 +builtin_hash,KeyedLoadICTrampoline_Megamorphic,800274028 +builtin_hash,StoreGlobalIC,-985598929 +builtin_hash,StoreGlobalICTrampoline,800274028 +builtin_hash,StoreGlobalICBaseline,470944725 +builtin_hash,StoreIC,107868822 +builtin_hash,StoreICTrampoline,515324941 +builtin_hash,StoreICBaseline,-463763660 +builtin_hash,DefineNamedOwnIC,293425336 +builtin_hash,DefineNamedOwnICBaseline,-463763660 +builtin_hash,KeyedStoreIC,-634858106 +builtin_hash,KeyedStoreICTrampoline,515324941 +builtin_hash,KeyedStoreICBaseline,-463763660 +builtin_hash,DefineKeyedOwnIC,-567510982 +builtin_hash,StoreInArrayLiteralIC,336733574 +builtin_hash,StoreInArrayLiteralICBaseline,-463763660 +builtin_hash,LoadGlobalIC,-994002095 +builtin_hash,LoadGlobalICInsideTypeof,131610143 +builtin_hash,LoadGlobalICTrampoline,-356577892 +builtin_hash,LoadGlobalICBaseline,-87390287 +builtin_hash,LoadGlobalICInsideTypeofTrampoline,-356577892 +builtin_hash,LoadGlobalICInsideTypeofBaseline,-87390287 +builtin_hash,LookupGlobalICBaseline,195819709 +builtin_hash,LookupGlobalICInsideTypeofBaseline,195819709 +builtin_hash,KeyedHasIC,-581893205 +builtin_hash,KeyedHasICBaseline,470944725 +builtin_hash,KeyedHasIC_Megamorphic,-179991880 +builtin_hash,IterableToList,-847583682 +builtin_hash,IterableToListWithSymbolLookup,639766325 +builtin_hash,IterableToListMayPreserveHoles,915672519 +builtin_hash,FindOrderedHashMapEntry,257985360 +builtin_hash,MapConstructor,173900465 +builtin_hash,MapPrototypeSet,-909373880 +builtin_hash,MapPrototypeDelete,-182536468 +builtin_hash,MapPrototypeGet,-10028336 +builtin_hash,MapPrototypeHas,-139761843 +builtin_hash,MapPrototypeEntries,-344495525 +builtin_hash,MapPrototypeGetSize,1002199563 +builtin_hash,MapPrototypeForEach,666422496 +builtin_hash,MapPrototypeKeys,-344495525 +builtin_hash,MapPrototypeValues,-344495525 +builtin_hash,MapIteratorPrototypeNext,824163271 +builtin_hash,MapIteratorToList,-171739571 +builtin_hash,SameValueNumbersOnly,-385008716 +builtin_hash,Add_Baseline,-279802821 +builtin_hash,AddSmi_Baseline,-180294218 +builtin_hash,Subtract_Baseline,422911741 +builtin_hash,SubtractSmi_Baseline,593938918 +builtin_hash,Multiply_Baseline,-390820476 +builtin_hash,MultiplySmi_Baseline,325873812 +builtin_hash,Divide_Baseline,-303206156 +builtin_hash,DivideSmi_Baseline,-760734875 +builtin_hash,Modulus_Baseline,-56419644 +builtin_hash,ModulusSmi_Baseline,-723448 +builtin_hash,Exponentiate_Baseline,-897267514 +builtin_hash,BitwiseAnd_Baseline,368212144 +builtin_hash,BitwiseAndSmi_Baseline,-1040430105 +builtin_hash,BitwiseOr_Baseline,-468458668 +builtin_hash,BitwiseOrSmi_Baseline,688726246 +builtin_hash,BitwiseXor_Baseline,-113074811 +builtin_hash,BitwiseXorSmi_Baseline,601401020 +builtin_hash,ShiftLeft_Baseline,-775732772 +builtin_hash,ShiftLeftSmi_Baseline,-78665210 +builtin_hash,ShiftRight_Baseline,748634885 +builtin_hash,ShiftRightSmi_Baseline,886941283 +builtin_hash,ShiftRightLogical_Baseline,561208446 
+builtin_hash,ShiftRightLogicalSmi_Baseline,-31850172 +builtin_hash,Add_WithFeedback,-713508648 +builtin_hash,Subtract_WithFeedback,-1006518356 +builtin_hash,Modulus_WithFeedback,673708690 +builtin_hash,BitwiseOr_WithFeedback,-71811840 +builtin_hash,Equal_Baseline,-449571287 +builtin_hash,StrictEqual_Baseline,-311709296 +builtin_hash,LessThan_Baseline,-1041710075 +builtin_hash,GreaterThan_Baseline,763769306 +builtin_hash,LessThanOrEqual_Baseline,-289600196 +builtin_hash,GreaterThanOrEqual_Baseline,-964000144 +builtin_hash,Equal_WithFeedback,-804822195 +builtin_hash,StrictEqual_WithFeedback,316409561 +builtin_hash,LessThan_WithFeedback,-1041748847 +builtin_hash,GreaterThan_WithFeedback,208079969 +builtin_hash,GreaterThanOrEqual_WithFeedback,50039232 +builtin_hash,BitwiseNot_Baseline,574212378 +builtin_hash,Decrement_Baseline,740961552 +builtin_hash,Increment_Baseline,-482954167 +builtin_hash,Negate_Baseline,257429052 +builtin_hash,ObjectAssign,415745977 +builtin_hash,ObjectCreate,152352347 +builtin_hash,ObjectEntries,-267361188 +builtin_hash,ObjectGetOwnPropertyDescriptor,-1005546404 +builtin_hash,ObjectGetOwnPropertyNames,-10249982 +builtin_hash,ObjectIs,947042700 +builtin_hash,ObjectKeys,276395735 +builtin_hash,ObjectPrototypeHasOwnProperty,-366540189 +builtin_hash,ObjectToString,-680252272 +builtin_hash,InstanceOf_WithFeedback,-814385450 +builtin_hash,InstanceOf_Baseline,-567095434 +builtin_hash,ForInEnumerate,329908035 +builtin_hash,ForInPrepare,731557174 +builtin_hash,ForInFilter,884185984 +builtin_hash,RegExpConstructor,-1029370119 +builtin_hash,RegExpExecAtom,181372809 +builtin_hash,RegExpExecInternal,317900879 +builtin_hash,FindOrderedHashSetEntry,482436035 +builtin_hash,SetConstructor,692235107 +builtin_hash,SetPrototypeHas,-139761843 +builtin_hash,SetPrototypeAdd,-596680080 +builtin_hash,SetPrototypeDelete,331633635 +builtin_hash,SetPrototypeEntries,-344495525 +builtin_hash,SetPrototypeGetSize,1002199563 +builtin_hash,SetPrototypeForEach,97244170 +builtin_hash,SetPrototypeValues,-344495525 +builtin_hash,SetIteratorPrototypeNext,-441725951 +builtin_hash,SetOrSetIteratorToList,623342942 +builtin_hash,StringFromCharCode,-123751380 +builtin_hash,StringPrototypeReplace,-921072145 +builtin_hash,StringPrototypeSplit,415613472 +builtin_hash,TypedArrayConstructor,32466415 +builtin_hash,TypedArrayPrototypeByteLength,864895308 +builtin_hash,TypedArrayPrototypeLength,539604699 +builtin_hash,WeakMapConstructor,814764494 +builtin_hash,WeakMapLookupHashIndex,-464287185 +builtin_hash,WeakMapGet,925651553 +builtin_hash,WeakMapPrototypeHas,947465532 +builtin_hash,WeakMapPrototypeSet,-976760951 +builtin_hash,WeakSetConstructor,694246453 +builtin_hash,WeakSetPrototypeHas,947465532 +builtin_hash,WeakSetPrototypeAdd,-160318733 +builtin_hash,WeakCollectionSet,578996244 +builtin_hash,AsyncGeneratorResolve,-83028412 +builtin_hash,AsyncGeneratorYieldWithAwait,-366463177 +builtin_hash,AsyncGeneratorResumeNext,220127321 +builtin_hash,AsyncGeneratorPrototypeNext,1069549757 +builtin_hash,AsyncGeneratorAwaitUncaught,-628599896 +builtin_hash,AsyncGeneratorAwaitResolveClosure,1062097477 +builtin_hash,AsyncGeneratorYieldWithAwaitResolveClosure,793122606 +builtin_hash,StringAdd_CheckNone,113370168 +builtin_hash,SubString,895503589 +builtin_hash,GetProperty,1052862169 +builtin_hash,GetPropertyWithReceiver,1045827042 +builtin_hash,SetProperty,908643608 +builtin_hash,CreateDataProperty,-314133834 +builtin_hash,ArrayPrototypeConcat,-557766770 +builtin_hash,ArrayEvery,-740699383 
+builtin_hash,ArrayFilterLoopLazyDeoptContinuation,-463893516 +builtin_hash,ArrayFilterLoopContinuation,-636224543 +builtin_hash,ArrayFilter,-1006837550 +builtin_hash,ArrayPrototypeFind,358067331 +builtin_hash,ArrayForEachLoopLazyDeoptContinuation,-227856192 +builtin_hash,ArrayForEachLoopContinuation,498815593 +builtin_hash,ArrayForEach,-465472618 +builtin_hash,ArrayFrom,559791774 +builtin_hash,ArrayIsArray,556045869 +builtin_hash,LoadJoinElement_FastSmiOrObjectElements_0,4464260 +builtin_hash,LoadJoinElement_FastDoubleElements_0,-669389930 +builtin_hash,JoinStackPush,932509525 +builtin_hash,JoinStackPop,97051696 +builtin_hash,ArrayPrototypeJoin,638420418 +builtin_hash,ArrayPrototypeToString,571363693 +builtin_hash,ArrayPrototypeLastIndexOf,-262998450 +builtin_hash,ArrayMapLoopLazyDeoptContinuation,992596139 +builtin_hash,ArrayMapLoopContinuation,852679435 +builtin_hash,ArrayMap,237015696 +builtin_hash,ArrayReduceLoopLazyDeoptContinuation,-1021360101 +builtin_hash,ArrayReduceLoopContinuation,736239909 +builtin_hash,ArrayReduce,550306639 +builtin_hash,ArrayPrototypeReverse,-848939503 +builtin_hash,ArrayPrototypeShift,510698980 +builtin_hash,ArrayPrototypeSlice,-226926113 +builtin_hash,ArraySome,616986483 +builtin_hash,ArrayPrototypeSplice,318122997 +builtin_hash,ArrayPrototypeUnshift,942293281 +builtin_hash,ArrayBufferPrototypeGetByteLength,8155127 +builtin_hash,ArrayBufferIsView,-92420774 +builtin_hash,ToInteger,114221870 +builtin_hash,FastCreateDataProperty,683077437 +builtin_hash,BooleanConstructor,104847507 +builtin_hash,BooleanPrototypeToString,496844333 +builtin_hash,ToString,-492204321 +builtin_hash,StringPrototypeToString,232130928 +builtin_hash,StringPrototypeValueOf,232130928 +builtin_hash,StringPrototypeCharAt,-493882295 +builtin_hash,StringPrototypeCharCodeAt,-70476469 +builtin_hash,StringPrototypeCodePointAt,958343749 +builtin_hash,StringPrototypeConcat,122908250 +builtin_hash,StringConstructor,36941296 +builtin_hash,StringAddConvertLeft,895631940 +builtin_hash,StringAddConvertRight,620894196 +builtin_hash,StringCharAt,-771156702 +builtin_hash,FastNewClosureBaseline,-345301780 +builtin_hash,FastNewFunctionContextFunction,393493853 +builtin_hash,CreateRegExpLiteral,1052274841 +builtin_hash,CreateShallowArrayLiteral,758569216 +builtin_hash,CreateEmptyArrayLiteral,-244361805 +builtin_hash,CreateShallowObjectLiteral,429596211 +builtin_hash,ObjectConstructor,792071103 +builtin_hash,CreateEmptyLiteralObject,792021411 +builtin_hash,NumberConstructor,-545912408 +builtin_hash,StringToNumber,-567475001 +builtin_hash,NonNumberToNumber,-75339598 +builtin_hash,NonNumberToNumeric,-163611573 +builtin_hash,ToNumeric,1067114169 +builtin_hash,NumberToString,808056721 +builtin_hash,ToBoolean,474893826 +builtin_hash,ToBooleanForBaselineJump,-1000387172 +builtin_hash,ToLength,-752062439 +builtin_hash,ToName,-893589751 +builtin_hash,ToObject,-995611522 +builtin_hash,NonPrimitiveToPrimitive_Default,-741936834 +builtin_hash,NonPrimitiveToPrimitive_Number,-741936834 +builtin_hash,NonPrimitiveToPrimitive_String,-741936834 +builtin_hash,OrdinaryToPrimitive_Number,940682530 +builtin_hash,OrdinaryToPrimitive_String,940682530 +builtin_hash,DataViewPrototypeGetByteLength,-344862281 +builtin_hash,DataViewPrototypeGetFloat64,-710736378 +builtin_hash,DataViewPrototypeSetUint32,561326289 +builtin_hash,DataViewPrototypeSetFloat64,224815643 +builtin_hash,FunctionPrototypeHasInstance,-159239165 +builtin_hash,FastFunctionPrototypeBind,-835190429 +builtin_hash,ForInNext,-628108871 
+builtin_hash,GetIteratorWithFeedback,412632852 +builtin_hash,GetIteratorBaseline,878549031 +builtin_hash,CallIteratorWithFeedback,-173921836 +builtin_hash,MathAbs,-418374171 +builtin_hash,MathCeil,466763348 +builtin_hash,MathFloor,471221640 +builtin_hash,MathRound,-989099866 +builtin_hash,MathPow,510691647 +builtin_hash,MathMax,45115699 +builtin_hash,MathMin,-996382942 +builtin_hash,MathAsin,261451622 +builtin_hash,MathAtan2,605332815 +builtin_hash,MathCos,515079504 +builtin_hash,MathExp,551351922 +builtin_hash,MathFround,564706237 +builtin_hash,MathImul,685265173 +builtin_hash,MathLog,-553256829 +builtin_hash,MathSin,302395292 +builtin_hash,MathSign,611819739 +builtin_hash,MathSqrt,55107225 +builtin_hash,MathTan,-332405887 +builtin_hash,MathTanh,939045985 +builtin_hash,MathRandom,-504157126 +builtin_hash,NumberPrototypeToString,145247584 +builtin_hash,NumberIsInteger,910409330 +builtin_hash,NumberIsNaN,343619286 +builtin_hash,NumberParseFloat,-745268146 +builtin_hash,ParseInt,423449565 +builtin_hash,NumberParseInt,348325306 +builtin_hash,Add,-712082634 +builtin_hash,Subtract,860006498 +builtin_hash,Multiply,966938552 +builtin_hash,Divide,501339465 +builtin_hash,Modulus,556264773 +builtin_hash,CreateObjectWithoutProperties,911390056 +builtin_hash,ObjectIsExtensible,-376770424 +builtin_hash,ObjectPreventExtensions,-675757061 +builtin_hash,ObjectGetPrototypeOf,-694816240 +builtin_hash,ObjectSetPrototypeOf,-335823538 +builtin_hash,ObjectPrototypeToString,158685312 +builtin_hash,ObjectPrototypeValueOf,-993024104 +builtin_hash,FulfillPromise,-68874675 +builtin_hash,NewPromiseCapability,-880232666 +builtin_hash,PromiseCapabilityDefaultResolve,694927325 +builtin_hash,PerformPromiseThen,-238303189 +builtin_hash,PromiseAll,-121414633 +builtin_hash,PromiseAllResolveElementClosure,797273436 +builtin_hash,PromiseConstructor,-424149894 +builtin_hash,PromisePrototypeCatch,235262026 +builtin_hash,PromiseFulfillReactionJob,927825363 +builtin_hash,PromiseResolveTrampoline,-549629094 +builtin_hash,PromiseResolve,-366429795 +builtin_hash,ResolvePromise,526061379 +builtin_hash,PromisePrototypeThen,959282415 +builtin_hash,PromiseResolveThenableJob,-977786068 +builtin_hash,ProxyConstructor,-54504231 +builtin_hash,ProxyGetProperty,-692505715 +builtin_hash,ProxyIsExtensible,-120987472 +builtin_hash,ProxyPreventExtensions,739592105 +builtin_hash,ReflectGet,1006327680 +builtin_hash,ReflectHas,-549629094 +builtin_hash,RegExpPrototypeExec,866694176 +builtin_hash,RegExpMatchFast,556779044 +builtin_hash,RegExpReplace,1037671691 +builtin_hash,RegExpPrototypeReplace,-488505709 +builtin_hash,RegExpSearchFast,744647901 +builtin_hash,RegExpPrototypeSourceGetter,-69902772 +builtin_hash,RegExpSplit,418335022 +builtin_hash,RegExpPrototypeTest,-893509849 +builtin_hash,RegExpPrototypeTestFast,-541676085 +builtin_hash,RegExpPrototypeGlobalGetter,612394650 +builtin_hash,RegExpPrototypeIgnoreCaseGetter,-595775382 +builtin_hash,RegExpPrototypeMultilineGetter,368200363 +builtin_hash,RegExpPrototypeHasIndicesGetter,99570183 +builtin_hash,RegExpPrototypeDotAllGetter,99570183 +builtin_hash,RegExpPrototypeStickyGetter,471291660 +builtin_hash,RegExpPrototypeUnicodeGetter,471291660 +builtin_hash,RegExpPrototypeFlagsGetter,-493351549 +builtin_hash,StringPrototypeEndsWith,-140669855 +builtin_hash,StringPrototypeIncludes,-538712449 +builtin_hash,StringPrototypeIndexOf,-279080867 +builtin_hash,StringPrototypeIterator,-906814404 +builtin_hash,StringIteratorPrototypeNext,-459023719 +builtin_hash,StringPrototypeMatch,950777323 
+builtin_hash,StringPrototypeSearch,950777323 +builtin_hash,StringRepeat,333496990 +builtin_hash,StringPrototypeSlice,147923310 +builtin_hash,StringPrototypeStartsWith,-916453690 +builtin_hash,StringPrototypeSubstr,93046303 +builtin_hash,StringPrototypeSubstring,-486167723 +builtin_hash,StringPrototypeTrim,-537839064 +builtin_hash,SymbolPrototypeToString,-331094885 +builtin_hash,CreateTypedArray,946007034 +builtin_hash,TypedArrayFrom,-383816322 +builtin_hash,TypedArrayPrototypeSet,183639399 +builtin_hash,TypedArrayPrototypeSubArray,-654743264 +builtin_hash,NewSloppyArgumentsElements,-733955106 +builtin_hash,NewStrictArgumentsElements,27861461 +builtin_hash,NewRestArgumentsElements,-158196826 +builtin_hash,FastNewSloppyArguments,701807193 +builtin_hash,FastNewStrictArguments,-400637158 +builtin_hash,FastNewRestArguments,771398605 +builtin_hash,StringSlowFlatten,758688335 +builtin_hash,StringIndexOf,893861646 +builtin_hash,Load_FastSmiElements_0,41377987 +builtin_hash,Load_FastObjectElements_0,41377987 +builtin_hash,Store_FastSmiElements_0,987491586 +builtin_hash,Store_FastObjectElements_0,-907039137 +builtin_hash,SortCompareDefault,-712046902 +builtin_hash,SortCompareUserFn,-498446944 +builtin_hash,Copy,1005972100 +builtin_hash,MergeAt,-238184884 +builtin_hash,GallopLeft,-228579918 +builtin_hash,GallopRight,508662767 +builtin_hash,ArrayTimSort,-584574007 +builtin_hash,ArrayPrototypeSort,-446345392 +builtin_hash,StringFastLocaleCompare,-805723901 +builtin_hash,WasmInt32ToHeapNumber,186218317 +builtin_hash,WasmTaggedNonSmiToInt32,644195797 +builtin_hash,WasmTriggerTierUp,-448026998 +builtin_hash,WasmStackGuard,929375954 +builtin_hash,CanUseSameAccessor_FastSmiElements_0,333215288 +builtin_hash,CanUseSameAccessor_FastObjectElements_0,333215288 +builtin_hash,StringPrototypeToLowerCaseIntl,325118773 +builtin_hash,StringToLowerCaseIntl,729618594 +builtin_hash,WideHandler,-985531040 +builtin_hash,ExtraWideHandler,-985531040 +builtin_hash,LdarHandler,1066069071 +builtin_hash,LdaZeroHandler,697098880 +builtin_hash,LdaSmiHandler,-92763154 +builtin_hash,LdaUndefinedHandler,94159659 +builtin_hash,LdaNullHandler,94159659 +builtin_hash,LdaTheHoleHandler,94159659 +builtin_hash,LdaTrueHandler,66190034 +builtin_hash,LdaFalseHandler,66190034 +builtin_hash,LdaConstantHandler,-234672240 +builtin_hash,LdaContextSlotHandler,999512170 +builtin_hash,LdaImmutableContextSlotHandler,999512170 +builtin_hash,LdaCurrentContextSlotHandler,-705029165 +builtin_hash,LdaImmutableCurrentContextSlotHandler,-705029165 +builtin_hash,StarHandler,-825981541 +builtin_hash,MovHandler,-222623368 +builtin_hash,PushContextHandler,239039195 +builtin_hash,PopContextHandler,663403390 +builtin_hash,TestReferenceEqualHandler,107959616 +builtin_hash,TestUndetectableHandler,768306054 +builtin_hash,TestNullHandler,317848228 +builtin_hash,TestUndefinedHandler,317848228 +builtin_hash,TestTypeOfHandler,-585531608 +builtin_hash,LdaGlobalHandler,680542536 +builtin_hash,LdaGlobalInsideTypeofHandler,-812384965 +builtin_hash,StaGlobalHandler,-849976646 +builtin_hash,StaContextSlotHandler,-642236485 +builtin_hash,StaCurrentContextSlotHandler,515612512 +builtin_hash,LdaLookupGlobalSlotHandler,328181263 +builtin_hash,LdaLookupGlobalSlotInsideTypeofHandler,-152487163 +builtin_hash,StaLookupSlotHandler,1043986971 +builtin_hash,GetNamedPropertyHandler,-918198086 +builtin_hash,GetNamedPropertyFromSuperHandler,-605958764 +builtin_hash,GetKeyedPropertyHandler,-368783501 +builtin_hash,SetNamedPropertyHandler,512867069 
+builtin_hash,DefineNamedOwnPropertyHandler,512867069 +builtin_hash,SetKeyedPropertyHandler,-529790650 +builtin_hash,DefineKeyedOwnPropertyHandler,-529790650 +builtin_hash,StaInArrayLiteralHandler,-529790650 +builtin_hash,DefineKeyedOwnPropertyInLiteralHandler,519916231 +builtin_hash,AddHandler,-1014428769 +builtin_hash,SubHandler,-971645828 +builtin_hash,MulHandler,-1072793455 +builtin_hash,DivHandler,-162323805 +builtin_hash,ModHandler,-485795098 +builtin_hash,ExpHandler,795159955 +builtin_hash,BitwiseOrHandler,-974394049 +builtin_hash,BitwiseXorHandler,580834482 +builtin_hash,BitwiseAndHandler,614318128 +builtin_hash,ShiftLeftHandler,-423182377 +builtin_hash,ShiftRightHandler,-255081510 +builtin_hash,ShiftRightLogicalHandler,735938776 +builtin_hash,AddSmiHandler,107839307 +builtin_hash,SubSmiHandler,-363881533 +builtin_hash,MulSmiHandler,169761579 +builtin_hash,DivSmiHandler,-681265328 +builtin_hash,ModSmiHandler,861935655 +builtin_hash,BitwiseOrSmiHandler,-680303745 +builtin_hash,BitwiseXorSmiHandler,576458108 +builtin_hash,BitwiseAndSmiHandler,-994511503 +builtin_hash,ShiftLeftSmiHandler,-728693655 +builtin_hash,ShiftRightSmiHandler,975905832 +builtin_hash,ShiftRightLogicalSmiHandler,686146238 +builtin_hash,IncHandler,117772531 +builtin_hash,DecHandler,-691015839 +builtin_hash,NegateHandler,212889736 +builtin_hash,BitwiseNotHandler,-960473652 +builtin_hash,ToBooleanLogicalNotHandler,-997041363 +builtin_hash,LogicalNotHandler,-404436240 +builtin_hash,TypeOfHandler,-868029172 +builtin_hash,DeletePropertyStrictHandler,-310645655 +builtin_hash,DeletePropertySloppyHandler,-884621901 +builtin_hash,GetSuperConstructorHandler,-336144805 +builtin_hash,CallAnyReceiverHandler,-483788286 +builtin_hash,CallPropertyHandler,-483788286 +builtin_hash,CallProperty0Handler,234175094 +builtin_hash,CallProperty1Handler,354307341 +builtin_hash,CallProperty2Handler,968021051 +builtin_hash,CallUndefinedReceiverHandler,472718464 +builtin_hash,CallUndefinedReceiver0Handler,1020191467 +builtin_hash,CallUndefinedReceiver1Handler,785762305 +builtin_hash,CallUndefinedReceiver2Handler,-921863582 +builtin_hash,CallWithSpreadHandler,-483788286 +builtin_hash,CallRuntimeHandler,575543766 +builtin_hash,CallJSRuntimeHandler,-279970155 +builtin_hash,InvokeIntrinsicHandler,315814934 +builtin_hash,ConstructHandler,750653559 +builtin_hash,ConstructWithSpreadHandler,-950529667 +builtin_hash,TestEqualHandler,469957169 +builtin_hash,TestEqualStrictHandler,774972588 +builtin_hash,TestLessThanHandler,876731233 +builtin_hash,TestGreaterThanHandler,854370589 +builtin_hash,TestLessThanOrEqualHandler,-616820445 +builtin_hash,TestGreaterThanOrEqualHandler,128578007 +builtin_hash,TestInstanceOfHandler,437146777 +builtin_hash,TestInHandler,-595986293 +builtin_hash,ToNameHandler,-388837341 +builtin_hash,ToNumberHandler,172727215 +builtin_hash,ToNumericHandler,518340123 +builtin_hash,ToObjectHandler,-388837341 +builtin_hash,ToStringHandler,-736791596 +builtin_hash,CreateRegExpLiteralHandler,-387261303 +builtin_hash,CreateArrayLiteralHandler,544722821 +builtin_hash,CreateArrayFromIterableHandler,-590862374 +builtin_hash,CreateEmptyArrayLiteralHandler,-215104396 +builtin_hash,CreateObjectLiteralHandler,536615992 +builtin_hash,CreateEmptyObjectLiteralHandler,810635729 +builtin_hash,CreateClosureHandler,-899658211 +builtin_hash,CreateBlockContextHandler,-125556632 +builtin_hash,CreateCatchContextHandler,551209828 +builtin_hash,CreateFunctionContextHandler,-65684761 +builtin_hash,CreateMappedArgumentsHandler,67709625 
+builtin_hash,CreateUnmappedArgumentsHandler,608258279 +builtin_hash,CreateRestParameterHandler,1042430952 +builtin_hash,JumpLoopHandler,77742379 +builtin_hash,JumpHandler,-420188660 +builtin_hash,JumpConstantHandler,-998825364 +builtin_hash,JumpIfUndefinedConstantHandler,-326209739 +builtin_hash,JumpIfNotUndefinedConstantHandler,37208057 +builtin_hash,JumpIfUndefinedOrNullConstantHandler,-104381115 +builtin_hash,JumpIfTrueConstantHandler,-326209739 +builtin_hash,JumpIfFalseConstantHandler,-326209739 +builtin_hash,JumpIfToBooleanTrueConstantHandler,-234142841 +builtin_hash,JumpIfToBooleanFalseConstantHandler,-602774868 +builtin_hash,JumpIfToBooleanTrueHandler,-297635325 +builtin_hash,JumpIfToBooleanFalseHandler,1015367976 +builtin_hash,JumpIfTrueHandler,862147447 +builtin_hash,JumpIfFalseHandler,862147447 +builtin_hash,JumpIfNullHandler,862147447 +builtin_hash,JumpIfNotNullHandler,-481058680 +builtin_hash,JumpIfUndefinedHandler,862147447 +builtin_hash,JumpIfNotUndefinedHandler,-481058680 +builtin_hash,JumpIfUndefinedOrNullHandler,14126870 +builtin_hash,JumpIfJSReceiverHandler,-723850389 +builtin_hash,SwitchOnSmiNoFeedbackHandler,-902670490 +builtin_hash,ForInEnumerateHandler,-322331924 +builtin_hash,ForInPrepareHandler,20034175 +builtin_hash,ForInContinueHandler,827732360 +builtin_hash,ForInNextHandler,119110335 +builtin_hash,ForInStepHandler,757646701 +builtin_hash,SetPendingMessageHandler,996401409 +builtin_hash,ThrowHandler,122680912 +builtin_hash,ReThrowHandler,122680912 +builtin_hash,ReturnHandler,47039723 +builtin_hash,ThrowReferenceErrorIfHoleHandler,-342650955 +builtin_hash,ThrowSuperNotCalledIfHoleHandler,-285583864 +builtin_hash,ThrowSuperAlreadyCalledIfNotHoleHandler,-827541184 +builtin_hash,ThrowIfNotSuperConstructorHandler,1018623070 +builtin_hash,SwitchOnGeneratorStateHandler,717471818 +builtin_hash,SuspendGeneratorHandler,547514791 +builtin_hash,ResumeGeneratorHandler,-860485588 +builtin_hash,GetIteratorHandler,-6630463 +builtin_hash,ShortStarHandler,721894508 +builtin_hash,LdarWideHandler,-978392409 +builtin_hash,LdaSmiWideHandler,-366656871 +builtin_hash,LdaConstantWideHandler,972813981 +builtin_hash,LdaContextSlotWideHandler,628329787 +builtin_hash,LdaImmutableContextSlotWideHandler,628329787 +builtin_hash,LdaImmutableCurrentContextSlotWideHandler,489858159 +builtin_hash,StarWideHandler,-1038662456 +builtin_hash,MovWideHandler,483803273 +builtin_hash,PushContextWideHandler,216419588 +builtin_hash,PopContextWideHandler,272986324 +builtin_hash,TestReferenceEqualWideHandler,-4739833 +builtin_hash,LdaGlobalWideHandler,-434470564 +builtin_hash,LdaGlobalInsideTypeofWideHandler,888730933 +builtin_hash,StaGlobalWideHandler,459118950 +builtin_hash,StaContextSlotWideHandler,888275701 +builtin_hash,StaCurrentContextSlotWideHandler,-317584552 +builtin_hash,LdaLookupGlobalSlotWideHandler,1026575020 +builtin_hash,GetNamedPropertyWideHandler,664403992 +builtin_hash,GetKeyedPropertyWideHandler,322108853 +builtin_hash,SetNamedPropertyWideHandler,784668777 +builtin_hash,DefineNamedOwnPropertyWideHandler,784668777 +builtin_hash,SetKeyedPropertyWideHandler,1015904043 +builtin_hash,DefineKeyedOwnPropertyWideHandler,1015904043 +builtin_hash,StaInArrayLiteralWideHandler,1015904043 +builtin_hash,AddWideHandler,1006647977 +builtin_hash,SubWideHandler,212325320 +builtin_hash,MulWideHandler,-922622067 +builtin_hash,DivWideHandler,145054418 +builtin_hash,BitwiseOrWideHandler,805505097 +builtin_hash,BitwiseAndWideHandler,563101073 +builtin_hash,ShiftLeftWideHandler,448918085 
+builtin_hash,AddSmiWideHandler,-135072104
+builtin_hash,SubSmiWideHandler,-169078418
+builtin_hash,MulSmiWideHandler,793690226
+builtin_hash,DivSmiWideHandler,-657180043
+builtin_hash,ModSmiWideHandler,335754550
+builtin_hash,BitwiseOrSmiWideHandler,-1067934836
+builtin_hash,BitwiseXorSmiWideHandler,-668709153
+builtin_hash,BitwiseAndSmiWideHandler,-90084544
+builtin_hash,ShiftLeftSmiWideHandler,-381247703
+builtin_hash,ShiftRightSmiWideHandler,-38676513
+builtin_hash,ShiftRightLogicalSmiWideHandler,-1026231042
+builtin_hash,IncWideHandler,389395178
+builtin_hash,DecWideHandler,1062128797
+builtin_hash,NegateWideHandler,375542705
+builtin_hash,CallPropertyWideHandler,479651507
+builtin_hash,CallProperty0WideHandler,402451236
+builtin_hash,CallProperty1WideHandler,864866147
+builtin_hash,CallProperty2WideHandler,672960581
+builtin_hash,CallUndefinedReceiverWideHandler,633606056
+builtin_hash,CallUndefinedReceiver0WideHandler,-782323787
+builtin_hash,CallUndefinedReceiver1WideHandler,52355318
+builtin_hash,CallUndefinedReceiver2WideHandler,297430331
+builtin_hash,CallWithSpreadWideHandler,479651507
+builtin_hash,ConstructWideHandler,-923801363
+builtin_hash,TestEqualWideHandler,745861994
+builtin_hash,TestEqualStrictWideHandler,982796365
+builtin_hash,TestLessThanWideHandler,665221830
+builtin_hash,TestGreaterThanWideHandler,776130121
+builtin_hash,TestLessThanOrEqualWideHandler,-299580558
+builtin_hash,TestGreaterThanOrEqualWideHandler,-356242933
+builtin_hash,TestInstanceOfWideHandler,406240218
+builtin_hash,TestInWideHandler,-754759119
+builtin_hash,ToNumericWideHandler,1034444948
+builtin_hash,CreateRegExpLiteralWideHandler,1015965077
+builtin_hash,CreateArrayLiteralWideHandler,238187057
+builtin_hash,CreateEmptyArrayLiteralWideHandler,-21075025
+builtin_hash,CreateObjectLiteralWideHandler,570835533
+builtin_hash,CreateClosureWideHandler,912422636
+builtin_hash,CreateBlockContextWideHandler,499748521
+builtin_hash,CreateFunctionContextWideHandler,-887672919
+builtin_hash,JumpLoopWideHandler,714317010
+builtin_hash,JumpWideHandler,-420188660
+builtin_hash,JumpIfToBooleanTrueWideHandler,230302934
+builtin_hash,JumpIfToBooleanFalseWideHandler,237768975
+builtin_hash,JumpIfTrueWideHandler,814624851
+builtin_hash,JumpIfFalseWideHandler,814624851
+builtin_hash,SwitchOnSmiNoFeedbackWideHandler,623977068
+builtin_hash,ForInPrepareWideHandler,430965432
+builtin_hash,ForInNextWideHandler,-899950637
+builtin_hash,ThrowReferenceErrorIfHoleWideHandler,-575574526
+builtin_hash,GetIteratorWideHandler,-626454663
+builtin_hash,LdaSmiExtraWideHandler,465680004
+builtin_hash,LdaGlobalExtraWideHandler,1016564513
+builtin_hash,AddSmiExtraWideHandler,585533206
+builtin_hash,SubSmiExtraWideHandler,-88717151
+builtin_hash,MulSmiExtraWideHandler,-508453390
+builtin_hash,DivSmiExtraWideHandler,-542490757
+builtin_hash,BitwiseOrSmiExtraWideHandler,776661340
+builtin_hash,BitwiseXorSmiExtraWideHandler,276228867
+builtin_hash,BitwiseAndSmiExtraWideHandler,739058259
+builtin_hash,CallUndefinedReceiverExtraWideHandler,488508421
+builtin_hash,CallUndefinedReceiver1ExtraWideHandler,700320270
+builtin_hash,CallUndefinedReceiver2ExtraWideHandler,-7276189
diff --git a/deps/v8/tools/builtins-pgo/generate.py b/deps/v8/tools/builtins-pgo/generate.py
index 2a605fbed170ac..5eec3c15162784 100755
--- a/deps/v8/tools/builtins-pgo/generate.py
+++ b/deps/v8/tools/builtins-pgo/generate.py
@@ -19,6 +19,11 @@
     default=None,
     help='target cpu for V8 binary (for simulator builds), by default it\'s equal to `v8_target_cpu`'
 )
+parser.add_argument(
+    '--use-qemu',
+    default=False,
+    help='Use qemu for running cross-compiled V8 binary.',
+    action=argparse.BooleanOptionalAction)
 parser.add_argument(
     'benchmark_path',
     help='path to benchmark runner .js file, usually JetStream2\'s `cli.js`',
@@ -67,6 +72,15 @@ def build_d8(path, gn_args):
   exit(1)
 
 has_goma_str = "true" if try_start_goma() else "false"
+cmd_prefix = []
+if args.use_qemu:
+  if args.v8_target_cpu == "arm":
+    cmd_prefix = ["qemu-arm", "-L", "/usr/arm-linux-gnueabihf/"]
+  elif args.v8_target_cpu == "arm64":
+    cmd_prefix = ["qemu-aarch64", "-L", "/usr/aarch64-linux-gnu/"]
+  else:
+    print(f"{args.v8_target_cpu} binaries can't be run with qemu")
+    exit(1)
 
 GN_ARGS_TEMPLATE = f"""\
 is_debug = false
@@ -82,8 +96,10 @@ def build_d8(path, gn_args):
   benchmark_dir = args.benchmark_path.parent
   benchmark_file = args.benchmark_path.name
   log_path = (build_dir / "v8.builtins.pgo").absolute()
-  run([d8_path, f"--turbo-profiling-output={log_path}", benchmark_file],
-      cwd=benchmark_dir)
+  cmd = cmd_prefix + [
+      d8_path, f"--turbo-profiling-output={log_path}", benchmark_file
+  ]
+  run(cmd, cwd=benchmark_dir)
   get_hints_path = tools_pgo_dir / "get_hints.py"
   profile_path = tools_pgo_dir / f"{arch}.profile"
   run([get_hints_path, log_path, profile_path])
diff --git a/deps/v8/tools/builtins-pgo/x64.profile b/deps/v8/tools/builtins-pgo/x64.profile
index 8c44d60624bd07..d65ffe79e59dab 100644
--- a/deps/v8/tools/builtins-pgo/x64.profile
+++ b/deps/v8/tools/builtins-pgo/x64.profile
@@ -12,6 +12,7 @@ block_hint,RecordWriteIgnoreFP,6,7,0
 block_hint,RecordWriteIgnoreFP,19,20,0
 block_hint,RecordWriteIgnoreFP,9,10,0
 block_hint,RecordWriteIgnoreFP,25,26,0
+block_hint,RecordWriteIgnoreFP,15,16,1
 block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,19,20,1
 block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,43,44,0
 block_hint,Call_ReceiverIsNullOrUndefined_Baseline_Compact,83,84,0
@@ -245,291 +246,328 @@ block_hint,ToNumberConvertBigInt,9,10,1
 block_hint,Typeof,17,18,0
 block_hint,Typeof,9,10,0
 block_hint,Typeof,13,14,1
-block_hint,KeyedLoadIC_PolymorphicName,238,239,1
-block_hint,KeyedLoadIC_PolymorphicName,98,99,1
-block_hint,KeyedLoadIC_PolymorphicName,256,257,0
-block_hint,KeyedLoadIC_PolymorphicName,60,61,0
-block_hint,KeyedLoadIC_PolymorphicName,129,130,1
-block_hint,KeyedLoadIC_PolymorphicName,292,293,1
-block_hint,KeyedLoadIC_PolymorphicName,240,241,1
-block_hint,KeyedLoadIC_PolymorphicName,100,101,0
-block_hint,KeyedLoadIC_PolymorphicName,24,25,1
-block_hint,KeyedLoadIC_PolymorphicName,161,162,0
-block_hint,KeyedLoadIC_PolymorphicName,118,119,1
-block_hint,KeyedLoadIC_PolymorphicName,242,243,1
-block_hint,KeyedLoadIC_PolymorphicName,171,172,0
-block_hint,KeyedLoadIC_PolymorphicName,45,46,1
-block_hint,KeyedLoadIC_PolymorphicName,76,77,0
-block_hint,KeyedLoadIC_PolymorphicName,246,247,0
-block_hint,KeyedLoadIC_PolymorphicName,281,282,1
-block_hint,KeyedLoadIC_PolymorphicName,28,29,0
+block_hint,KeyedLoadIC_PolymorphicName,244,245,1
+block_hint,KeyedLoadIC_PolymorphicName,96,97,1
+block_hint,KeyedLoadIC_PolymorphicName,260,261,0
+block_hint,KeyedLoadIC_PolymorphicName,58,59,0
+block_hint,KeyedLoadIC_PolymorphicName,133,134,1
+block_hint,KeyedLoadIC_PolymorphicName,298,299,1
+block_hint,KeyedLoadIC_PolymorphicName,330,331,1
+block_hint,KeyedLoadIC_PolymorphicName,98,99,0
+block_hint,KeyedLoadIC_PolymorphicName,279,280,1
+block_hint,KeyedLoadIC_PolymorphicName,22,23,1
+block_hint,KeyedLoadIC_PolymorphicName,165,166,0
+block_hint,KeyedLoadIC_PolymorphicName,122,123,1
+block_hint,KeyedLoadIC_PolymorphicName,332,333,1 +block_hint,KeyedLoadIC_PolymorphicName,110,111,0 +block_hint,KeyedLoadIC_PolymorphicName,175,176,0 +block_hint,KeyedLoadIC_PolymorphicName,43,44,1 +block_hint,KeyedLoadIC_PolymorphicName,74,75,0 +block_hint,KeyedLoadIC_PolymorphicName,250,251,0 +block_hint,KeyedLoadIC_PolymorphicName,287,288,1 block_hint,KeyedLoadIC_PolymorphicName,26,27,0 -block_hint,KeyedStoreIC_Megamorphic,375,376,1 -block_hint,KeyedStoreIC_Megamorphic,377,378,0 -block_hint,KeyedStoreIC_Megamorphic,1175,1176,0 -block_hint,KeyedStoreIC_Megamorphic,1177,1178,1 -block_hint,KeyedStoreIC_Megamorphic,1161,1162,1 -block_hint,KeyedStoreIC_Megamorphic,1110,1111,0 -block_hint,KeyedStoreIC_Megamorphic,905,906,1 +block_hint,KeyedLoadIC_PolymorphicName,24,25,0 block_hint,KeyedStoreIC_Megamorphic,379,380,1 -block_hint,KeyedStoreIC_Megamorphic,1169,1170,0 -block_hint,KeyedStoreIC_Megamorphic,1154,1155,0 -block_hint,KeyedStoreIC_Megamorphic,597,598,0 -block_hint,KeyedStoreIC_Megamorphic,191,192,1 -block_hint,KeyedStoreIC_Megamorphic,1025,1026,0 -block_hint,KeyedStoreIC_Megamorphic,195,196,1 -block_hint,KeyedStoreIC_Megamorphic,197,198,0 -block_hint,KeyedStoreIC_Megamorphic,1104,1105,0 -block_hint,KeyedStoreIC_Megamorphic,1113,1114,0 -block_hint,KeyedStoreIC_Megamorphic,917,918,0 -block_hint,KeyedStoreIC_Megamorphic,487,488,0 -block_hint,KeyedStoreIC_Megamorphic,887,888,0 -block_hint,KeyedStoreIC_Megamorphic,921,922,0 -block_hint,KeyedStoreIC_Megamorphic,919,920,1 -block_hint,KeyedStoreIC_Megamorphic,489,490,1 -block_hint,KeyedStoreIC_Megamorphic,495,496,1 -block_hint,KeyedStoreIC_Megamorphic,497,498,0 -block_hint,KeyedStoreIC_Megamorphic,925,926,1 -block_hint,KeyedStoreIC_Megamorphic,499,500,0 -block_hint,KeyedStoreIC_Megamorphic,501,502,1 -block_hint,KeyedStoreIC_Megamorphic,923,924,1 -block_hint,KeyedStoreIC_Megamorphic,493,494,1 +block_hint,KeyedStoreIC_Megamorphic,381,382,0 +block_hint,KeyedStoreIC_Megamorphic,1216,1217,0 +block_hint,KeyedStoreIC_Megamorphic,1218,1219,1 +block_hint,KeyedStoreIC_Megamorphic,1203,1204,1 +block_hint,KeyedStoreIC_Megamorphic,1140,1141,0 +block_hint,KeyedStoreIC_Megamorphic,915,916,1 +block_hint,KeyedStoreIC_Megamorphic,383,384,1 +block_hint,KeyedStoreIC_Megamorphic,1228,1229,0 +block_hint,KeyedStoreIC_Megamorphic,1211,1212,0 +block_hint,KeyedStoreIC_Megamorphic,601,602,0 +block_hint,KeyedStoreIC_Megamorphic,746,747,1 +block_hint,KeyedStoreIC_Megamorphic,603,604,0 +block_hint,KeyedStoreIC_Megamorphic,1191,1192,0 +block_hint,KeyedStoreIC_Megamorphic,1041,1042,0 +block_hint,KeyedStoreIC_Megamorphic,1168,1169,0 +block_hint,KeyedStoreIC_Megamorphic,192,193,1 +block_hint,KeyedStoreIC_Megamorphic,194,195,0 +block_hint,KeyedStoreIC_Megamorphic,1134,1135,0 +block_hint,KeyedStoreIC_Megamorphic,1143,1144,0 +block_hint,KeyedStoreIC_Megamorphic,927,928,0 block_hint,KeyedStoreIC_Megamorphic,491,492,0 -block_hint,KeyedStoreIC_Megamorphic,1087,1088,1 -block_hint,KeyedStoreIC_Megamorphic,1140,1141,1 -block_hint,KeyedStoreIC_Megamorphic,885,886,0 -block_hint,KeyedStoreIC_Megamorphic,347,348,1 -block_hint,KeyedStoreIC_Megamorphic,333,334,1 -block_hint,KeyedStoreIC_Megamorphic,1085,1086,1 -block_hint,KeyedStoreIC_Megamorphic,678,679,0 -block_hint,KeyedStoreIC_Megamorphic,535,536,0 -block_hint,KeyedStoreIC_Megamorphic,537,538,0 -block_hint,KeyedStoreIC_Megamorphic,1029,1030,0 -block_hint,KeyedStoreIC_Megamorphic,543,544,1 -block_hint,KeyedStoreIC_Megamorphic,600,601,0 -block_hint,KeyedStoreIC_Megamorphic,545,546,0 +block_hint,KeyedStoreIC_Megamorphic,895,896,0 
+block_hint,KeyedStoreIC_Megamorphic,931,932,0 +block_hint,KeyedStoreIC_Megamorphic,929,930,1 +block_hint,KeyedStoreIC_Megamorphic,493,494,1 +block_hint,KeyedStoreIC_Megamorphic,499,500,1 +block_hint,KeyedStoreIC_Megamorphic,501,502,0 +block_hint,KeyedStoreIC_Megamorphic,935,936,1 +block_hint,KeyedStoreIC_Megamorphic,503,504,0 +block_hint,KeyedStoreIC_Megamorphic,505,506,1 +block_hint,KeyedStoreIC_Megamorphic,933,934,1 +block_hint,KeyedStoreIC_Megamorphic,497,498,1 +block_hint,KeyedStoreIC_Megamorphic,495,496,0 +block_hint,KeyedStoreIC_Megamorphic,1115,1116,1 +block_hint,KeyedStoreIC_Megamorphic,1177,1178,1 +block_hint,KeyedStoreIC_Megamorphic,893,894,0 +block_hint,KeyedStoreIC_Megamorphic,350,351,1 +block_hint,KeyedStoreIC_Megamorphic,336,337,1 +block_hint,KeyedStoreIC_Megamorphic,1113,1114,1 +block_hint,KeyedStoreIC_Megamorphic,683,684,0 +block_hint,KeyedStoreIC_Megamorphic,539,540,0 +block_hint,KeyedStoreIC_Megamorphic,541,542,0 +block_hint,KeyedStoreIC_Megamorphic,1045,1046,0 block_hint,KeyedStoreIC_Megamorphic,547,548,1 -block_hint,KeyedStoreIC_Megamorphic,203,204,1 +block_hint,KeyedStoreIC_Megamorphic,1071,1072,0 +block_hint,KeyedStoreIC_Megamorphic,606,607,0 +block_hint,KeyedStoreIC_Megamorphic,1193,1194,0 block_hint,KeyedStoreIC_Megamorphic,549,550,0 -block_hint,KeyedStoreIC_Megamorphic,205,206,0 -block_hint,KeyedStoreIC_Megamorphic,207,208,0 -block_hint,KeyedStoreIC_Megamorphic,940,941,0 +block_hint,KeyedStoreIC_Megamorphic,1047,1048,0 block_hint,KeyedStoreIC_Megamorphic,551,552,1 +block_hint,KeyedStoreIC_Megamorphic,200,201,1 block_hint,KeyedStoreIC_Megamorphic,553,554,0 +block_hint,KeyedStoreIC_Megamorphic,202,203,0 +block_hint,KeyedStoreIC_Megamorphic,204,205,0 +block_hint,KeyedStoreIC_Megamorphic,950,951,0 block_hint,KeyedStoreIC_Megamorphic,555,556,1 block_hint,KeyedStoreIC_Megamorphic,557,558,0 -block_hint,KeyedStoreIC_Megamorphic,1118,1119,0 block_hint,KeyedStoreIC_Megamorphic,559,560,1 -block_hint,KeyedStoreIC_Megamorphic,894,895,0 -block_hint,KeyedStoreIC_Megamorphic,1120,1121,0 -block_hint,KeyedStoreIC_Megamorphic,561,562,1 -block_hint,KeyedStoreIC_Megamorphic,567,568,1 -block_hint,KeyedStoreIC_Megamorphic,569,570,0 -block_hint,KeyedStoreIC_Megamorphic,571,572,0 -block_hint,KeyedStoreIC_Megamorphic,573,574,1 -block_hint,KeyedStoreIC_Megamorphic,947,948,1 +block_hint,KeyedStoreIC_Megamorphic,561,562,0 +block_hint,KeyedStoreIC_Megamorphic,1148,1149,0 +block_hint,KeyedStoreIC_Megamorphic,563,564,1 +block_hint,KeyedStoreIC_Megamorphic,902,903,0 +block_hint,KeyedStoreIC_Megamorphic,1150,1151,0 block_hint,KeyedStoreIC_Megamorphic,565,566,1 -block_hint,KeyedStoreIC_Megamorphic,563,564,0 -block_hint,KeyedStoreIC_Megamorphic,1173,1174,0 -block_hint,KeyedStoreIC_Megamorphic,1186,1187,1 -block_hint,KeyedStoreIC_Megamorphic,1183,1184,1 -block_hint,KeyedStoreIC_Megamorphic,1102,1103,1 -block_hint,KeyedStoreIC_Megamorphic,964,965,1 -block_hint,KeyedStoreIC_Megamorphic,209,210,0 -block_hint,KeyedStoreIC_Megamorphic,359,360,0 -block_hint,KeyedStoreIC_Megamorphic,605,606,1 -block_hint,KeyedStoreIC_Megamorphic,1013,1014,0 -block_hint,KeyedStoreIC_Megamorphic,688,689,0 +block_hint,KeyedStoreIC_Megamorphic,571,572,1 +block_hint,KeyedStoreIC_Megamorphic,573,574,0 block_hint,KeyedStoreIC_Megamorphic,575,576,0 -block_hint,KeyedStoreIC_Megamorphic,168,169,1 -block_hint,KeyedStoreIC_Megamorphic,577,578,0 +block_hint,KeyedStoreIC_Megamorphic,577,578,1 +block_hint,KeyedStoreIC_Megamorphic,957,958,1 +block_hint,KeyedStoreIC_Megamorphic,569,570,1 +block_hint,KeyedStoreIC_Megamorphic,567,568,0 
+block_hint,KeyedStoreIC_Megamorphic,1214,1215,0 +block_hint,KeyedStoreIC_Megamorphic,1231,1232,1 +block_hint,KeyedStoreIC_Megamorphic,1224,1225,1 +block_hint,KeyedStoreIC_Megamorphic,1130,1131,1 +block_hint,KeyedStoreIC_Megamorphic,975,976,1 +block_hint,KeyedStoreIC_Megamorphic,206,207,0 +block_hint,KeyedStoreIC_Megamorphic,362,363,0 +block_hint,KeyedStoreIC_Megamorphic,977,978,1 +block_hint,KeyedStoreIC_Megamorphic,214,215,0 +block_hint,KeyedStoreIC_Megamorphic,1027,1028,0 +block_hint,KeyedStoreIC_Megamorphic,693,694,0 block_hint,KeyedStoreIC_Megamorphic,579,580,0 -block_hint,KeyedStoreIC_Megamorphic,1034,1035,0 -block_hint,KeyedStoreIC_Megamorphic,581,582,1 -block_hint,KeyedStoreIC_Megamorphic,953,954,0 -block_hint,KeyedStoreIC_Megamorphic,970,971,0 -block_hint,KeyedStoreIC_Megamorphic,749,750,1 -block_hint,KeyedStoreIC_Megamorphic,221,222,1 -block_hint,KeyedStoreIC_Megamorphic,1036,1037,0 -block_hint,KeyedStoreIC_Megamorphic,227,228,0 -block_hint,KeyedStoreIC_Megamorphic,753,754,0 -block_hint,KeyedStoreIC_Megamorphic,589,590,0 -block_hint,KeyedStoreIC_Megamorphic,1106,1107,0 -block_hint,KeyedStoreIC_Megamorphic,1143,1144,0 -block_hint,KeyedStoreIC_Megamorphic,896,897,0 -block_hint,KeyedStoreIC_Megamorphic,174,175,1 -block_hint,KeyedStoreIC_Megamorphic,176,177,1 -block_hint,KeyedStoreIC_Megamorphic,369,370,0 -block_hint,KeyedStoreIC_Megamorphic,178,179,1 -block_hint,KeyedStoreIC_Megamorphic,371,372,0 -block_hint,KeyedStoreIC_Megamorphic,180,181,1 -block_hint,KeyedStoreIC_Megamorphic,233,234,0 -block_hint,KeyedStoreIC_Megamorphic,182,183,1 -block_hint,KeyedStoreIC_Megamorphic,184,185,1 -block_hint,KeyedStoreIC_Megamorphic,1018,1019,0 -block_hint,KeyedStoreIC_Megamorphic,186,187,1 -block_hint,KeyedStoreIC_Megamorphic,915,916,1 -block_hint,KeyedStoreIC_Megamorphic,481,482,1 -block_hint,KeyedStoreIC_Megamorphic,728,729,0 -block_hint,KeyedStoreIC_Megamorphic,909,910,1 -block_hint,KeyedStoreIC_Megamorphic,409,410,0 -block_hint,KeyedStoreIC_Megamorphic,411,412,0 -block_hint,KeyedStoreIC_Megamorphic,251,252,1 +block_hint,KeyedStoreIC_Megamorphic,167,168,1 +block_hint,KeyedStoreIC_Megamorphic,581,582,0 +block_hint,KeyedStoreIC_Megamorphic,583,584,0 +block_hint,KeyedStoreIC_Megamorphic,1054,1055,0 +block_hint,KeyedStoreIC_Megamorphic,585,586,1 +block_hint,KeyedStoreIC_Megamorphic,963,964,0 +block_hint,KeyedStoreIC_Megamorphic,1174,1175,0 +block_hint,KeyedStoreIC_Megamorphic,1056,1057,1 +block_hint,KeyedStoreIC_Megamorphic,759,760,1 +block_hint,KeyedStoreIC_Megamorphic,612,613,0 +block_hint,KeyedStoreIC_Megamorphic,1196,1197,0 +block_hint,KeyedStoreIC_Megamorphic,1058,1059,0 +block_hint,KeyedStoreIC_Megamorphic,1172,1173,0 +block_hint,KeyedStoreIC_Megamorphic,224,225,0 +block_hint,KeyedStoreIC_Megamorphic,761,762,0 +block_hint,KeyedStoreIC_Megamorphic,593,594,0 +block_hint,KeyedStoreIC_Megamorphic,1136,1137,0 +block_hint,KeyedStoreIC_Megamorphic,1180,1181,0 +block_hint,KeyedStoreIC_Megamorphic,906,907,0 +block_hint,KeyedStoreIC_Megamorphic,173,174,1 +block_hint,KeyedStoreIC_Megamorphic,175,176,1 +block_hint,KeyedStoreIC_Megamorphic,373,374,0 +block_hint,KeyedStoreIC_Megamorphic,177,178,1 +block_hint,KeyedStoreIC_Megamorphic,375,376,0 +block_hint,KeyedStoreIC_Megamorphic,179,180,1 +block_hint,KeyedStoreIC_Megamorphic,234,235,0 +block_hint,KeyedStoreIC_Megamorphic,236,237,0 +block_hint,KeyedStoreIC_Megamorphic,181,182,1 +block_hint,KeyedStoreIC_Megamorphic,183,184,1 +block_hint,KeyedStoreIC_Megamorphic,1032,1033,0 +block_hint,KeyedStoreIC_Megamorphic,185,186,1 
+block_hint,KeyedStoreIC_Megamorphic,925,926,1 +block_hint,KeyedStoreIC_Megamorphic,485,486,1 +block_hint,KeyedStoreIC_Megamorphic,733,734,0 +block_hint,KeyedStoreIC_Megamorphic,919,920,1 block_hint,KeyedStoreIC_Megamorphic,413,414,0 -block_hint,KeyedStoreIC_Megamorphic,625,626,1 -block_hint,KeyedStoreIC_Megamorphic,93,94,1 -block_hint,KeyedStoreIC_Megamorphic,95,96,0 -block_hint,KeyedStoreIC_Megamorphic,761,762,1 -block_hint,KeyedStoreIC_Megamorphic,383,384,0 -block_hint,KeyedStoreIC_Megamorphic,634,635,1 -block_hint,KeyedStoreIC_Megamorphic,65,66,1 -block_hint,KeyedStoreIC_Megamorphic,67,68,0 -block_hint,DefineKeyedOwnIC_Megamorphic,310,311,1 -block_hint,DefineKeyedOwnIC_Megamorphic,312,313,0 -block_hint,DefineKeyedOwnIC_Megamorphic,865,866,0 -block_hint,DefineKeyedOwnIC_Megamorphic,418,419,0 -block_hint,DefineKeyedOwnIC_Megamorphic,416,417,1 -block_hint,DefineKeyedOwnIC_Megamorphic,794,795,0 -block_hint,DefineKeyedOwnIC_Megamorphic,570,571,1 -block_hint,DefineKeyedOwnIC_Megamorphic,593,594,1 -block_hint,DefineKeyedOwnIC_Megamorphic,230,231,0 +block_hint,KeyedStoreIC_Megamorphic,415,416,0 +block_hint,KeyedStoreIC_Megamorphic,254,255,1 +block_hint,KeyedStoreIC_Megamorphic,417,418,0 +block_hint,KeyedStoreIC_Megamorphic,630,631,1 +block_hint,KeyedStoreIC_Megamorphic,92,93,1 +block_hint,KeyedStoreIC_Megamorphic,94,95,0 +block_hint,KeyedStoreIC_Megamorphic,769,770,1 +block_hint,KeyedStoreIC_Megamorphic,387,388,0 +block_hint,KeyedStoreIC_Megamorphic,639,640,1 +block_hint,KeyedStoreIC_Megamorphic,64,65,1 +block_hint,KeyedStoreIC_Megamorphic,66,67,0 +block_hint,DefineKeyedOwnIC_Megamorphic,312,313,1 +block_hint,DefineKeyedOwnIC_Megamorphic,314,315,0 +block_hint,DefineKeyedOwnIC_Megamorphic,887,888,0 +block_hint,DefineKeyedOwnIC_Megamorphic,420,421,0 +block_hint,DefineKeyedOwnIC_Megamorphic,418,419,1 +block_hint,DefineKeyedOwnIC_Megamorphic,803,804,0 +block_hint,DefineKeyedOwnIC_Megamorphic,575,576,1 +block_hint,DefineKeyedOwnIC_Megamorphic,601,602,1 +block_hint,DefineKeyedOwnIC_Megamorphic,232,233,0 block_hint,DefineKeyedOwnIC_Megamorphic,53,54,1 block_hint,DefineKeyedOwnIC_Megamorphic,55,56,0 -block_hint,LoadGlobalIC_NoFeedback,39,40,1 +block_hint,LoadGlobalIC_NoFeedback,41,42,1 block_hint,LoadGlobalIC_NoFeedback,6,7,1 block_hint,LoadGlobalIC_NoFeedback,8,9,1 block_hint,LoadGlobalIC_NoFeedback,10,11,1 block_hint,LoadGlobalIC_NoFeedback,12,13,1 -block_hint,LoadGlobalIC_NoFeedback,28,29,1 -block_hint,LoadGlobalIC_NoFeedback,43,44,1 +block_hint,LoadGlobalIC_NoFeedback,31,32,1 +block_hint,LoadGlobalIC_NoFeedback,49,50,1 block_hint,LoadGlobalIC_NoFeedback,18,19,1 +block_hint,LoadGlobalIC_NoFeedback,27,28,0 block_hint,LoadGlobalIC_NoFeedback,14,15,1 -block_hint,LoadGlobalIC_NoFeedback,30,31,0 +block_hint,LoadGlobalIC_NoFeedback,33,34,0 block_hint,LoadGlobalIC_NoFeedback,16,17,1 block_hint,LoadGlobalIC_NoFeedback,20,21,1 block_hint,LoadGlobalIC_NoFeedback,22,23,0 block_hint,LoadGlobalIC_NoFeedback,24,25,1 block_hint,LoadIC_FunctionPrototype,2,3,0 block_hint,LoadIC_FunctionPrototype,4,5,1 -block_hint,LoadIC_NoFeedback,93,94,1 -block_hint,LoadIC_NoFeedback,95,96,0 -block_hint,LoadIC_NoFeedback,277,278,1 -block_hint,LoadIC_NoFeedback,212,213,0 -block_hint,LoadIC_NoFeedback,201,202,1 -block_hint,LoadIC_NoFeedback,265,266,0 -block_hint,LoadIC_NoFeedback,56,57,1 -block_hint,LoadIC_NoFeedback,267,268,0 -block_hint,LoadIC_NoFeedback,58,59,0 -block_hint,LoadIC_NoFeedback,291,292,1 -block_hint,LoadIC_NoFeedback,253,254,0 -block_hint,LoadIC_NoFeedback,270,271,1 -block_hint,LoadIC_NoFeedback,138,139,1 
-block_hint,LoadIC_NoFeedback,23,24,1 -block_hint,LoadIC_NoFeedback,36,37,1 -block_hint,LoadIC_NoFeedback,126,127,1 -block_hint,LoadIC_NoFeedback,140,141,0 -block_hint,LoadIC_NoFeedback,121,122,0 -block_hint,LoadIC_NoFeedback,242,243,0 -block_hint,LoadIC_NoFeedback,235,236,0 -block_hint,LoadIC_NoFeedback,144,145,1 -block_hint,LoadIC_NoFeedback,146,147,0 -block_hint,LoadIC_NoFeedback,73,74,1 -block_hint,LoadIC_NoFeedback,150,151,0 -block_hint,LoadIC_NoFeedback,75,76,0 -block_hint,LoadIC_NoFeedback,159,160,1 -block_hint,LoadIC_NoFeedback,293,294,1 -block_hint,LoadIC_NoFeedback,259,260,0 -block_hint,LoadIC_NoFeedback,257,258,0 -block_hint,LoadIC_NoFeedback,228,229,1 -block_hint,LoadIC_NoFeedback,131,132,1 -block_hint,LoadIC_NoFeedback,89,90,0 -block_hint,StoreIC_NoFeedback,143,144,1 -block_hint,StoreIC_NoFeedback,145,146,0 -block_hint,StoreIC_NoFeedback,427,428,0 -block_hint,StoreIC_NoFeedback,61,62,1 -block_hint,StoreIC_NoFeedback,63,64,0 -block_hint,StoreIC_NoFeedback,467,468,0 -block_hint,StoreIC_NoFeedback,357,358,0 -block_hint,StoreIC_NoFeedback,147,148,0 -block_hint,StoreIC_NoFeedback,341,342,0 -block_hint,StoreIC_NoFeedback,149,150,1 -block_hint,StoreIC_NoFeedback,155,156,1 -block_hint,StoreIC_NoFeedback,157,158,0 -block_hint,StoreIC_NoFeedback,159,160,0 -block_hint,StoreIC_NoFeedback,153,154,1 +block_hint,LoadIC_NoFeedback,97,98,1 +block_hint,LoadIC_NoFeedback,99,100,0 +block_hint,LoadIC_NoFeedback,306,307,1 +block_hint,LoadIC_NoFeedback,226,227,0 +block_hint,LoadIC_NoFeedback,285,286,1 +block_hint,LoadIC_NoFeedback,141,142,0 +block_hint,LoadIC_NoFeedback,320,321,0 +block_hint,LoadIC_NoFeedback,287,288,0 +block_hint,LoadIC_NoFeedback,302,303,0 +block_hint,LoadIC_NoFeedback,53,54,1 +block_hint,LoadIC_NoFeedback,289,290,0 +block_hint,LoadIC_NoFeedback,55,56,0 +block_hint,LoadIC_NoFeedback,324,325,1 +block_hint,LoadIC_NoFeedback,272,273,0 +block_hint,LoadIC_NoFeedback,295,296,1 +block_hint,LoadIC_NoFeedback,247,248,1 +block_hint,LoadIC_NoFeedback,59,60,0 +block_hint,LoadIC_NoFeedback,22,23,1 +block_hint,LoadIC_NoFeedback,35,36,1 +block_hint,LoadIC_NoFeedback,130,131,1 +block_hint,LoadIC_NoFeedback,145,146,0 +block_hint,LoadIC_NoFeedback,125,126,0 +block_hint,LoadIC_NoFeedback,261,262,0 +block_hint,LoadIC_NoFeedback,250,251,0 +block_hint,LoadIC_NoFeedback,149,150,1 +block_hint,LoadIC_NoFeedback,167,168,0 +block_hint,LoadIC_NoFeedback,322,323,0 +block_hint,LoadIC_NoFeedback,151,152,0 +block_hint,LoadIC_NoFeedback,291,292,0 +block_hint,LoadIC_NoFeedback,70,71,1 +block_hint,LoadIC_NoFeedback,155,156,0 +block_hint,LoadIC_NoFeedback,72,73,0 +block_hint,LoadIC_NoFeedback,254,255,1 +block_hint,LoadIC_NoFeedback,76,77,0 +block_hint,LoadIC_NoFeedback,326,327,1 +block_hint,LoadIC_NoFeedback,278,279,0 +block_hint,LoadIC_NoFeedback,276,277,0 +block_hint,LoadIC_NoFeedback,242,243,1 +block_hint,LoadIC_NoFeedback,135,136,1 +block_hint,LoadIC_NoFeedback,93,94,0 +block_hint,StoreIC_NoFeedback,147,148,1 +block_hint,StoreIC_NoFeedback,149,150,0 +block_hint,StoreIC_NoFeedback,259,260,0 +block_hint,StoreIC_NoFeedback,549,550,0 +block_hint,StoreIC_NoFeedback,443,444,0 +block_hint,StoreIC_NoFeedback,527,528,0 +block_hint,StoreIC_NoFeedback,58,59,1 +block_hint,StoreIC_NoFeedback,60,61,0 +block_hint,StoreIC_NoFeedback,498,499,0 +block_hint,StoreIC_NoFeedback,367,368,0 block_hint,StoreIC_NoFeedback,151,152,0 -block_hint,StoreIC_NoFeedback,497,498,1 -block_hint,StoreIC_NoFeedback,371,372,1 -block_hint,StoreIC_NoFeedback,175,176,0 -block_hint,StoreIC_NoFeedback,488,489,1 -block_hint,StoreIC_NoFeedback,195,196,0 
-block_hint,StoreIC_NoFeedback,197,198,0 -block_hint,StoreIC_NoFeedback,431,432,0 -block_hint,StoreIC_NoFeedback,203,204,1 -block_hint,StoreIC_NoFeedback,256,257,0 -block_hint,StoreIC_NoFeedback,205,206,0 -block_hint,StoreIC_NoFeedback,69,70,1 +block_hint,StoreIC_NoFeedback,349,350,0 +block_hint,StoreIC_NoFeedback,153,154,1 +block_hint,StoreIC_NoFeedback,159,160,1 +block_hint,StoreIC_NoFeedback,161,162,0 +block_hint,StoreIC_NoFeedback,163,164,0 +block_hint,StoreIC_NoFeedback,157,158,1 +block_hint,StoreIC_NoFeedback,155,156,0 +block_hint,StoreIC_NoFeedback,536,537,1 +block_hint,StoreIC_NoFeedback,381,382,1 +block_hint,StoreIC_NoFeedback,179,180,0 +block_hint,StoreIC_NoFeedback,519,520,1 +block_hint,StoreIC_NoFeedback,199,200,0 +block_hint,StoreIC_NoFeedback,201,202,0 +block_hint,StoreIC_NoFeedback,447,448,0 +block_hint,StoreIC_NoFeedback,207,208,1 +block_hint,StoreIC_NoFeedback,473,474,0 +block_hint,StoreIC_NoFeedback,262,263,0 +block_hint,StoreIC_NoFeedback,551,552,0 block_hint,StoreIC_NoFeedback,209,210,0 -block_hint,StoreIC_NoFeedback,71,72,0 -block_hint,StoreIC_NoFeedback,380,381,0 -block_hint,StoreIC_NoFeedback,211,212,1 +block_hint,StoreIC_NoFeedback,449,450,0 +block_hint,StoreIC_NoFeedback,66,67,1 block_hint,StoreIC_NoFeedback,213,214,0 +block_hint,StoreIC_NoFeedback,68,69,0 +block_hint,StoreIC_NoFeedback,390,391,0 block_hint,StoreIC_NoFeedback,215,216,1 block_hint,StoreIC_NoFeedback,217,218,0 -block_hint,StoreIC_NoFeedback,478,479,0 block_hint,StoreIC_NoFeedback,219,220,1 -block_hint,StoreIC_NoFeedback,348,349,0 -block_hint,StoreIC_NoFeedback,480,481,0 -block_hint,StoreIC_NoFeedback,383,384,1 -block_hint,StoreIC_NoFeedback,227,228,1 -block_hint,StoreIC_NoFeedback,229,230,0 -block_hint,StoreIC_NoFeedback,231,232,0 -block_hint,StoreIC_NoFeedback,233,234,1 -block_hint,StoreIC_NoFeedback,223,224,0 -block_hint,StoreIC_NoFeedback,519,520,0 -block_hint,StoreIC_NoFeedback,465,466,1 -block_hint,StoreIC_NoFeedback,402,403,1 -block_hint,StoreIC_NoFeedback,75,76,0 -block_hint,StoreIC_NoFeedback,81,82,0 -block_hint,StoreIC_NoFeedback,127,128,0 -block_hint,StoreIC_NoFeedback,261,262,1 -block_hint,StoreIC_NoFeedback,83,84,0 -block_hint,StoreIC_NoFeedback,237,238,0 -block_hint,StoreIC_NoFeedback,239,240,0 -block_hint,StoreIC_NoFeedback,436,437,0 -block_hint,StoreIC_NoFeedback,241,242,1 -block_hint,StoreIC_NoFeedback,482,483,0 -block_hint,StoreIC_NoFeedback,393,394,0 -block_hint,StoreIC_NoFeedback,320,321,1 -block_hint,StoreIC_NoFeedback,438,439,0 -block_hint,StoreIC_NoFeedback,93,94,0 -block_hint,StoreIC_NoFeedback,324,325,0 -block_hint,StoreIC_NoFeedback,264,265,1 -block_hint,StoreIC_NoFeedback,97,98,0 -block_hint,StoreIC_NoFeedback,249,250,0 -block_hint,StoreIC_NoFeedback,251,252,1 -block_hint,StoreIC_NoFeedback,352,353,0 -block_hint,StoreIC_NoFeedback,41,42,1 -block_hint,StoreIC_NoFeedback,43,44,1 -block_hint,StoreIC_NoFeedback,137,138,0 -block_hint,StoreIC_NoFeedback,45,46,1 -block_hint,StoreIC_NoFeedback,139,140,0 -block_hint,StoreIC_NoFeedback,47,48,1 -block_hint,StoreIC_NoFeedback,99,100,0 -block_hint,StoreIC_NoFeedback,49,50,1 -block_hint,StoreIC_NoFeedback,51,52,1 -block_hint,StoreIC_NoFeedback,425,426,0 -block_hint,StoreIC_NoFeedback,53,54,1 -block_hint,DefineNamedOwnIC_NoFeedback,78,79,1 -block_hint,DefineNamedOwnIC_NoFeedback,80,81,0 -block_hint,DefineNamedOwnIC_NoFeedback,195,196,0 -block_hint,DefineNamedOwnIC_NoFeedback,150,151,1 -block_hint,DefineNamedOwnIC_NoFeedback,201,202,0 -block_hint,DefineNamedOwnIC_NoFeedback,152,153,1 -block_hint,DefineNamedOwnIC_NoFeedback,38,39,1 
-block_hint,DefineNamedOwnIC_NoFeedback,84,85,0 +block_hint,StoreIC_NoFeedback,221,222,0 +block_hint,StoreIC_NoFeedback,509,510,0 +block_hint,StoreIC_NoFeedback,223,224,1 +block_hint,StoreIC_NoFeedback,356,357,0 +block_hint,StoreIC_NoFeedback,511,512,0 +block_hint,StoreIC_NoFeedback,393,394,1 +block_hint,StoreIC_NoFeedback,231,232,1 +block_hint,StoreIC_NoFeedback,233,234,0 +block_hint,StoreIC_NoFeedback,235,236,0 +block_hint,StoreIC_NoFeedback,237,238,1 +block_hint,StoreIC_NoFeedback,227,228,0 +block_hint,StoreIC_NoFeedback,564,565,0 +block_hint,StoreIC_NoFeedback,494,495,1 +block_hint,StoreIC_NoFeedback,413,414,1 +block_hint,StoreIC_NoFeedback,72,73,0 +block_hint,StoreIC_NoFeedback,78,79,0 +block_hint,StoreIC_NoFeedback,130,131,0 +block_hint,StoreIC_NoFeedback,415,416,1 +block_hint,StoreIC_NoFeedback,80,81,0 +block_hint,StoreIC_NoFeedback,82,83,0 +block_hint,StoreIC_NoFeedback,241,242,0 +block_hint,StoreIC_NoFeedback,243,244,0 +block_hint,StoreIC_NoFeedback,456,457,0 +block_hint,StoreIC_NoFeedback,245,246,1 +block_hint,StoreIC_NoFeedback,513,514,0 +block_hint,StoreIC_NoFeedback,403,404,0 +block_hint,StoreIC_NoFeedback,458,459,1 +block_hint,StoreIC_NoFeedback,268,269,0 +block_hint,StoreIC_NoFeedback,553,554,0 +block_hint,StoreIC_NoFeedback,460,461,0 +block_hint,StoreIC_NoFeedback,531,532,0 +block_hint,StoreIC_NoFeedback,90,91,0 +block_hint,StoreIC_NoFeedback,332,333,0 +block_hint,StoreIC_NoFeedback,420,421,1 +block_hint,StoreIC_NoFeedback,94,95,0 +block_hint,StoreIC_NoFeedback,96,97,0 +block_hint,StoreIC_NoFeedback,253,254,0 +block_hint,StoreIC_NoFeedback,255,256,1 +block_hint,StoreIC_NoFeedback,362,363,0 +block_hint,StoreIC_NoFeedback,40,41,1 +block_hint,StoreIC_NoFeedback,42,43,1 +block_hint,StoreIC_NoFeedback,141,142,0 +block_hint,StoreIC_NoFeedback,44,45,1 +block_hint,StoreIC_NoFeedback,143,144,0 +block_hint,StoreIC_NoFeedback,46,47,1 +block_hint,StoreIC_NoFeedback,100,101,0 +block_hint,StoreIC_NoFeedback,102,103,0 +block_hint,StoreIC_NoFeedback,48,49,1 +block_hint,StoreIC_NoFeedback,50,51,1 +block_hint,StoreIC_NoFeedback,439,440,0 +block_hint,StoreIC_NoFeedback,52,53,1 +block_hint,DefineNamedOwnIC_NoFeedback,80,81,1 +block_hint,DefineNamedOwnIC_NoFeedback,82,83,0 +block_hint,DefineNamedOwnIC_NoFeedback,236,237,0 +block_hint,DefineNamedOwnIC_NoFeedback,210,211,1 +block_hint,DefineNamedOwnIC_NoFeedback,136,137,0 +block_hint,DefineNamedOwnIC_NoFeedback,239,240,0 +block_hint,DefineNamedOwnIC_NoFeedback,212,213,0 +block_hint,DefineNamedOwnIC_NoFeedback,234,235,0 +block_hint,DefineNamedOwnIC_NoFeedback,157,158,1 +block_hint,DefineNamedOwnIC_NoFeedback,36,37,1 +block_hint,DefineNamedOwnIC_NoFeedback,86,87,0 +block_hint,DefineNamedOwnIC_NoFeedback,38,39,0 block_hint,DefineNamedOwnIC_NoFeedback,40,41,0 -block_hint,DefineNamedOwnIC_NoFeedback,42,43,0 block_hint,KeyedLoadIC_SloppyArguments,12,13,0 block_hint,KeyedLoadIC_SloppyArguments,14,15,1 block_hint,KeyedLoadIC_SloppyArguments,4,5,1 @@ -606,7 +644,6 @@ block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,489,490,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,265,266,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,661,662,1 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,492,493,1 -block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,271,272,1 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,587,588,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,455,456,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,214,215,0 @@ -625,7 +662,6 @@ 
block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,459,460,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,219,220,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,561,562,1 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,182,183,0 -block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,184,185,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,645,646,1 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,461,462,0 block_hint,StoreFastElementIC_GrowNoTransitionHandleCOW,36,37,1 @@ -723,16 +759,17 @@ block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,1059,1060,1 block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,703,704,0 block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,62,63,1 block_hint,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,95,96,0 -block_hint,KeyedHasIC_PolymorphicName,61,62,1 -block_hint,KeyedHasIC_PolymorphicName,32,33,1 -block_hint,KeyedHasIC_PolymorphicName,28,29,0 +block_hint,KeyedHasIC_PolymorphicName,69,70,1 +block_hint,KeyedHasIC_PolymorphicName,28,29,1 +block_hint,KeyedHasIC_PolymorphicName,24,25,0 +block_hint,KeyedHasIC_PolymorphicName,26,27,0 +block_hint,KeyedHasIC_PolymorphicName,55,56,1 +block_hint,KeyedHasIC_PolymorphicName,89,90,1 +block_hint,KeyedHasIC_PolymorphicName,93,94,1 block_hint,KeyedHasIC_PolymorphicName,30,31,0 -block_hint,KeyedHasIC_PolymorphicName,47,48,1 -block_hint,KeyedHasIC_PolymorphicName,83,84,1 -block_hint,KeyedHasIC_PolymorphicName,63,64,1 -block_hint,KeyedHasIC_PolymorphicName,14,15,0 +block_hint,KeyedHasIC_PolymorphicName,78,79,1 +block_hint,KeyedHasIC_PolymorphicName,14,15,1 block_hint,KeyedHasIC_PolymorphicName,16,17,1 -block_hint,KeyedHasIC_PolymorphicName,18,19,1 block_hint,EnqueueMicrotask,4,5,0 block_hint,EnqueueMicrotask,2,3,0 block_hint,RunMicrotasks,18,19,0 @@ -742,111 +779,118 @@ block_hint,RunMicrotasks,36,37,1 block_hint,RunMicrotasks,85,86,0 block_hint,RunMicrotasks,67,68,0 block_hint,RunMicrotasks,38,39,1 -block_hint,HasProperty,133,134,1 -block_hint,HasProperty,135,136,1 -block_hint,HasProperty,253,254,0 -block_hint,HasProperty,207,208,1 -block_hint,HasProperty,245,246,0 -block_hint,HasProperty,93,94,0 -block_hint,HasProperty,228,229,1 -block_hint,HasProperty,119,120,1 block_hint,HasProperty,137,138,1 -block_hint,HasProperty,195,196,0 -block_hint,HasProperty,197,198,0 +block_hint,HasProperty,139,140,1 +block_hint,HasProperty,263,264,0 +block_hint,HasProperty,211,212,1 +block_hint,HasProperty,254,255,0 block_hint,HasProperty,97,98,0 -block_hint,HasProperty,95,96,0 -block_hint,HasProperty,241,242,0 -block_hint,HasProperty,232,233,0 -block_hint,HasProperty,199,200,1 -block_hint,HasProperty,249,250,0 -block_hint,HasProperty,201,202,1 -block_hint,HasProperty,45,46,1 -block_hint,HasProperty,63,64,0 -block_hint,HasProperty,47,48,0 -block_hint,HasProperty,103,104,1 -block_hint,HasProperty,51,52,0 -block_hint,HasProperty,258,259,0 -block_hint,HasProperty,222,223,0 -block_hint,HasProperty,39,40,0 -block_hint,DeleteProperty,35,36,1 -block_hint,DeleteProperty,60,61,0 -block_hint,DeleteProperty,37,38,0 -block_hint,DeleteProperty,64,65,1 -block_hint,DeleteProperty,86,87,0 -block_hint,DeleteProperty,69,70,0 -block_hint,DeleteProperty,62,63,1 -block_hint,DeleteProperty,54,55,1 -block_hint,DeleteProperty,39,40,1 -block_hint,DeleteProperty,82,83,0 -block_hint,DeleteProperty,84,85,0 +block_hint,HasProperty,234,235,1 +block_hint,HasProperty,123,124,1 +block_hint,HasProperty,141,142,1 +block_hint,HasProperty,199,200,0 +block_hint,HasProperty,201,202,0 
+block_hint,HasProperty,101,102,0 +block_hint,HasProperty,99,100,0 +block_hint,HasProperty,250,251,0 +block_hint,HasProperty,270,271,0 +block_hint,HasProperty,259,260,1 +block_hint,HasProperty,106,107,0 +block_hint,HasProperty,277,278,0 +block_hint,HasProperty,282,283,0 +block_hint,HasProperty,268,269,0 +block_hint,HasProperty,203,204,1 +block_hint,HasProperty,42,43,1 +block_hint,HasProperty,65,66,0 +block_hint,HasProperty,44,45,0 +block_hint,HasProperty,239,240,1 +block_hint,HasProperty,48,49,0 +block_hint,HasProperty,272,273,0 +block_hint,HasProperty,228,229,0 +block_hint,HasProperty,38,39,0 +block_hint,DeleteProperty,38,39,1 +block_hint,DeleteProperty,62,63,0 +block_hint,DeleteProperty,40,41,0 +block_hint,DeleteProperty,66,67,1 +block_hint,DeleteProperty,91,92,0 block_hint,DeleteProperty,73,74,0 -block_hint,DeleteProperty,71,72,0 -block_hint,DeleteProperty,44,45,0 -block_hint,DeleteProperty,46,47,0 +block_hint,DeleteProperty,64,65,1 +block_hint,DeleteProperty,56,57,1 +block_hint,DeleteProperty,42,43,1 +block_hint,DeleteProperty,83,84,0 +block_hint,DeleteProperty,85,86,0 +block_hint,DeleteProperty,77,78,0 block_hint,DeleteProperty,75,76,0 -block_hint,DeleteProperty,50,51,1 -block_hint,DeleteProperty,52,53,0 -block_hint,DeleteProperty,8,9,1 -block_hint,DeleteProperty,10,11,1 -block_hint,DeleteProperty,12,13,1 -block_hint,DeleteProperty,14,15,1 -block_hint,DeleteProperty,16,17,1 -block_hint,SetDataProperties,132,133,1 -block_hint,SetDataProperties,253,254,1 -block_hint,SetDataProperties,251,252,1 -block_hint,SetDataProperties,140,141,0 -block_hint,SetDataProperties,298,299,0 -block_hint,SetDataProperties,142,143,0 -block_hint,SetDataProperties,60,61,0 -block_hint,SetDataProperties,317,318,0 -block_hint,SetDataProperties,257,258,0 -block_hint,SetDataProperties,322,323,1 -block_hint,SetDataProperties,263,264,0 -block_hint,SetDataProperties,681,682,0 -block_hint,SetDataProperties,703,704,1 -block_hint,SetDataProperties,679,680,0 -block_hint,SetDataProperties,677,678,0 +block_hint,DeleteProperty,47,48,0 +block_hint,DeleteProperty,49,50,0 +block_hint,DeleteProperty,87,88,0 +block_hint,DeleteProperty,71,72,1 +block_hint,DeleteProperty,20,21,0 +block_hint,DeleteProperty,54,55,0 +block_hint,DeleteProperty,7,8,1 +block_hint,DeleteProperty,9,10,1 +block_hint,DeleteProperty,11,12,1 +block_hint,DeleteProperty,13,14,1 +block_hint,DeleteProperty,15,16,1 +block_hint,SetDataProperties,136,137,1 +block_hint,SetDataProperties,263,264,1 +block_hint,SetDataProperties,261,262,1 +block_hint,SetDataProperties,144,145,0 +block_hint,SetDataProperties,316,317,0 +block_hint,SetDataProperties,146,147,0 +block_hint,SetDataProperties,59,60,0 +block_hint,SetDataProperties,341,342,0 +block_hint,SetDataProperties,267,268,0 +block_hint,SetDataProperties,385,386,1 +block_hint,SetDataProperties,277,278,0 +block_hint,SetDataProperties,752,753,0 +block_hint,SetDataProperties,762,763,1 +block_hint,SetDataProperties,750,751,0 +block_hint,SetDataProperties,748,749,0 +block_hint,SetDataProperties,659,660,0 +block_hint,SetDataProperties,451,452,1 +block_hint,SetDataProperties,221,222,1 +block_hint,SetDataProperties,87,88,0 +block_hint,SetDataProperties,223,224,0 +block_hint,SetDataProperties,513,514,0 +block_hint,SetDataProperties,515,516,0 +block_hint,SetDataProperties,519,520,1 +block_hint,SetDataProperties,449,450,0 +block_hint,SetDataProperties,329,330,1 +block_hint,SetDataProperties,326,327,0 +block_hint,SetDataProperties,158,159,0 +block_hint,SetDataProperties,399,400,0 +block_hint,SetDataProperties,447,448,0 
+block_hint,SetDataProperties,352,353,0 +block_hint,SetDataProperties,226,227,1 +block_hint,SetDataProperties,93,94,1 +block_hint,SetDataProperties,521,522,0 +block_hint,SetDataProperties,95,96,0 +block_hint,SetDataProperties,97,98,0 +block_hint,SetDataProperties,617,618,0 +block_hint,SetDataProperties,523,524,1 +block_hint,SetDataProperties,525,526,0 +block_hint,SetDataProperties,527,528,1 block_hint,SetDataProperties,529,530,0 -block_hint,SetDataProperties,327,328,1 -block_hint,SetDataProperties,84,85,1 -block_hint,SetDataProperties,90,91,0 -block_hint,SetDataProperties,217,218,0 -block_hint,SetDataProperties,457,458,0 -block_hint,SetDataProperties,459,460,0 -block_hint,SetDataProperties,463,464,1 -block_hint,SetDataProperties,398,399,0 -block_hint,SetDataProperties,308,309,1 -block_hint,SetDataProperties,151,152,0 -block_hint,SetDataProperties,306,307,0 -block_hint,SetDataProperties,219,220,1 -block_hint,SetDataProperties,96,97,1 -block_hint,SetDataProperties,465,466,0 -block_hint,SetDataProperties,98,99,0 -block_hint,SetDataProperties,100,101,0 -block_hint,SetDataProperties,566,567,0 -block_hint,SetDataProperties,467,468,1 -block_hint,SetDataProperties,469,470,0 -block_hint,SetDataProperties,471,472,1 -block_hint,SetDataProperties,473,474,0 -block_hint,SetDataProperties,643,644,0 -block_hint,SetDataProperties,475,476,1 -block_hint,SetDataProperties,522,523,0 -block_hint,SetDataProperties,645,646,0 -block_hint,SetDataProperties,569,570,1 -block_hint,SetDataProperties,483,484,1 -block_hint,SetDataProperties,485,486,0 -block_hint,SetDataProperties,487,488,0 -block_hint,SetDataProperties,489,490,1 -block_hint,SetDataProperties,479,480,0 -block_hint,SetDataProperties,627,628,0 -block_hint,SetDataProperties,499,500,1 -block_hint,SetDataProperties,275,276,1 -block_hint,SetDataProperties,102,103,0 -block_hint,SetDataProperties,390,391,0 -block_hint,SetDataProperties,234,235,0 -block_hint,SetDataProperties,265,266,1 -block_hint,SetDataProperties,198,199,0 -block_hint,SetDataProperties,62,63,0 +block_hint,SetDataProperties,673,674,0 +block_hint,SetDataProperties,531,532,1 +block_hint,SetDataProperties,577,578,0 +block_hint,SetDataProperties,675,676,0 +block_hint,SetDataProperties,620,621,1 +block_hint,SetDataProperties,539,540,1 +block_hint,SetDataProperties,541,542,0 +block_hint,SetDataProperties,543,544,0 +block_hint,SetDataProperties,545,546,1 +block_hint,SetDataProperties,535,536,0 +block_hint,SetDataProperties,657,658,0 +block_hint,SetDataProperties,555,556,1 +block_hint,SetDataProperties,292,293,1 +block_hint,SetDataProperties,99,100,0 +block_hint,SetDataProperties,437,438,0 +block_hint,SetDataProperties,241,242,0 +block_hint,SetDataProperties,279,280,1 +block_hint,SetDataProperties,204,205,0 +block_hint,SetDataProperties,61,62,0 block_hint,ReturnReceiver,3,4,1 block_hint,ArrayConstructorImpl,40,41,0 block_hint,ArrayConstructorImpl,15,16,1 @@ -1046,6 +1090,7 @@ block_hint,ExtractFastJSArray,10,11,0 block_hint,ExtractFastJSArray,39,40,1 block_hint,ExtractFastJSArray,35,36,1 block_hint,ExtractFastJSArray,20,21,1 +block_hint,ExtractFastJSArray,8,9,0 block_hint,ExtractFastJSArray,12,13,0 block_hint,ExtractFastJSArray,14,15,1 block_hint,ExtractFastJSArray,37,38,1 @@ -1174,452 +1219,476 @@ block_hint,ResumeGeneratorBaseline,6,7,0 block_hint,GlobalIsFinite,9,10,1 block_hint,GlobalIsNaN,9,10,1 block_hint,GlobalIsNaN,11,12,1 -block_hint,LoadIC,360,361,1 -block_hint,LoadIC,135,136,0 -block_hint,LoadIC,61,62,0 -block_hint,LoadIC,227,228,0 -block_hint,LoadIC,339,340,1 -block_hint,LoadIC,229,230,0 
-block_hint,LoadIC,374,375,1 -block_hint,LoadIC,371,372,1 -block_hint,LoadIC,288,289,1 -block_hint,LoadIC,102,103,1 -block_hint,LoadIC,274,275,0 -block_hint,LoadIC,313,314,0 -block_hint,LoadIC,137,138,1 +block_hint,LoadIC,370,371,1 block_hint,LoadIC,139,140,0 -block_hint,LoadIC,302,303,1 -block_hint,LoadIC,258,259,1 +block_hint,LoadIC,59,60,0 +block_hint,LoadIC,233,234,0 +block_hint,LoadIC,345,346,1 +block_hint,LoadIC,235,236,0 +block_hint,LoadIC,387,388,1 +block_hint,LoadIC,384,385,0 +block_hint,LoadIC,381,382,1 +block_hint,LoadIC,292,293,1 +block_hint,LoadIC,100,101,1 +block_hint,LoadIC,278,279,0 +block_hint,LoadIC,319,320,0 +block_hint,LoadIC,141,142,1 +block_hint,LoadIC,143,144,0 +block_hint,LoadIC,308,309,1 +block_hint,LoadIC,358,359,1 +block_hint,LoadIC,102,103,0 +block_hint,LoadIC,19,20,1 +block_hint,LoadIC,62,63,0 block_hint,LoadIC,21,22,1 -block_hint,LoadIC,64,65,0 -block_hint,LoadIC,23,24,1 -block_hint,LoadIC,169,170,0 -block_hint,LoadIC,354,355,0 -block_hint,LoadIC,356,357,0 -block_hint,LoadIC,311,312,0 -block_hint,LoadIC,125,126,0 -block_hint,LoadIC,51,52,1 -block_hint,LoadIC,203,204,0 -block_hint,LoadIC,86,87,0 -block_hint,LoadIC,46,47,0 -block_hint,LoadIC,260,261,1 -block_hint,LoadIC,179,180,0 -block_hint,LoadIC,44,45,1 -block_hint,LoadIC,78,79,0 -block_hint,LoadIC,264,265,0 -block_hint,LoadIC,304,305,1 -block_hint,LoadIC,27,28,0 +block_hint,LoadIC,173,174,0 +block_hint,LoadIC,364,365,0 +block_hint,LoadIC,366,367,0 +block_hint,LoadIC,317,318,0 +block_hint,LoadIC,129,130,0 +block_hint,LoadIC,49,50,1 +block_hint,LoadIC,209,210,0 +block_hint,LoadIC,84,85,0 +block_hint,LoadIC,44,45,0 +block_hint,LoadIC,360,361,1 +block_hint,LoadIC,114,115,0 +block_hint,LoadIC,183,184,0 +block_hint,LoadIC,42,43,1 +block_hint,LoadIC,76,77,0 +block_hint,LoadIC,268,269,0 +block_hint,LoadIC,310,311,1 +block_hint,LoadIC,25,26,0 +block_hint,LoadIC,179,180,1 +block_hint,LoadIC,181,182,1 block_hint,LoadIC,175,176,1 block_hint,LoadIC,177,178,1 -block_hint,LoadIC,171,172,1 -block_hint,LoadIC,173,174,1 -block_hint,LoadIC,129,130,1 -block_hint,LoadIC,131,132,0 -block_hint,LoadIC_Megamorphic,342,343,1 -block_hint,LoadIC_Megamorphic,339,340,1 -block_hint,LoadIC_Megamorphic,253,254,1 -block_hint,LoadIC_Megamorphic,255,256,1 -block_hint,LoadIC_Megamorphic,251,252,0 -block_hint,LoadIC_Megamorphic,58,59,0 -block_hint,LoadIC_Megamorphic,288,289,0 -block_hint,LoadIC_Megamorphic,126,127,1 -block_hint,LoadIC_Megamorphic,274,275,1 -block_hint,LoadIC_Megamorphic,128,129,0 -block_hint,LoadIC_Megamorphic,276,277,1 -block_hint,LoadIC_Megamorphic,235,236,1 -block_hint,LoadIC_Megamorphic,97,98,0 -block_hint,LoadIC_Megamorphic,22,23,1 -block_hint,LoadIC_Megamorphic,158,159,0 -block_hint,LoadIC_Megamorphic,281,282,0 -block_hint,LoadIC_Megamorphic,245,246,1 -block_hint,LoadIC_Megamorphic,324,325,0 -block_hint,LoadIC_Megamorphic,326,327,0 -block_hint,LoadIC_Megamorphic,285,286,0 -block_hint,LoadIC_Megamorphic,118,119,0 -block_hint,LoadIC_Megamorphic,50,51,1 -block_hint,LoadIC_Megamorphic,45,46,0 -block_hint,LoadIC_Megamorphic,241,242,0 -block_hint,LoadIC_Megamorphic,278,279,1 -block_hint,LoadIC_Megamorphic,26,27,0 +block_hint,LoadIC,133,134,1 +block_hint,LoadIC,135,136,0 +block_hint,LoadIC_Megamorphic,355,356,1 +block_hint,LoadIC_Megamorphic,352,353,0 +block_hint,LoadIC_Megamorphic,349,350,1 +block_hint,LoadIC_Megamorphic,257,258,1 +block_hint,LoadIC_Megamorphic,259,260,1 +block_hint,LoadIC_Megamorphic,255,256,0 +block_hint,LoadIC_Megamorphic,56,57,0 +block_hint,LoadIC_Megamorphic,294,295,0 
+block_hint,LoadIC_Megamorphic,130,131,1 +block_hint,LoadIC_Megamorphic,280,281,1 +block_hint,LoadIC_Megamorphic,132,133,0 +block_hint,LoadIC_Megamorphic,282,283,1 +block_hint,LoadIC_Megamorphic,328,329,1 +block_hint,LoadIC_Megamorphic,95,96,0 +block_hint,LoadIC_Megamorphic,20,21,1 +block_hint,LoadIC_Megamorphic,162,163,0 +block_hint,LoadIC_Megamorphic,287,288,0 +block_hint,LoadIC_Megamorphic,249,250,1 +block_hint,LoadIC_Megamorphic,334,335,0 +block_hint,LoadIC_Megamorphic,336,337,0 +block_hint,LoadIC_Megamorphic,291,292,0 +block_hint,LoadIC_Megamorphic,122,123,0 +block_hint,LoadIC_Megamorphic,48,49,1 +block_hint,LoadIC_Megamorphic,43,44,0 +block_hint,LoadIC_Megamorphic,245,246,0 +block_hint,LoadIC_Megamorphic,284,285,1 block_hint,LoadIC_Megamorphic,24,25,0 -block_hint,LoadIC_Megamorphic,160,161,1 -block_hint,LoadIC_Megamorphic,162,163,1 -block_hint,LoadIC_Megamorphic,122,123,1 -block_hint,LoadIC_Noninlined,356,357,1 -block_hint,LoadIC_Noninlined,128,129,0 -block_hint,LoadIC_Noninlined,359,360,1 -block_hint,LoadIC_Noninlined,354,355,1 -block_hint,LoadIC_Noninlined,263,264,0 -block_hint,LoadIC_Noninlined,58,59,0 -block_hint,LoadIC_Noninlined,302,303,0 -block_hint,LoadIC_Noninlined,138,139,1 -block_hint,LoadIC_Noninlined,286,287,1 -block_hint,LoadIC_Noninlined,22,23,1 -block_hint,LoadIC_Noninlined,170,171,0 -block_hint,LoadIC_Noninlined,39,40,1 -block_hint,LoadIC_Noninlined,253,254,0 -block_hint,LoadIC_Noninlined,290,291,1 -block_hint,LoadIC_Noninlined,26,27,0 +block_hint,LoadIC_Megamorphic,22,23,0 +block_hint,LoadIC_Megamorphic,164,165,1 +block_hint,LoadIC_Megamorphic,166,167,1 +block_hint,LoadIC_Megamorphic,126,127,1 +block_hint,LoadIC_Noninlined,366,367,1 +block_hint,LoadIC_Noninlined,132,133,0 +block_hint,LoadIC_Noninlined,372,373,1 +block_hint,LoadIC_Noninlined,369,370,0 +block_hint,LoadIC_Noninlined,364,365,1 +block_hint,LoadIC_Noninlined,267,268,0 +block_hint,LoadIC_Noninlined,56,57,0 +block_hint,LoadIC_Noninlined,308,309,0 +block_hint,LoadIC_Noninlined,142,143,1 +block_hint,LoadIC_Noninlined,292,293,1 +block_hint,LoadIC_Noninlined,20,21,1 +block_hint,LoadIC_Noninlined,174,175,0 +block_hint,LoadIC_Noninlined,37,38,1 +block_hint,LoadIC_Noninlined,257,258,0 +block_hint,LoadIC_Noninlined,296,297,1 block_hint,LoadIC_Noninlined,24,25,0 +block_hint,LoadIC_Noninlined,22,23,0 block_hint,LoadICTrampoline,3,4,1 block_hint,LoadICTrampoline_Megamorphic,3,4,1 -block_hint,LoadSuperIC,508,509,0 -block_hint,LoadSuperIC,245,246,0 -block_hint,LoadSuperIC,544,545,1 +block_hint,LoadSuperIC,528,529,0 +block_hint,LoadSuperIC,253,254,0 +block_hint,LoadSuperIC,564,565,1 +block_hint,LoadSuperIC,440,441,0 +block_hint,LoadSuperIC,75,76,0 +block_hint,LoadSuperIC,540,541,0 +block_hint,LoadSuperIC,255,256,1 +block_hint,LoadSuperIC,515,516,1 +block_hint,LoadSuperIC,41,42,1 +block_hint,LoadSuperIC,550,551,0 +block_hint,LoadSuperIC,287,288,0 +block_hint,LoadSuperIC,60,61,1 +block_hint,LoadSuperIC,429,430,0 block_hint,LoadSuperIC,427,428,0 -block_hint,LoadSuperIC,78,79,0 -block_hint,LoadSuperIC,520,521,0 -block_hint,LoadSuperIC,247,248,1 -block_hint,LoadSuperIC,497,498,1 -block_hint,LoadSuperIC,44,45,1 -block_hint,LoadSuperIC,530,531,0 -block_hint,LoadSuperIC,279,280,0 -block_hint,LoadSuperIC,63,64,1 -block_hint,LoadSuperIC,416,417,0 -block_hint,LoadSuperIC,414,415,0 -block_hint,LoadSuperIC,501,502,1 -block_hint,LoadSuperIC,48,49,0 -block_hint,KeyedLoadIC,619,620,1 -block_hint,KeyedLoadIC,253,254,0 -block_hint,KeyedLoadIC,245,246,0 -block_hint,KeyedLoadIC,379,380,0 -block_hint,KeyedLoadIC,490,491,1 
-block_hint,KeyedLoadIC,661,662,0 -block_hint,KeyedLoadIC,613,614,0 -block_hint,KeyedLoadIC,568,569,1 -block_hint,KeyedLoadIC,385,386,1 -block_hint,KeyedLoadIC,383,384,1 -block_hint,KeyedLoadIC,647,648,0 -block_hint,KeyedLoadIC,649,650,0 -block_hint,KeyedLoadIC,617,618,0 -block_hint,KeyedLoadIC,570,571,1 -block_hint,KeyedLoadIC,153,154,1 -block_hint,KeyedLoadIC,611,612,0 -block_hint,KeyedLoadIC,471,472,0 -block_hint,KeyedLoadIC,103,104,1 +block_hint,LoadSuperIC,519,520,1 +block_hint,LoadSuperIC,45,46,0 +block_hint,LoadSuperIC,671,672,0 +block_hint,KeyedLoadIC,629,630,1 +block_hint,KeyedLoadIC,257,258,0 +block_hint,KeyedLoadIC,249,250,0 +block_hint,KeyedLoadIC,385,386,0 +block_hint,KeyedLoadIC,494,495,1 +block_hint,KeyedLoadIC,671,672,0 +block_hint,KeyedLoadIC,623,624,0 +block_hint,KeyedLoadIC,574,575,1 +block_hint,KeyedLoadIC,391,392,1 +block_hint,KeyedLoadIC,389,390,1 block_hint,KeyedLoadIC,657,658,0 block_hint,KeyedLoadIC,659,660,0 -block_hint,KeyedLoadIC,625,626,1 -block_hint,KeyedLoadIC,627,628,1 -block_hint,KeyedLoadIC,289,290,1 -block_hint,KeyedLoadIC,291,292,0 -block_hint,KeyedLoadIC,653,654,1 -block_hint,KeyedLoadIC,511,512,1 -block_hint,KeyedLoadIC,609,610,0 -block_hint,KeyedLoadIC,596,597,0 -block_hint,KeyedLoadIC,548,549,1 -block_hint,KeyedLoadIC,311,312,1 -block_hint,KeyedLoadIC,65,66,0 -block_hint,KeyedLoadIC,301,302,0 -block_hint,KeyedLoadIC,515,516,1 -block_hint,KeyedLoadIC,303,304,1 -block_hint,KeyedLoadIC,218,219,0 -block_hint,KeyedLoadIC,175,176,1 -block_hint,KeyedLoadIC,550,551,0 -block_hint,KeyedLoadIC,446,447,1 +block_hint,KeyedLoadIC,627,628,0 +block_hint,KeyedLoadIC,576,577,1 +block_hint,KeyedLoadIC,151,152,1 +block_hint,KeyedLoadIC,621,622,0 +block_hint,KeyedLoadIC,475,476,0 +block_hint,KeyedLoadIC,101,102,1 +block_hint,KeyedLoadIC,667,668,0 +block_hint,KeyedLoadIC,669,670,0 +block_hint,KeyedLoadIC,635,636,1 +block_hint,KeyedLoadIC,637,638,1 +block_hint,KeyedLoadIC,293,294,1 +block_hint,KeyedLoadIC,295,296,0 +block_hint,KeyedLoadIC,663,664,1 +block_hint,KeyedLoadIC,517,518,1 +block_hint,KeyedLoadIC,619,620,0 +block_hint,KeyedLoadIC,606,607,0 +block_hint,KeyedLoadIC,554,555,1 +block_hint,KeyedLoadIC,315,316,1 +block_hint,KeyedLoadIC,63,64,0 +block_hint,KeyedLoadIC,305,306,0 +block_hint,KeyedLoadIC,521,522,1 +block_hint,KeyedLoadIC,307,308,1 +block_hint,KeyedLoadIC,221,222,0 +block_hint,KeyedLoadIC,177,178,1 +block_hint,KeyedLoadIC,556,557,0 +block_hint,KeyedLoadIC,450,451,1 +block_hint,KeyedLoadIC,113,114,0 block_hint,KeyedLoadIC,115,116,0 -block_hint,KeyedLoadIC,117,118,0 -block_hint,KeyedLoadIC,400,401,1 -block_hint,KeyedLoadIC,599,600,1 -block_hint,KeyedLoadIC,241,242,1 -block_hint,KeyedLoadIC,552,553,0 -block_hint,KeyedLoadIC,504,505,0 -block_hint,KeyedLoadIC,432,433,1 -block_hint,KeyedLoadIC,663,664,0 -block_hint,KeyedLoadIC,121,122,1 -block_hint,KeyedLoadIC,319,320,1 -block_hint,KeyedLoadIC,321,322,1 -block_hint,KeyedLoadIC,67,68,0 -block_hint,KeyedLoadIC_Megamorphic,482,483,1 -block_hint,KeyedLoadIC_Megamorphic,484,485,0 -block_hint,KeyedLoadIC_Megamorphic,1139,1140,0 -block_hint,KeyedLoadIC_Megamorphic,1141,1142,1 -block_hint,KeyedLoadIC_Megamorphic,1117,1118,1 -block_hint,KeyedLoadIC_Megamorphic,1078,1079,0 -block_hint,KeyedLoadIC_Megamorphic,1133,1134,1 -block_hint,KeyedLoadIC_Megamorphic,1143,1144,1 -block_hint,KeyedLoadIC_Megamorphic,1119,1120,1 -block_hint,KeyedLoadIC_Megamorphic,1135,1136,0 -block_hint,KeyedLoadIC_Megamorphic,1053,1054,0 -block_hint,KeyedLoadIC_Megamorphic,893,894,1 -block_hint,KeyedLoadIC_Megamorphic,891,892,1 
-block_hint,KeyedLoadIC_Megamorphic,514,515,1 +block_hint,KeyedLoadIC,406,407,1 +block_hint,KeyedLoadIC,609,610,1 +block_hint,KeyedLoadIC,245,246,1 +block_hint,KeyedLoadIC,558,559,0 +block_hint,KeyedLoadIC,508,509,0 +block_hint,KeyedLoadIC,436,437,1 +block_hint,KeyedLoadIC,673,674,0 +block_hint,KeyedLoadIC,119,120,1 +block_hint,KeyedLoadIC,323,324,1 +block_hint,KeyedLoadIC,325,326,1 +block_hint,KeyedLoadIC,65,66,0 +block_hint,KeyedLoadIC_Megamorphic,496,497,1 +block_hint,KeyedLoadIC_Megamorphic,498,499,0 +block_hint,KeyedLoadIC_Megamorphic,1218,1219,0 +block_hint,KeyedLoadIC_Megamorphic,1220,1221,1 +block_hint,KeyedLoadIC_Megamorphic,1192,1193,1 +block_hint,KeyedLoadIC_Megamorphic,1145,1146,0 +block_hint,KeyedLoadIC_Megamorphic,1212,1213,1 +block_hint,KeyedLoadIC_Megamorphic,1222,1223,1 +block_hint,KeyedLoadIC_Megamorphic,1194,1195,1 +block_hint,KeyedLoadIC_Megamorphic,1214,1215,0 block_hint,KeyedLoadIC_Megamorphic,1111,1112,0 -block_hint,KeyedLoadIC_Megamorphic,1113,1114,0 -block_hint,KeyedLoadIC_Megamorphic,1088,1089,0 -block_hint,KeyedLoadIC_Megamorphic,1086,1087,1 -block_hint,KeyedLoadIC_Megamorphic,1107,1108,0 -block_hint,KeyedLoadIC_Megamorphic,1082,1083,0 -block_hint,KeyedLoadIC_Megamorphic,895,896,1 -block_hint,KeyedLoadIC_Megamorphic,905,906,0 -block_hint,KeyedLoadIC_Megamorphic,1034,1035,0 -block_hint,KeyedLoadIC_Megamorphic,239,240,1 -block_hint,KeyedLoadIC_Megamorphic,1036,1037,0 -block_hint,KeyedLoadIC_Megamorphic,241,242,0 -block_hint,KeyedLoadIC_Megamorphic,1022,1023,0 -block_hint,KeyedLoadIC_Megamorphic,1129,1130,1 -block_hint,KeyedLoadIC_Megamorphic,1020,1021,0 -block_hint,KeyedLoadIC_Megamorphic,1018,1019,0 -block_hint,KeyedLoadIC_Megamorphic,627,628,1 -block_hint,KeyedLoadIC_Megamorphic,125,126,1 -block_hint,KeyedLoadIC_Megamorphic,199,200,0 -block_hint,KeyedLoadIC_Megamorphic,629,630,0 -block_hint,KeyedLoadIC_Megamorphic,544,545,0 -block_hint,KeyedLoadIC_Megamorphic,995,996,0 -block_hint,KeyedLoadIC_Megamorphic,948,949,0 -block_hint,KeyedLoadIC_Megamorphic,633,634,1 -block_hint,KeyedLoadIC_Megamorphic,635,636,0 -block_hint,KeyedLoadIC_Megamorphic,637,638,1 -block_hint,KeyedLoadIC_Megamorphic,256,257,1 -block_hint,KeyedLoadIC_Megamorphic,639,640,0 -block_hint,KeyedLoadIC_Megamorphic,258,259,0 -block_hint,KeyedLoadIC_Megamorphic,817,818,0 +block_hint,KeyedLoadIC_Megamorphic,927,928,1 +block_hint,KeyedLoadIC_Megamorphic,925,926,1 +block_hint,KeyedLoadIC_Megamorphic,528,529,1 +block_hint,KeyedLoadIC_Megamorphic,1186,1187,0 +block_hint,KeyedLoadIC_Megamorphic,1188,1189,0 +block_hint,KeyedLoadIC_Megamorphic,1155,1156,0 +block_hint,KeyedLoadIC_Megamorphic,1153,1154,1 +block_hint,KeyedLoadIC_Megamorphic,1182,1183,0 +block_hint,KeyedLoadIC_Megamorphic,1149,1150,0 +block_hint,KeyedLoadIC_Megamorphic,929,930,1 +block_hint,KeyedLoadIC_Megamorphic,939,940,0 +block_hint,KeyedLoadIC_Megamorphic,640,641,0 +block_hint,KeyedLoadIC_Megamorphic,1172,1173,0 +block_hint,KeyedLoadIC_Megamorphic,1085,1086,0 +block_hint,KeyedLoadIC_Megamorphic,1131,1132,0 +block_hint,KeyedLoadIC_Megamorphic,233,234,1 +block_hint,KeyedLoadIC_Megamorphic,1087,1088,0 +block_hint,KeyedLoadIC_Megamorphic,235,236,0 +block_hint,KeyedLoadIC_Megamorphic,1070,1071,0 +block_hint,KeyedLoadIC_Megamorphic,1204,1205,1 +block_hint,KeyedLoadIC_Megamorphic,1068,1069,0 +block_hint,KeyedLoadIC_Megamorphic,1066,1067,0 +block_hint,KeyedLoadIC_Megamorphic,980,981,1 +block_hint,KeyedLoadIC_Megamorphic,239,240,0 +block_hint,KeyedLoadIC_Megamorphic,121,122,1 +block_hint,KeyedLoadIC_Megamorphic,195,196,0 
+block_hint,KeyedLoadIC_Megamorphic,644,645,0 +block_hint,KeyedLoadIC_Megamorphic,558,559,0 +block_hint,KeyedLoadIC_Megamorphic,1043,1044,0 +block_hint,KeyedLoadIC_Megamorphic,983,984,0 block_hint,KeyedLoadIC_Megamorphic,648,649,1 -block_hint,KeyedLoadIC_Megamorphic,641,642,0 -block_hint,KeyedLoadIC_Megamorphic,1028,1029,0 -block_hint,KeyedLoadIC_Megamorphic,1102,1103,0 -block_hint,KeyedLoadIC_Megamorphic,1131,1132,1 +block_hint,KeyedLoadIC_Megamorphic,666,667,0 +block_hint,KeyedLoadIC_Megamorphic,1174,1175,0 +block_hint,KeyedLoadIC_Megamorphic,650,651,0 +block_hint,KeyedLoadIC_Megamorphic,1089,1090,0 +block_hint,KeyedLoadIC_Megamorphic,652,653,1 +block_hint,KeyedLoadIC_Megamorphic,250,251,1 +block_hint,KeyedLoadIC_Megamorphic,654,655,0 +block_hint,KeyedLoadIC_Megamorphic,252,253,0 +block_hint,KeyedLoadIC_Megamorphic,842,843,0 +block_hint,KeyedLoadIC_Megamorphic,987,988,1 +block_hint,KeyedLoadIC_Megamorphic,256,257,0 +block_hint,KeyedLoadIC_Megamorphic,656,657,0 +block_hint,KeyedLoadIC_Megamorphic,258,259,1 +block_hint,KeyedLoadIC_Megamorphic,1076,1077,0 +block_hint,KeyedLoadIC_Megamorphic,1169,1170,0 +block_hint,KeyedLoadIC_Megamorphic,1206,1207,1 +block_hint,KeyedLoadIC_Megamorphic,1074,1075,0 +block_hint,KeyedLoadIC_Megamorphic,123,124,1 +block_hint,KeyedLoadIC_Megamorphic,203,204,0 +block_hint,KeyedLoadIC_Megamorphic,923,924,0 +block_hint,KeyedLoadIC_Megamorphic,675,676,0 +block_hint,KeyedLoadIC_Megamorphic,1176,1177,0 +block_hint,KeyedLoadIC_Megamorphic,1208,1209,0 +block_hint,KeyedLoadIC_Megamorphic,1135,1136,0 +block_hint,KeyedLoadIC_Megamorphic,844,845,1 +block_hint,KeyedLoadIC_Megamorphic,268,269,1 +block_hint,KeyedLoadIC_Megamorphic,1200,1201,0 +block_hint,KeyedLoadIC_Megamorphic,270,271,0 +block_hint,KeyedLoadIC_Megamorphic,1056,1057,0 +block_hint,KeyedLoadIC_Megamorphic,1198,1199,1 +block_hint,KeyedLoadIC_Megamorphic,1054,1055,0 +block_hint,KeyedLoadIC_Megamorphic,1116,1117,1 +block_hint,KeyedLoadIC_Megamorphic,1107,1108,0 +block_hint,KeyedLoadIC_Megamorphic,1210,1211,0 +block_hint,KeyedLoadIC_Megamorphic,1101,1102,1 +block_hint,KeyedLoadIC_Megamorphic,740,741,1 +block_hint,KeyedLoadIC_Megamorphic,1017,1018,1 +block_hint,KeyedLoadIC_Megamorphic,736,737,0 +block_hint,KeyedLoadIC_Megamorphic,112,113,0 +block_hint,KeyedLoadIC_Megamorphic,877,878,0 +block_hint,KeyedLoadIC_Megamorphic,338,339,1 +block_hint,KeyedLoadIC_Megamorphic,863,864,1 +block_hint,KeyedLoadIC_Megamorphic,76,77,1 +block_hint,KeyedLoadIC_Megamorphic,368,369,0 +block_hint,KeyedLoadIC_Megamorphic,728,729,0 +block_hint,KeyedLoadIC_Megamorphic,93,94,1 +block_hint,KeyedLoadIC_Megamorphic,998,999,1 +block_hint,KeyedLoadIC_Megamorphic,294,295,0 +block_hint,KeyedLoadIC_Megamorphic,115,116,1 +block_hint,KeyedLoadIC_Megamorphic,179,180,0 +block_hint,KeyedLoadIC_Megamorphic,960,961,0 +block_hint,KeyedLoadIC_Megamorphic,817,818,1 +block_hint,KeyedLoadIC_Megamorphic,183,184,1 +block_hint,KeyedLoadIC_Megamorphic,681,682,0 +block_hint,KeyedLoadIC_Megamorphic,524,525,0 +block_hint,KeyedLoadIC_Megamorphic,1031,1032,0 +block_hint,KeyedLoadIC_Megamorphic,1001,1002,0 +block_hint,KeyedLoadIC_Megamorphic,685,686,1 +block_hint,KeyedLoadIC_Megamorphic,856,857,1 +block_hint,KeyedLoadIC_Megamorphic,1178,1179,0 +block_hint,KeyedLoadIC_Megamorphic,307,308,0 +block_hint,KeyedLoadIC_Megamorphic,858,859,0 +block_hint,KeyedLoadIC_Megamorphic,1062,1063,0 +block_hint,KeyedLoadIC_Megamorphic,187,188,0 +block_hint,KeyedLoadIC_Megamorphic,947,948,0 +block_hint,KeyedLoadIC_Megamorphic,1142,1143,0 +block_hint,KeyedLoadIC_Megamorphic,905,906,1 
+block_hint,KeyedLoadIC_Megamorphic,127,128,0 +block_hint,KeyedLoadIC_Megamorphic,718,719,0 block_hint,KeyedLoadIC_Megamorphic,1026,1027,0 -block_hint,KeyedLoadIC_Megamorphic,127,128,1 -block_hint,KeyedLoadIC_Megamorphic,207,208,0 -block_hint,KeyedLoadIC_Megamorphic,889,890,0 -block_hint,KeyedLoadIC_Megamorphic,1038,1039,0 -block_hint,KeyedLoadIC_Megamorphic,821,822,1 -block_hint,KeyedLoadIC_Megamorphic,272,273,1 -block_hint,KeyedLoadIC_Megamorphic,1125,1126,0 -block_hint,KeyedLoadIC_Megamorphic,274,275,0 -block_hint,KeyedLoadIC_Megamorphic,1008,1009,0 -block_hint,KeyedLoadIC_Megamorphic,1123,1124,1 -block_hint,KeyedLoadIC_Megamorphic,1006,1007,0 -block_hint,KeyedLoadIC_Megamorphic,1058,1059,1 -block_hint,KeyedLoadIC_Megamorphic,1049,1050,0 -block_hint,KeyedLoadIC_Megamorphic,1043,1044,1 -block_hint,KeyedLoadIC_Megamorphic,718,719,1 -block_hint,KeyedLoadIC_Megamorphic,969,970,1 +block_hint,KeyedLoadIC_Megamorphic,562,563,1 +block_hint,KeyedLoadIC_Megamorphic,321,322,0 block_hint,KeyedLoadIC_Megamorphic,714,715,0 -block_hint,KeyedLoadIC_Megamorphic,116,117,0 -block_hint,KeyedLoadIC_Megamorphic,847,848,0 -block_hint,KeyedLoadIC_Megamorphic,332,333,1 -block_hint,KeyedLoadIC_Megamorphic,833,834,1 -block_hint,KeyedLoadIC_Megamorphic,80,81,1 -block_hint,KeyedLoadIC_Megamorphic,362,363,0 -block_hint,KeyedLoadIC_Megamorphic,706,707,0 -block_hint,KeyedLoadIC_Megamorphic,97,98,1 -block_hint,KeyedLoadIC_Megamorphic,661,662,1 -block_hint,KeyedLoadIC_Megamorphic,119,120,1 -block_hint,KeyedLoadIC_Megamorphic,183,184,0 -block_hint,KeyedLoadIC_Megamorphic,926,927,0 -block_hint,KeyedLoadIC_Megamorphic,795,796,1 -block_hint,KeyedLoadIC_Megamorphic,187,188,1 -block_hint,KeyedLoadIC_Megamorphic,663,664,0 -block_hint,KeyedLoadIC_Megamorphic,510,511,0 -block_hint,KeyedLoadIC_Megamorphic,983,984,0 -block_hint,KeyedLoadIC_Megamorphic,957,958,0 -block_hint,KeyedLoadIC_Megamorphic,667,668,1 -block_hint,KeyedLoadIC_Megamorphic,301,302,1 -block_hint,KeyedLoadIC_Megamorphic,307,308,0 -block_hint,KeyedLoadIC_Megamorphic,830,831,0 -block_hint,KeyedLoadIC_Megamorphic,1014,1015,0 -block_hint,KeyedLoadIC_Megamorphic,191,192,0 -block_hint,KeyedLoadIC_Megamorphic,913,914,0 -block_hint,KeyedLoadIC_Megamorphic,1075,1076,0 -block_hint,KeyedLoadIC_Megamorphic,871,872,1 -block_hint,KeyedLoadIC_Megamorphic,131,132,0 -block_hint,KeyedLoadIC_Megamorphic,696,697,0 -block_hint,KeyedLoadIC_Megamorphic,978,979,0 -block_hint,KeyedLoadIC_Megamorphic,548,549,1 -block_hint,KeyedLoadIC_Megamorphic,317,318,0 -block_hint,KeyedLoadIC_Megamorphic,692,693,0 -block_hint,KeyedLoadIC_Megamorphic,550,551,0 -block_hint,KeyedLoadIC_Megamorphic,133,134,1 -block_hint,KeyedLoadIC_Megamorphic,558,559,0 -block_hint,KeyedLoadIC_Megamorphic,875,876,1 -block_hint,KeyedLoadIC_Megamorphic,478,479,0 -block_hint,KeyedLoadIC_Megamorphic,911,912,0 -block_hint,KeyedLoadIC_Megamorphic,700,701,1 -block_hint,KeyedLoadIC_Megamorphic,566,567,0 -block_hint,KeyedLoadIC_Megamorphic,217,218,0 -block_hint,KeyedLoadIC_Megamorphic,476,477,1 -block_hint,KeyedLoadIC_Megamorphic,568,569,1 -block_hint,KeyedLoadIC_Megamorphic,149,150,1 +block_hint,KeyedLoadIC_Megamorphic,564,565,0 +block_hint,KeyedLoadIC_Megamorphic,129,130,1 +block_hint,KeyedLoadIC_Megamorphic,572,573,0 +block_hint,KeyedLoadIC_Megamorphic,909,910,1 +block_hint,KeyedLoadIC_Megamorphic,492,493,0 +block_hint,KeyedLoadIC_Megamorphic,945,946,0 +block_hint,KeyedLoadIC_Megamorphic,722,723,1 +block_hint,KeyedLoadIC_Megamorphic,580,581,0 +block_hint,KeyedLoadIC_Megamorphic,213,214,0 
+block_hint,KeyedLoadIC_Megamorphic,490,491,1 +block_hint,KeyedLoadIC_Megamorphic,582,583,1 +block_hint,KeyedLoadIC_Megamorphic,145,146,1 block_hint,KeyedLoadICTrampoline,3,4,1 block_hint,KeyedLoadICTrampoline_Megamorphic,3,4,1 -block_hint,StoreGlobalIC,73,74,0 -block_hint,StoreGlobalIC,224,225,1 -block_hint,StoreGlobalIC,263,264,0 -block_hint,StoreGlobalIC,141,142,0 -block_hint,StoreGlobalIC,201,202,0 -block_hint,StoreGlobalIC,87,88,0 -block_hint,StoreGlobalIC,89,90,0 -block_hint,StoreGlobalIC,143,144,1 -block_hint,StoreGlobalIC,91,92,1 -block_hint,StoreGlobalIC,16,17,1 +block_hint,StoreGlobalIC,72,73,0 +block_hint,StoreGlobalIC,229,230,1 +block_hint,StoreGlobalIC,268,269,0 +block_hint,StoreGlobalIC,144,145,0 +block_hint,StoreGlobalIC,205,206,0 +block_hint,StoreGlobalIC,92,93,0 +block_hint,StoreGlobalIC,146,147,1 +block_hint,StoreGlobalIC,94,95,1 +block_hint,StoreGlobalIC,15,16,1 block_hint,StoreGlobalICTrampoline,3,4,1 -block_hint,StoreIC,328,329,1 -block_hint,StoreIC,140,141,0 -block_hint,StoreIC,71,72,0 -block_hint,StoreIC,202,203,0 -block_hint,StoreIC,204,205,1 -block_hint,StoreIC,374,375,1 -block_hint,StoreIC,357,358,1 -block_hint,StoreIC,234,235,1 -block_hint,StoreIC,236,237,1 -block_hint,StoreIC,76,77,1 -block_hint,StoreIC,244,245,1 -block_hint,StoreIC,104,105,0 -block_hint,StoreIC,37,38,0 -block_hint,StoreIC,210,211,1 -block_hint,StoreIC,142,143,0 +block_hint,StoreIC,338,339,1 block_hint,StoreIC,144,145,0 -block_hint,StoreIC,18,19,1 +block_hint,StoreIC,69,70,0 +block_hint,StoreIC,208,209,0 +block_hint,StoreIC,210,211,1 +block_hint,StoreIC,395,396,1 +block_hint,StoreIC,386,387,0 +block_hint,StoreIC,240,241,1 +block_hint,StoreIC,242,243,1 +block_hint,StoreIC,74,75,1 +block_hint,StoreIC,250,251,1 +block_hint,StoreIC,108,109,0 +block_hint,StoreIC,35,36,0 +block_hint,StoreIC,316,317,1 block_hint,StoreIC,92,93,0 -block_hint,StoreIC,20,21,0 -block_hint,StoreIC,347,348,0 -block_hint,StoreIC,154,155,1 +block_hint,StoreIC,146,147,0 +block_hint,StoreIC,94,95,1 +block_hint,StoreIC,150,151,0 +block_hint,StoreIC,16,17,1 +block_hint,StoreIC,96,97,0 +block_hint,StoreIC,18,19,0 +block_hint,StoreIC,359,360,0 +block_hint,StoreIC,160,161,1 +block_hint,StoreIC,162,163,1 +block_hint,StoreIC,327,328,1 +block_hint,StoreIC,164,165,0 +block_hint,StoreIC,105,106,0 +block_hint,StoreIC,103,104,1 +block_hint,StoreIC,320,321,1 +block_hint,StoreIC,23,24,0 +block_hint,StoreIC,152,153,1 +block_hint,StoreIC,287,288,0 +block_hint,StoreIC,154,155,0 block_hint,StoreIC,156,157,1 -block_hint,StoreIC,317,318,1 +block_hint,StoreIC,323,324,1 +block_hint,StoreIC,25,26,1 block_hint,StoreIC,158,159,0 -block_hint,StoreIC,101,102,0 -block_hint,StoreIC,99,100,1 -block_hint,StoreIC,310,311,1 -block_hint,StoreIC,25,26,0 -block_hint,StoreIC,146,147,1 -block_hint,StoreIC,281,282,0 -block_hint,StoreIC,148,149,0 -block_hint,StoreIC,150,151,1 -block_hint,StoreIC,313,314,1 -block_hint,StoreIC,27,28,1 -block_hint,StoreIC,152,153,0 -block_hint,StoreIC,315,316,1 -block_hint,StoreIC,33,34,0 -block_hint,StoreIC,31,32,1 -block_hint,StoreIC,223,224,1 -block_hint,StoreIC,65,66,0 -block_hint,StoreIC,285,286,0 -block_hint,StoreIC,160,161,1 -block_hint,StoreIC,287,288,1 -block_hint,StoreIC,206,207,1 -block_hint,StoreIC,240,241,0 -block_hint,StoreIC,170,171,0 +block_hint,StoreIC,325,326,1 +block_hint,StoreIC,31,32,0 +block_hint,StoreIC,29,30,1 +block_hint,StoreIC,227,228,1 +block_hint,StoreIC,63,64,0 +block_hint,StoreIC,291,292,0 +block_hint,StoreIC,166,167,1 +block_hint,StoreIC,293,294,1 +block_hint,StoreIC,312,313,1 +block_hint,StoreIC,76,77,0 
+block_hint,StoreIC,246,247,0 +block_hint,StoreIC,176,177,0 +block_hint,StoreIC,43,44,1 +block_hint,StoreIC,112,113,0 +block_hint,StoreIC,178,179,0 +block_hint,StoreIC,271,272,0 +block_hint,StoreIC,125,126,1 +block_hint,StoreIC,371,372,0 +block_hint,StoreIC,267,268,1 block_hint,StoreIC,45,46,1 -block_hint,StoreIC,108,109,0 -block_hint,StoreIC,172,173,0 -block_hint,StoreIC,265,266,0 -block_hint,StoreIC,121,122,1 -block_hint,StoreIC,359,360,0 -block_hint,StoreIC,261,262,1 block_hint,StoreIC,47,48,1 +block_hint,StoreIC,121,122,0 block_hint,StoreIC,49,50,1 -block_hint,StoreIC,117,118,0 +block_hint,StoreIC,123,124,0 block_hint,StoreIC,51,52,1 -block_hint,StoreIC,119,120,0 +block_hint,StoreIC,80,81,0 block_hint,StoreIC,53,54,1 block_hint,StoreIC,55,56,1 +block_hint,StoreIC,333,334,0 block_hint,StoreIC,57,58,1 -block_hint,StoreIC,323,324,0 -block_hint,StoreIC,59,60,1 -block_hint,StoreIC,178,179,0 -block_hint,StoreIC,180,181,0 -block_hint,StoreIC,225,226,0 -block_hint,StoreIC,129,130,0 -block_hint,StoreIC,293,294,0 -block_hint,StoreIC,184,185,1 +block_hint,StoreIC,184,185,0 block_hint,StoreIC,186,187,0 -block_hint,StoreIC,275,276,0 -block_hint,StoreIC,353,354,0 -block_hint,StoreIC,295,296,1 -block_hint,StoreIC,188,189,1 +block_hint,StoreIC,229,230,0 +block_hint,StoreIC,133,134,0 +block_hint,StoreIC,299,300,0 +block_hint,StoreIC,190,191,1 +block_hint,StoreIC,192,193,0 +block_hint,StoreIC,281,282,0 +block_hint,StoreIC,365,366,0 +block_hint,StoreIC,301,302,1 block_hint,StoreIC,194,195,1 -block_hint,StoreIC,196,197,0 -block_hint,StoreIC,198,199,0 block_hint,StoreIC,200,201,1 -block_hint,StoreIC,192,193,1 -block_hint,StoreIC,190,191,0 -block_hint,StoreIC,372,373,0 -block_hint,StoreIC,376,377,1 -block_hint,StoreIC,345,346,1 -block_hint,StoreIC,306,307,1 -block_hint,StoreIC,82,83,0 -block_hint,StoreIC,135,136,0 -block_hint,StoreIC,227,228,1 +block_hint,StoreIC,202,203,0 +block_hint,StoreIC,204,205,0 +block_hint,StoreIC,206,207,1 +block_hint,StoreIC,198,199,1 +block_hint,StoreIC,196,197,0 +block_hint,StoreIC,384,385,0 +block_hint,StoreIC,388,389,1 +block_hint,StoreIC,357,358,1 +block_hint,StoreIC,314,315,1 +block_hint,StoreIC,84,85,0 +block_hint,StoreIC,139,140,0 +block_hint,StoreIC,231,232,1 block_hint,StoreICTrampoline,3,4,1 -block_hint,DefineNamedOwnIC,320,321,1 -block_hint,DefineNamedOwnIC,139,140,0 -block_hint,DefineNamedOwnIC,295,296,1 -block_hint,DefineNamedOwnIC,197,198,0 -block_hint,DefineNamedOwnIC,71,72,0 -block_hint,DefineNamedOwnIC,199,200,0 -block_hint,DefineNamedOwnIC,317,318,0 -block_hint,DefineNamedOwnIC,238,239,1 -block_hint,DefineNamedOwnIC,89,90,0 -block_hint,DefineNamedOwnIC,19,20,0 -block_hint,DefineNamedOwnIC,339,340,0 -block_hint,DefineNamedOwnIC,277,278,1 -block_hint,DefineNamedOwnIC,151,152,1 -block_hint,DefineNamedOwnIC,153,154,1 -block_hint,DefineNamedOwnIC,249,250,1 -block_hint,DefineNamedOwnIC,34,35,0 -block_hint,DefineNamedOwnIC,241,242,1 -block_hint,DefineNamedOwnIC,24,25,0 -block_hint,DefineNamedOwnIC,143,144,1 -block_hint,DefineNamedOwnIC,341,342,0 -block_hint,DefineNamedOwnIC,275,276,0 +block_hint,DefineNamedOwnIC,329,330,1 block_hint,DefineNamedOwnIC,145,146,0 -block_hint,DefineNamedOwnIC,147,148,1 +block_hint,DefineNamedOwnIC,300,301,1 +block_hint,DefineNamedOwnIC,203,204,0 +block_hint,DefineNamedOwnIC,69,70,0 +block_hint,DefineNamedOwnIC,205,206,0 +block_hint,DefineNamedOwnIC,326,327,0 block_hint,DefineNamedOwnIC,243,244,1 -block_hint,DefineNamedOwnIC,28,29,0 -block_hint,DefineNamedOwnIC,149,150,0 -block_hint,DefineNamedOwnIC,245,246,1 
+block_hint,DefineNamedOwnIC,93,94,0 +block_hint,DefineNamedOwnIC,17,18,0 +block_hint,DefineNamedOwnIC,350,351,0 +block_hint,DefineNamedOwnIC,282,283,1 +block_hint,DefineNamedOwnIC,157,158,1 +block_hint,DefineNamedOwnIC,159,160,1 +block_hint,DefineNamedOwnIC,254,255,1 block_hint,DefineNamedOwnIC,32,33,0 -block_hint,KeyedStoreIC,391,392,1 +block_hint,DefineNamedOwnIC,246,247,1 +block_hint,DefineNamedOwnIC,22,23,0 +block_hint,DefineNamedOwnIC,149,150,1 +block_hint,DefineNamedOwnIC,352,353,0 +block_hint,DefineNamedOwnIC,280,281,0 +block_hint,DefineNamedOwnIC,151,152,0 +block_hint,DefineNamedOwnIC,153,154,1 +block_hint,DefineNamedOwnIC,248,249,1 +block_hint,DefineNamedOwnIC,26,27,0 +block_hint,DefineNamedOwnIC,155,156,0 +block_hint,DefineNamedOwnIC,250,251,1 +block_hint,DefineNamedOwnIC,30,31,0 +block_hint,KeyedStoreIC,401,402,1 +block_hint,KeyedStoreIC,173,174,0 block_hint,KeyedStoreIC,169,170,0 -block_hint,KeyedStoreIC,165,166,0 -block_hint,KeyedStoreIC,233,234,0 -block_hint,KeyedStoreIC,167,168,1 -block_hint,KeyedStoreIC,85,86,1 -block_hint,KeyedStoreIC,89,90,1 -block_hint,KeyedStoreIC,388,389,1 -block_hint,KeyedStoreIC,105,106,0 -block_hint,KeyedStoreIC,24,25,0 -block_hint,KeyedStoreIC,420,421,0 -block_hint,KeyedStoreIC,175,176,1 -block_hint,KeyedStoreIC,422,423,0 +block_hint,KeyedStoreIC,239,240,0 +block_hint,KeyedStoreIC,171,172,1 +block_hint,KeyedStoreIC,83,84,1 +block_hint,KeyedStoreIC,87,88,1 +block_hint,KeyedStoreIC,398,399,1 +block_hint,KeyedStoreIC,109,110,0 +block_hint,KeyedStoreIC,22,23,0 +block_hint,KeyedStoreIC,432,433,0 +block_hint,KeyedStoreIC,181,182,1 +block_hint,KeyedStoreIC,434,435,0 +block_hint,KeyedStoreIC,351,352,0 +block_hint,KeyedStoreIC,298,299,1 +block_hint,KeyedStoreIC,31,32,0 +block_hint,KeyedStoreIC,272,273,0 +block_hint,KeyedStoreIC,355,356,0 +block_hint,KeyedStoreIC,195,196,1 +block_hint,KeyedStoreIC,260,261,1 +block_hint,KeyedStoreIC,436,437,1 +block_hint,KeyedStoreIC,329,330,0 +block_hint,KeyedStoreIC,137,138,1 +block_hint,KeyedStoreIC,45,46,1 +block_hint,KeyedStoreIC,197,198,0 +block_hint,KeyedStoreIC,47,48,0 +block_hint,KeyedStoreIC,215,216,0 +block_hint,KeyedStoreIC,361,362,1 +block_hint,KeyedStoreIC,363,364,0 +block_hint,KeyedStoreIC,221,222,1 +block_hint,KeyedStoreIC,223,224,0 block_hint,KeyedStoreIC,345,346,0 -block_hint,KeyedStoreIC,292,293,1 -block_hint,KeyedStoreIC,33,34,0 -block_hint,KeyedStoreIC,268,269,0 -block_hint,KeyedStoreIC,349,350,0 -block_hint,KeyedStoreIC,189,190,1 -block_hint,KeyedStoreIC,256,257,1 -block_hint,KeyedStoreIC,424,425,1 -block_hint,KeyedStoreIC,323,324,0 -block_hint,KeyedStoreIC,133,134,1 -block_hint,KeyedStoreIC,47,48,1 -block_hint,KeyedStoreIC,191,192,0 -block_hint,KeyedStoreIC,49,50,0 -block_hint,KeyedStoreIC,209,210,0 -block_hint,KeyedStoreIC,355,356,1 -block_hint,KeyedStoreIC,357,358,0 -block_hint,KeyedStoreIC,215,216,1 -block_hint,KeyedStoreIC,217,218,0 -block_hint,KeyedStoreIC,339,340,0 -block_hint,KeyedStoreIC,361,362,0 -block_hint,KeyedStoreIC,426,427,0 -block_hint,KeyedStoreIC,359,360,1 -block_hint,KeyedStoreIC,225,226,1 -block_hint,KeyedStoreIC,227,228,0 -block_hint,KeyedStoreIC,229,230,0 +block_hint,KeyedStoreIC,367,368,0 +block_hint,KeyedStoreIC,438,439,0 +block_hint,KeyedStoreIC,365,366,1 block_hint,KeyedStoreIC,231,232,1 -block_hint,KeyedStoreIC,441,442,0 -block_hint,KeyedStoreIC,418,419,1 -block_hint,KeyedStoreIC,274,275,0 -block_hint,KeyedStoreIC,369,370,1 -block_hint,KeyedStoreIC,95,96,0 -block_hint,KeyedStoreIC,160,161,0 +block_hint,KeyedStoreIC,233,234,0 +block_hint,KeyedStoreIC,235,236,0 
+block_hint,KeyedStoreIC,237,238,1 +block_hint,KeyedStoreIC,453,454,0 +block_hint,KeyedStoreIC,430,431,1 +block_hint,KeyedStoreIC,278,279,0 +block_hint,KeyedStoreIC,377,378,1 +block_hint,KeyedStoreIC,97,98,0 +block_hint,KeyedStoreIC,164,165,0 block_hint,KeyedStoreICTrampoline,3,4,1 -block_hint,DefineKeyedOwnIC,383,384,1 -block_hint,DefineKeyedOwnIC,168,169,0 -block_hint,DefineKeyedOwnIC,164,165,1 +block_hint,DefineKeyedOwnIC,392,393,1 +block_hint,DefineKeyedOwnIC,174,175,0 +block_hint,DefineKeyedOwnIC,170,171,1 block_hint,StoreInArrayLiteralIC,30,31,1 block_hint,StoreInArrayLiteralIC,19,20,0 block_hint,StoreInArrayLiteralIC,23,24,0 @@ -1627,29 +1696,29 @@ block_hint,StoreInArrayLiteralIC,14,15,1 block_hint,StoreInArrayLiteralIC,16,17,1 block_hint,StoreInArrayLiteralIC,8,9,1 block_hint,StoreInArrayLiteralIC,4,5,1 -block_hint,LoadGlobalIC,61,62,0 -block_hint,LoadGlobalIC,15,16,1 -block_hint,LoadGlobalIC,17,18,1 -block_hint,LoadGlobalIC,19,20,1 -block_hint,LoadGlobalIC,188,189,0 -block_hint,LoadGlobalIC,13,14,0 -block_hint,LoadGlobalIC,109,110,1 -block_hint,LoadGlobalICInsideTypeof,61,62,0 -block_hint,LoadGlobalICInsideTypeof,190,191,1 -block_hint,LoadGlobalICInsideTypeof,13,14,0 -block_hint,LoadGlobalICInsideTypeof,109,110,0 -block_hint,LoadGlobalICInsideTypeof,21,22,1 -block_hint,LoadGlobalICInsideTypeof,23,24,1 -block_hint,LoadGlobalICInsideTypeof,249,250,1 -block_hint,LoadGlobalICInsideTypeof,205,206,0 -block_hint,LoadGlobalICInsideTypeof,59,60,0 -block_hint,LoadGlobalICInsideTypeof,217,218,0 -block_hint,LoadGlobalICInsideTypeof,111,112,1 -block_hint,LoadGlobalICInsideTypeof,25,26,1 -block_hint,LoadGlobalICInsideTypeof,226,227,1 -block_hint,LoadGlobalICInsideTypeof,196,197,0 -block_hint,LoadGlobalICInsideTypeof,44,45,0 -block_hint,LoadGlobalICInsideTypeof,42,43,1 +block_hint,LoadGlobalIC,60,61,0 +block_hint,LoadGlobalIC,14,15,1 +block_hint,LoadGlobalIC,16,17,1 +block_hint,LoadGlobalIC,18,19,1 +block_hint,LoadGlobalIC,191,192,0 +block_hint,LoadGlobalIC,12,13,0 +block_hint,LoadGlobalIC,111,112,1 +block_hint,LoadGlobalICInsideTypeof,60,61,0 +block_hint,LoadGlobalICInsideTypeof,193,194,1 +block_hint,LoadGlobalICInsideTypeof,12,13,0 +block_hint,LoadGlobalICInsideTypeof,111,112,0 +block_hint,LoadGlobalICInsideTypeof,20,21,1 +block_hint,LoadGlobalICInsideTypeof,22,23,1 +block_hint,LoadGlobalICInsideTypeof,254,255,1 +block_hint,LoadGlobalICInsideTypeof,208,209,0 +block_hint,LoadGlobalICInsideTypeof,58,59,0 +block_hint,LoadGlobalICInsideTypeof,220,221,0 +block_hint,LoadGlobalICInsideTypeof,113,114,1 +block_hint,LoadGlobalICInsideTypeof,24,25,1 +block_hint,LoadGlobalICInsideTypeof,229,230,1 +block_hint,LoadGlobalICInsideTypeof,199,200,0 +block_hint,LoadGlobalICInsideTypeof,43,44,0 +block_hint,LoadGlobalICInsideTypeof,41,42,1 block_hint,LoadGlobalICTrampoline,3,4,1 block_hint,LoadGlobalICInsideTypeofTrampoline,3,4,1 block_hint,LookupGlobalICBaseline,3,4,1 @@ -1658,50 +1727,54 @@ block_hint,LookupGlobalICBaseline,5,6,1 block_hint,LookupGlobalICBaseline,11,12,1 block_hint,LookupGlobalICBaseline,7,8,1 block_hint,LookupGlobalICBaseline,9,10,0 -block_hint,KeyedHasIC,251,252,1 +block_hint,KeyedHasIC,261,262,1 +block_hint,KeyedHasIC,125,126,0 block_hint,KeyedHasIC,117,118,0 -block_hint,KeyedHasIC,109,110,0 -block_hint,KeyedHasIC,233,234,0 -block_hint,KeyedHasIC,157,158,0 -block_hint,KeyedHasIC,81,82,0 -block_hint,KeyedHasIC,111,112,1 -block_hint,KeyedHasIC,159,160,0 -block_hint,KeyedHasIC,115,116,1 -block_hint,KeyedHasIC,83,84,1 -block_hint,KeyedHasIC,193,194,0 -block_hint,KeyedHasIC,215,216,0 
-block_hint,KeyedHasIC,273,274,0 -block_hint,KeyedHasIC,271,272,0 -block_hint,KeyedHasIC,153,154,1 -block_hint,KeyedHasIC,63,64,0 -block_hint,KeyedHasIC_Megamorphic,133,134,1 -block_hint,KeyedHasIC_Megamorphic,135,136,1 -block_hint,KeyedHasIC_Megamorphic,253,254,0 -block_hint,KeyedHasIC_Megamorphic,207,208,1 -block_hint,KeyedHasIC_Megamorphic,245,246,0 -block_hint,KeyedHasIC_Megamorphic,93,94,0 -block_hint,KeyedHasIC_Megamorphic,228,229,1 -block_hint,KeyedHasIC_Megamorphic,119,120,1 +block_hint,KeyedHasIC,239,240,0 +block_hint,KeyedHasIC,165,166,0 +block_hint,KeyedHasIC,77,78,0 +block_hint,KeyedHasIC,119,120,1 +block_hint,KeyedHasIC,167,168,0 +block_hint,KeyedHasIC,123,124,1 +block_hint,KeyedHasIC,79,80,1 +block_hint,KeyedHasIC,197,198,0 +block_hint,KeyedHasIC,221,222,0 +block_hint,KeyedHasIC,283,284,0 +block_hint,KeyedHasIC,281,282,0 +block_hint,KeyedHasIC,161,162,1 +block_hint,KeyedHasIC,61,62,0 block_hint,KeyedHasIC_Megamorphic,137,138,1 -block_hint,KeyedHasIC_Megamorphic,195,196,0 -block_hint,KeyedHasIC_Megamorphic,197,198,0 +block_hint,KeyedHasIC_Megamorphic,139,140,1 +block_hint,KeyedHasIC_Megamorphic,263,264,0 +block_hint,KeyedHasIC_Megamorphic,211,212,1 +block_hint,KeyedHasIC_Megamorphic,254,255,0 block_hint,KeyedHasIC_Megamorphic,97,98,0 -block_hint,KeyedHasIC_Megamorphic,95,96,0 -block_hint,KeyedHasIC_Megamorphic,241,242,0 -block_hint,KeyedHasIC_Megamorphic,232,233,0 -block_hint,KeyedHasIC_Megamorphic,249,250,0 +block_hint,KeyedHasIC_Megamorphic,234,235,1 +block_hint,KeyedHasIC_Megamorphic,123,124,1 +block_hint,KeyedHasIC_Megamorphic,141,142,1 +block_hint,KeyedHasIC_Megamorphic,199,200,0 block_hint,KeyedHasIC_Megamorphic,201,202,0 -block_hint,KeyedHasIC_Megamorphic,47,48,0 -block_hint,KeyedHasIC_Megamorphic,61,62,0 -block_hint,KeyedHasIC_Megamorphic,103,104,1 -block_hint,KeyedHasIC_Megamorphic,258,259,0 +block_hint,KeyedHasIC_Megamorphic,101,102,0 +block_hint,KeyedHasIC_Megamorphic,99,100,0 +block_hint,KeyedHasIC_Megamorphic,250,251,0 +block_hint,KeyedHasIC_Megamorphic,270,271,0 +block_hint,KeyedHasIC_Megamorphic,106,107,0 +block_hint,KeyedHasIC_Megamorphic,277,278,0 +block_hint,KeyedHasIC_Megamorphic,282,283,0 +block_hint,KeyedHasIC_Megamorphic,268,269,0 +block_hint,KeyedHasIC_Megamorphic,203,204,0 +block_hint,KeyedHasIC_Megamorphic,44,45,0 +block_hint,KeyedHasIC_Megamorphic,63,64,0 +block_hint,KeyedHasIC_Megamorphic,239,240,1 +block_hint,KeyedHasIC_Megamorphic,48,49,0 +block_hint,KeyedHasIC_Megamorphic,272,273,0 +block_hint,KeyedHasIC_Megamorphic,228,229,0 +block_hint,KeyedHasIC_Megamorphic,87,88,0 +block_hint,KeyedHasIC_Megamorphic,155,156,0 +block_hint,KeyedHasIC_Megamorphic,196,197,0 +block_hint,KeyedHasIC_Megamorphic,59,60,0 block_hint,KeyedHasIC_Megamorphic,222,223,0 -block_hint,KeyedHasIC_Megamorphic,83,84,0 -block_hint,KeyedHasIC_Megamorphic,151,152,0 -block_hint,KeyedHasIC_Megamorphic,192,193,0 -block_hint,KeyedHasIC_Megamorphic,58,59,0 -block_hint,KeyedHasIC_Megamorphic,216,217,0 -block_hint,KeyedHasIC_Megamorphic,56,57,1 +block_hint,KeyedHasIC_Megamorphic,57,58,1 block_hint,IterableToList,42,43,1 block_hint,IterableToList,44,45,1 block_hint,IterableToList,46,47,1 @@ -1762,21 +1835,21 @@ block_hint,FindOrderedHashMapEntry,22,23,0 block_hint,FindOrderedHashMapEntry,68,69,0 block_hint,FindOrderedHashMapEntry,58,59,1 block_hint,FindOrderedHashMapEntry,60,61,1 -block_hint,MapConstructor,323,324,1 -block_hint,MapConstructor,243,244,1 -block_hint,MapConstructor,100,101,0 +block_hint,MapConstructor,328,329,1 +block_hint,MapConstructor,248,249,1 
+block_hint,MapConstructor,105,106,0 block_hint,MapConstructor,13,14,1 -block_hint,MapConstructor,265,266,1 -block_hint,MapConstructor,205,206,1 -block_hint,MapConstructor,83,84,0 -block_hint,MapConstructor,85,86,1 -block_hint,MapConstructor,267,268,1 -block_hint,MapConstructor,300,301,0 -block_hint,MapConstructor,314,315,0 -block_hint,MapConstructor,215,216,0 -block_hint,MapConstructor,104,105,0 -block_hint,MapConstructor,233,234,1 -block_hint,MapConstructor,98,99,1 +block_hint,MapConstructor,270,271,1 +block_hint,MapConstructor,211,212,1 +block_hint,MapConstructor,86,87,0 +block_hint,MapConstructor,88,89,1 +block_hint,MapConstructor,272,273,1 +block_hint,MapConstructor,308,309,0 +block_hint,MapConstructor,319,320,0 +block_hint,MapConstructor,220,221,0 +block_hint,MapConstructor,109,110,0 +block_hint,MapConstructor,238,239,1 +block_hint,MapConstructor,103,104,1 block_hint,MapPrototypeSet,98,99,1 block_hint,MapPrototypeSet,62,63,1 block_hint,MapPrototypeSet,64,65,1 @@ -1849,91 +1922,91 @@ block_hint,MapIteratorPrototypeNext,15,16,1 block_hint,MapIteratorPrototypeNext,17,18,1 block_hint,MapIteratorPrototypeNext,25,26,1 block_hint,SameValueNumbersOnly,4,5,1 -block_hint,Add_Baseline,32,33,0 -block_hint,Add_Baseline,21,22,0 -block_hint,Add_Baseline,8,9,1 -block_hint,Add_Baseline,58,59,0 -block_hint,Add_Baseline,35,36,1 -block_hint,Add_Baseline,47,48,0 -block_hint,Add_Baseline,17,18,1 -block_hint,Add_Baseline,53,54,1 -block_hint,Add_Baseline,19,20,1 -block_hint,Add_Baseline,26,27,1 -block_hint,Add_Baseline,10,11,1 -block_hint,AddSmi_Baseline,32,33,0 -block_hint,AddSmi_Baseline,21,22,0 -block_hint,AddSmi_Baseline,8,9,1 -block_hint,AddSmi_Baseline,49,50,1 -block_hint,AddSmi_Baseline,26,27,1 -block_hint,AddSmi_Baseline,10,11,1 -block_hint,Subtract_Baseline,21,22,0 -block_hint,Subtract_Baseline,8,9,1 -block_hint,Subtract_Baseline,46,47,1 -block_hint,Subtract_Baseline,56,57,1 -block_hint,Subtract_Baseline,54,55,0 -block_hint,Subtract_Baseline,42,43,0 -block_hint,Subtract_Baseline,48,49,1 -block_hint,Subtract_Baseline,17,18,1 +block_hint,Add_Baseline,39,40,0 +block_hint,Add_Baseline,25,26,0 +block_hint,Add_Baseline,9,10,1 +block_hint,Add_Baseline,84,85,0 +block_hint,Add_Baseline,46,47,1 +block_hint,Add_Baseline,56,57,0 +block_hint,Add_Baseline,20,21,1 +block_hint,Add_Baseline,64,65,1 +block_hint,Add_Baseline,23,24,1 +block_hint,Add_Baseline,31,32,1 +block_hint,Add_Baseline,11,12,1 +block_hint,AddSmi_Baseline,39,40,0 +block_hint,AddSmi_Baseline,25,26,0 +block_hint,AddSmi_Baseline,9,10,1 +block_hint,AddSmi_Baseline,60,61,1 +block_hint,AddSmi_Baseline,31,32,1 +block_hint,AddSmi_Baseline,11,12,1 +block_hint,Subtract_Baseline,31,32,0 +block_hint,Subtract_Baseline,11,12,1 +block_hint,Subtract_Baseline,60,61,1 +block_hint,Subtract_Baseline,82,83,1 +block_hint,Subtract_Baseline,76,77,0 +block_hint,Subtract_Baseline,53,54,0 +block_hint,Subtract_Baseline,62,63,1 block_hint,Subtract_Baseline,23,24,1 -block_hint,Subtract_Baseline,10,11,1 -block_hint,SubtractSmi_Baseline,21,22,0 -block_hint,SubtractSmi_Baseline,8,9,1 -block_hint,SubtractSmi_Baseline,38,39,1 -block_hint,SubtractSmi_Baseline,23,24,1 -block_hint,SubtractSmi_Baseline,10,11,1 -block_hint,Multiply_Baseline,62,63,0 -block_hint,Multiply_Baseline,40,41,0 -block_hint,Multiply_Baseline,48,49,0 -block_hint,Multiply_Baseline,54,55,1 -block_hint,Multiply_Baseline,50,51,1 -block_hint,Multiply_Baseline,6,7,1 -block_hint,Multiply_Baseline,42,43,1 -block_hint,Multiply_Baseline,60,61,1 -block_hint,Multiply_Baseline,44,45,1 -block_hint,Multiply_Baseline,19,20,1 
-block_hint,Multiply_Baseline,8,9,1 -block_hint,MultiplySmi_Baseline,54,55,0 -block_hint,MultiplySmi_Baseline,40,41,0 -block_hint,MultiplySmi_Baseline,42,43,0 -block_hint,MultiplySmi_Baseline,44,45,1 -block_hint,MultiplySmi_Baseline,17,18,0 -block_hint,MultiplySmi_Baseline,6,7,1 -block_hint,MultiplySmi_Baseline,33,34,1 -block_hint,MultiplySmi_Baseline,19,20,1 -block_hint,MultiplySmi_Baseline,8,9,1 -block_hint,Divide_Baseline,52,53,0 -block_hint,Divide_Baseline,54,55,0 -block_hint,Divide_Baseline,41,42,0 -block_hint,Divide_Baseline,26,27,1 -block_hint,Divide_Baseline,6,7,1 -block_hint,Divide_Baseline,45,46,1 -block_hint,Divide_Baseline,60,61,1 -block_hint,Divide_Baseline,47,48,1 -block_hint,Divide_Baseline,33,34,0 -block_hint,Divide_Baseline,14,15,1 -block_hint,Divide_Baseline,20,21,1 -block_hint,Divide_Baseline,8,9,1 -block_hint,DivideSmi_Baseline,46,47,0 -block_hint,DivideSmi_Baseline,54,55,0 -block_hint,DivideSmi_Baseline,48,49,0 -block_hint,DivideSmi_Baseline,41,42,0 -block_hint,DivideSmi_Baseline,26,27,1 -block_hint,DivideSmi_Baseline,6,7,1 -block_hint,DivideSmi_Baseline,35,36,1 -block_hint,DivideSmi_Baseline,20,21,1 -block_hint,DivideSmi_Baseline,8,9,1 -block_hint,Modulus_Baseline,61,62,0 -block_hint,Modulus_Baseline,57,58,0 -block_hint,Modulus_Baseline,43,44,1 -block_hint,Modulus_Baseline,38,39,1 -block_hint,Modulus_Baseline,17,18,0 +block_hint,Subtract_Baseline,33,34,1 +block_hint,Subtract_Baseline,13,14,1 +block_hint,SubtractSmi_Baseline,31,32,0 +block_hint,SubtractSmi_Baseline,11,12,1 +block_hint,SubtractSmi_Baseline,51,52,1 +block_hint,SubtractSmi_Baseline,33,34,1 +block_hint,SubtractSmi_Baseline,13,14,1 +block_hint,Multiply_Baseline,100,101,0 +block_hint,Multiply_Baseline,61,62,0 +block_hint,Multiply_Baseline,77,78,0 +block_hint,Multiply_Baseline,87,88,1 +block_hint,Multiply_Baseline,79,80,1 +block_hint,Multiply_Baseline,13,14,1 +block_hint,Multiply_Baseline,63,64,1 +block_hint,Multiply_Baseline,93,94,1 +block_hint,Multiply_Baseline,65,66,1 +block_hint,Multiply_Baseline,34,35,1 +block_hint,Multiply_Baseline,15,16,1 +block_hint,MultiplySmi_Baseline,92,93,0 +block_hint,MultiplySmi_Baseline,61,62,0 +block_hint,MultiplySmi_Baseline,71,72,0 +block_hint,MultiplySmi_Baseline,73,74,1 +block_hint,MultiplySmi_Baseline,32,33,0 +block_hint,MultiplySmi_Baseline,13,14,1 +block_hint,MultiplySmi_Baseline,51,52,1 +block_hint,MultiplySmi_Baseline,34,35,1 +block_hint,MultiplySmi_Baseline,15,16,1 +block_hint,Divide_Baseline,69,70,0 +block_hint,Divide_Baseline,71,72,0 +block_hint,Divide_Baseline,50,51,0 +block_hint,Divide_Baseline,31,32,1 +block_hint,Divide_Baseline,10,11,1 +block_hint,Divide_Baseline,54,55,1 +block_hint,Divide_Baseline,79,80,1 +block_hint,Divide_Baseline,56,57,1 +block_hint,Divide_Baseline,39,40,0 +block_hint,Divide_Baseline,19,20,1 +block_hint,Divide_Baseline,25,26,1 +block_hint,Divide_Baseline,12,13,1 +block_hint,DivideSmi_Baseline,63,64,0 +block_hint,DivideSmi_Baseline,76,77,0 +block_hint,DivideSmi_Baseline,65,66,0 +block_hint,DivideSmi_Baseline,50,51,0 +block_hint,DivideSmi_Baseline,31,32,1 +block_hint,DivideSmi_Baseline,10,11,1 +block_hint,DivideSmi_Baseline,41,42,1 +block_hint,DivideSmi_Baseline,25,26,1 +block_hint,DivideSmi_Baseline,12,13,1 +block_hint,Modulus_Baseline,76,77,0 +block_hint,Modulus_Baseline,72,73,0 +block_hint,Modulus_Baseline,55,56,1 +block_hint,Modulus_Baseline,50,51,1 +block_hint,Modulus_Baseline,18,19,0 block_hint,Modulus_Baseline,6,7,1 -block_hint,ModulusSmi_Baseline,43,44,1 -block_hint,ModulusSmi_Baseline,38,39,1 -block_hint,ModulusSmi_Baseline,17,18,0 
+block_hint,ModulusSmi_Baseline,55,56,1 +block_hint,ModulusSmi_Baseline,50,51,1 +block_hint,ModulusSmi_Baseline,18,19,0 block_hint,ModulusSmi_Baseline,6,7,1 -block_hint,ModulusSmi_Baseline,32,33,1 -block_hint,ModulusSmi_Baseline,19,20,1 +block_hint,ModulusSmi_Baseline,40,41,1 +block_hint,ModulusSmi_Baseline,20,21,1 block_hint,ModulusSmi_Baseline,8,9,1 block_hint,BitwiseAnd_Baseline,35,36,0 block_hint,BitwiseAnd_Baseline,23,24,1 @@ -2000,23 +2073,23 @@ block_hint,ShiftRightLogicalSmi_Baseline,25,26,1 block_hint,ShiftRightLogicalSmi_Baseline,33,34,0 block_hint,ShiftRightLogicalSmi_Baseline,23,24,0 block_hint,ShiftRightLogicalSmi_Baseline,9,10,1 -block_hint,Add_WithFeedback,49,50,1 -block_hint,Add_WithFeedback,60,61,0 -block_hint,Add_WithFeedback,58,59,0 -block_hint,Add_WithFeedback,45,46,1 -block_hint,Add_WithFeedback,35,36,1 -block_hint,Add_WithFeedback,28,29,0 -block_hint,Add_WithFeedback,19,20,1 -block_hint,Subtract_WithFeedback,52,53,1 -block_hint,Subtract_WithFeedback,56,57,0 -block_hint,Subtract_WithFeedback,54,55,0 -block_hint,Subtract_WithFeedback,42,43,0 -block_hint,Subtract_WithFeedback,17,18,1 -block_hint,Modulus_WithFeedback,61,62,0 -block_hint,Modulus_WithFeedback,57,58,0 -block_hint,Modulus_WithFeedback,43,44,1 -block_hint,Modulus_WithFeedback,38,39,1 -block_hint,Modulus_WithFeedback,17,18,0 +block_hint,Add_WithFeedback,60,61,1 +block_hint,Add_WithFeedback,86,87,0 +block_hint,Add_WithFeedback,84,85,0 +block_hint,Add_WithFeedback,54,55,1 +block_hint,Add_WithFeedback,46,47,1 +block_hint,Add_WithFeedback,33,34,0 +block_hint,Add_WithFeedback,23,24,1 +block_hint,Subtract_WithFeedback,74,75,1 +block_hint,Subtract_WithFeedback,82,83,0 +block_hint,Subtract_WithFeedback,76,77,0 +block_hint,Subtract_WithFeedback,53,54,0 +block_hint,Subtract_WithFeedback,23,24,1 +block_hint,Modulus_WithFeedback,76,77,0 +block_hint,Modulus_WithFeedback,72,73,0 +block_hint,Modulus_WithFeedback,55,56,1 +block_hint,Modulus_WithFeedback,50,51,1 +block_hint,Modulus_WithFeedback,18,19,0 block_hint,Modulus_WithFeedback,6,7,1 block_hint,BitwiseOr_WithFeedback,6,7,1 block_hint,BitwiseOr_WithFeedback,35,36,0 @@ -2227,43 +2300,43 @@ block_hint,ObjectCreate,13,14,1 block_hint,ObjectCreate,15,16,1 block_hint,ObjectCreate,20,21,0 block_hint,ObjectCreate,61,62,1 -block_hint,ObjectGetOwnPropertyDescriptor,493,494,1 -block_hint,ObjectGetOwnPropertyDescriptor,490,491,0 -block_hint,ObjectGetOwnPropertyDescriptor,487,488,0 -block_hint,ObjectGetOwnPropertyDescriptor,479,480,1 -block_hint,ObjectGetOwnPropertyDescriptor,466,467,1 -block_hint,ObjectGetOwnPropertyDescriptor,384,385,0 -block_hint,ObjectGetOwnPropertyDescriptor,444,445,1 +block_hint,ObjectGetOwnPropertyDescriptor,519,520,1 +block_hint,ObjectGetOwnPropertyDescriptor,516,517,0 +block_hint,ObjectGetOwnPropertyDescriptor,513,514,0 +block_hint,ObjectGetOwnPropertyDescriptor,505,506,1 +block_hint,ObjectGetOwnPropertyDescriptor,492,493,1 +block_hint,ObjectGetOwnPropertyDescriptor,408,409,0 +block_hint,ObjectGetOwnPropertyDescriptor,470,471,1 +block_hint,ObjectGetOwnPropertyDescriptor,488,489,0 +block_hint,ObjectGetOwnPropertyDescriptor,434,435,0 +block_hint,ObjectGetOwnPropertyDescriptor,467,468,1 +block_hint,ObjectGetOwnPropertyDescriptor,410,411,1 block_hint,ObjectGetOwnPropertyDescriptor,462,463,0 -block_hint,ObjectGetOwnPropertyDescriptor,410,411,0 -block_hint,ObjectGetOwnPropertyDescriptor,441,442,1 -block_hint,ObjectGetOwnPropertyDescriptor,386,387,1 +block_hint,ObjectGetOwnPropertyDescriptor,464,465,0 block_hint,ObjectGetOwnPropertyDescriptor,436,437,0 
-block_hint,ObjectGetOwnPropertyDescriptor,438,439,0 -block_hint,ObjectGetOwnPropertyDescriptor,412,413,0 -block_hint,ObjectGetOwnPropertyDescriptor,382,383,0 -block_hint,ObjectGetOwnPropertyDescriptor,312,313,0 -block_hint,ObjectGetOwnPropertyDescriptor,184,185,1 -block_hint,ObjectGetOwnPropertyDescriptor,134,135,1 -block_hint,ObjectGetOwnPropertyDescriptor,140,141,0 -block_hint,ObjectGetOwnPropertyDescriptor,473,474,0 -block_hint,ObjectGetOwnPropertyDescriptor,481,482,1 -block_hint,ObjectGetOwnPropertyDescriptor,469,470,0 -block_hint,ObjectGetOwnPropertyDescriptor,402,403,0 -block_hint,ObjectGetOwnPropertyDescriptor,310,311,0 +block_hint,ObjectGetOwnPropertyDescriptor,406,407,0 +block_hint,ObjectGetOwnPropertyDescriptor,331,332,0 +block_hint,ObjectGetOwnPropertyDescriptor,197,198,1 +block_hint,ObjectGetOwnPropertyDescriptor,307,308,1 +block_hint,ObjectGetOwnPropertyDescriptor,138,139,0 +block_hint,ObjectGetOwnPropertyDescriptor,499,500,0 +block_hint,ObjectGetOwnPropertyDescriptor,507,508,1 +block_hint,ObjectGetOwnPropertyDescriptor,495,496,0 +block_hint,ObjectGetOwnPropertyDescriptor,426,427,0 +block_hint,ObjectGetOwnPropertyDescriptor,329,330,0 block_hint,ObjectGetOwnPropertyDescriptor,31,32,1 -block_hint,ObjectGetOwnPropertyDescriptor,340,341,1 +block_hint,ObjectGetOwnPropertyDescriptor,361,362,1 block_hint,ObjectGetOwnPropertyDescriptor,150,151,0 -block_hint,ObjectGetOwnPropertyDescriptor,448,449,0 -block_hint,ObjectGetOwnPropertyDescriptor,367,368,0 -block_hint,ObjectGetOwnPropertyDescriptor,249,250,0 -block_hint,ObjectGetOwnPropertyDescriptor,245,246,0 -block_hint,ObjectGetOwnPropertyDescriptor,265,266,0 -block_hint,ObjectGetOwnPropertyDescriptor,267,268,1 +block_hint,ObjectGetOwnPropertyDescriptor,474,475,0 +block_hint,ObjectGetOwnPropertyDescriptor,390,391,0 +block_hint,ObjectGetOwnPropertyDescriptor,264,265,0 +block_hint,ObjectGetOwnPropertyDescriptor,260,261,0 +block_hint,ObjectGetOwnPropertyDescriptor,282,283,0 +block_hint,ObjectGetOwnPropertyDescriptor,284,285,1 block_hint,ObjectGetOwnPropertyDescriptor,36,37,1 -block_hint,ObjectGetOwnPropertyDescriptor,344,345,1 -block_hint,ObjectGetOwnPropertyDescriptor,174,175,0 -block_hint,ObjectGetOwnPropertyDescriptor,253,254,1 +block_hint,ObjectGetOwnPropertyDescriptor,365,366,1 +block_hint,ObjectGetOwnPropertyDescriptor,186,187,0 +block_hint,ObjectGetOwnPropertyDescriptor,268,269,1 block_hint,ObjectKeys,32,33,1 block_hint,ObjectKeys,27,28,1 block_hint,ObjectKeys,23,24,1 @@ -2274,29 +2347,33 @@ block_hint,ObjectKeys,21,22,1 block_hint,ObjectKeys,9,10,0 block_hint,ObjectKeys,7,8,1 block_hint,ObjectKeys,14,15,1 -block_hint,ObjectPrototypeHasOwnProperty,212,213,1 -block_hint,ObjectPrototypeHasOwnProperty,190,191,1 -block_hint,ObjectPrototypeHasOwnProperty,206,207,1 -block_hint,ObjectPrototypeHasOwnProperty,223,224,0 -block_hint,ObjectPrototypeHasOwnProperty,203,204,0 -block_hint,ObjectPrototypeHasOwnProperty,194,195,1 -block_hint,ObjectPrototypeHasOwnProperty,156,157,1 -block_hint,ObjectPrototypeHasOwnProperty,217,218,0 +block_hint,ObjectPrototypeHasOwnProperty,230,231,1 +block_hint,ObjectPrototypeHasOwnProperty,205,206,1 +block_hint,ObjectPrototypeHasOwnProperty,222,223,1 +block_hint,ObjectPrototypeHasOwnProperty,241,242,0 block_hint,ObjectPrototypeHasOwnProperty,219,220,0 -block_hint,ObjectPrototypeHasOwnProperty,215,216,0 -block_hint,ObjectPrototypeHasOwnProperty,210,211,0 -block_hint,ObjectPrototypeHasOwnProperty,183,184,1 -block_hint,ObjectPrototypeHasOwnProperty,131,132,0 -block_hint,ObjectPrototypeHasOwnProperty,196,197,0 
-block_hint,ObjectPrototypeHasOwnProperty,33,34,1 -block_hint,ObjectPrototypeHasOwnProperty,72,73,0 -block_hint,ObjectPrototypeHasOwnProperty,37,38,1 -block_hint,ObjectPrototypeHasOwnProperty,50,51,0 -block_hint,ObjectPrototypeHasOwnProperty,39,40,0 -block_hint,ObjectPrototypeHasOwnProperty,136,137,1 -block_hint,ObjectPrototypeHasOwnProperty,164,165,0 -block_hint,ObjectPrototypeHasOwnProperty,169,170,1 -block_hint,ObjectPrototypeHasOwnProperty,54,55,0 +block_hint,ObjectPrototypeHasOwnProperty,209,210,1 +block_hint,ObjectPrototypeHasOwnProperty,163,164,1 +block_hint,ObjectPrototypeHasOwnProperty,235,236,0 +block_hint,ObjectPrototypeHasOwnProperty,237,238,0 +block_hint,ObjectPrototypeHasOwnProperty,233,234,0 +block_hint,ObjectPrototypeHasOwnProperty,228,229,0 +block_hint,ObjectPrototypeHasOwnProperty,192,193,1 +block_hint,ObjectPrototypeHasOwnProperty,137,138,0 +block_hint,ObjectPrototypeHasOwnProperty,211,212,0 +block_hint,ObjectPrototypeHasOwnProperty,175,176,1 +block_hint,ObjectPrototypeHasOwnProperty,141,142,0 +block_hint,ObjectPrototypeHasOwnProperty,226,227,0 +block_hint,ObjectPrototypeHasOwnProperty,76,77,0 +block_hint,ObjectPrototypeHasOwnProperty,203,204,0 +block_hint,ObjectPrototypeHasOwnProperty,34,35,1 +block_hint,ObjectPrototypeHasOwnProperty,52,53,0 +block_hint,ObjectPrototypeHasOwnProperty,36,37,0 +block_hint,ObjectPrototypeHasOwnProperty,197,198,1 +block_hint,ObjectPrototypeHasOwnProperty,40,41,0 +block_hint,ObjectPrototypeHasOwnProperty,171,172,0 +block_hint,ObjectPrototypeHasOwnProperty,178,179,1 +block_hint,ObjectPrototypeHasOwnProperty,58,59,0 block_hint,ObjectToString,42,43,0 block_hint,ObjectToString,57,58,0 block_hint,ObjectToString,65,66,0 @@ -2329,34 +2406,38 @@ block_hint,ForInEnumerate,9,10,1 block_hint,ForInPrepare,7,8,1 block_hint,ForInPrepare,12,13,1 block_hint,ForInPrepare,5,6,1 -block_hint,ForInFilter,226,227,1 -block_hint,ForInFilter,228,229,1 -block_hint,ForInFilter,219,220,0 -block_hint,ForInFilter,115,116,1 -block_hint,ForInFilter,210,211,0 -block_hint,ForInFilter,60,61,0 -block_hint,ForInFilter,125,126,1 -block_hint,ForInFilter,214,215,1 -block_hint,ForInFilter,101,102,0 +block_hint,ForInFilter,234,235,1 +block_hint,ForInFilter,236,237,1 +block_hint,ForInFilter,227,228,0 +block_hint,ForInFilter,117,118,1 +block_hint,ForInFilter,217,218,0 +block_hint,ForInFilter,62,63,0 +block_hint,ForInFilter,129,130,1 +block_hint,ForInFilter,221,222,1 block_hint,ForInFilter,103,104,0 +block_hint,ForInFilter,105,106,0 +block_hint,ForInFilter,66,67,0 block_hint,ForInFilter,64,65,0 -block_hint,ForInFilter,62,63,0 -block_hint,ForInFilter,241,242,0 -block_hint,ForInFilter,105,106,1 -block_hint,ForInFilter,39,40,1 -block_hint,ForInFilter,217,218,0 +block_hint,ForInFilter,270,271,0 +block_hint,ForInFilter,225,226,1 +block_hint,ForInFilter,109,110,1 +block_hint,ForInFilter,71,72,0 +block_hint,ForInFilter,266,267,0 +block_hint,ForInFilter,264,265,0 +block_hint,ForInFilter,251,252,0 block_hint,ForInFilter,107,108,1 -block_hint,ForInFilter,43,44,1 -block_hint,ForInFilter,196,197,0 -block_hint,ForInFilter,45,46,0 -block_hint,ForInFilter,70,71,1 -block_hint,ForInFilter,111,112,0 -block_hint,ForInFilter,127,128,0 -block_hint,ForInFilter,37,38,0 -block_hint,ForInFilter,238,239,0 -block_hint,ForInFilter,243,244,1 -block_hint,ForInFilter,184,185,0 -block_hint,ForInFilter,34,35,1 +block_hint,ForInFilter,40,41,1 +block_hint,ForInFilter,201,202,0 +block_hint,ForInFilter,42,43,0 +block_hint,ForInFilter,144,145,1 +block_hint,ForInFilter,46,47,0 +block_hint,ForInFilter,113,114,0 
+block_hint,ForInFilter,131,132,0 +block_hint,ForInFilter,36,37,0 +block_hint,ForInFilter,248,249,0 +block_hint,ForInFilter,255,256,1 +block_hint,ForInFilter,189,190,0 +block_hint,ForInFilter,33,34,1 block_hint,RegExpConstructor,55,56,1 block_hint,RegExpConstructor,7,8,1 block_hint,RegExpConstructor,131,132,1 @@ -2390,42 +2471,42 @@ block_hint,FindOrderedHashSetEntry,42,43,1 block_hint,FindOrderedHashSetEntry,68,69,0 block_hint,FindOrderedHashSetEntry,58,59,1 block_hint,FindOrderedHashSetEntry,60,61,1 -block_hint,SetConstructor,193,194,1 -block_hint,SetConstructor,71,72,0 +block_hint,SetConstructor,202,203,1 +block_hint,SetConstructor,74,75,0 block_hint,SetConstructor,11,12,1 -block_hint,SetConstructor,168,169,1 -block_hint,SetConstructor,130,131,1 -block_hint,SetConstructor,54,55,0 -block_hint,SetConstructor,56,57,1 -block_hint,SetConstructor,207,208,1 -block_hint,SetConstructor,199,200,0 -block_hint,SetConstructor,76,77,1 +block_hint,SetConstructor,172,173,1 +block_hint,SetConstructor,135,136,1 +block_hint,SetConstructor,56,57,0 +block_hint,SetConstructor,58,59,1 +block_hint,SetConstructor,218,219,1 +block_hint,SetConstructor,210,211,0 +block_hint,SetConstructor,79,80,1 block_hint,SetConstructor,23,24,1 -block_hint,SetConstructor,211,212,1 -block_hint,SetConstructor,203,204,0 -block_hint,SetConstructor,144,145,1 +block_hint,SetConstructor,222,223,1 +block_hint,SetConstructor,214,215,0 +block_hint,SetConstructor,150,151,1 block_hint,SetConstructor,25,26,1 -block_hint,SetConstructor,172,173,1 -block_hint,SetConstructor,137,138,1 -block_hint,SetConstructor,80,81,1 -block_hint,SetConstructor,82,83,1 -block_hint,SetConstructor,84,85,1 -block_hint,SetConstructor,86,87,1 -block_hint,SetConstructor,88,89,1 -block_hint,SetConstructor,90,91,1 -block_hint,SetConstructor,32,33,1 -block_hint,SetConstructor,92,93,1 -block_hint,SetConstructor,140,141,1 +block_hint,SetConstructor,178,179,1 +block_hint,SetConstructor,143,144,1 +block_hint,SetConstructor,83,84,1 +block_hint,SetConstructor,85,86,1 +block_hint,SetConstructor,87,88,1 +block_hint,SetConstructor,89,90,1 +block_hint,SetConstructor,91,92,1 +block_hint,SetConstructor,93,94,1 +block_hint,SetConstructor,34,35,1 +block_hint,SetConstructor,95,96,1 block_hint,SetConstructor,146,147,1 +block_hint,SetConstructor,152,153,1 +block_hint,SetConstructor,190,191,0 block_hint,SetConstructor,183,184,0 -block_hint,SetConstructor,176,177,0 -block_hint,SetConstructor,148,149,0 -block_hint,SetConstructor,102,103,0 -block_hint,SetConstructor,132,133,1 +block_hint,SetConstructor,154,155,0 +block_hint,SetConstructor,105,106,0 +block_hint,SetConstructor,137,138,1 block_hint,SetConstructor,27,28,1 -block_hint,SetConstructor,60,61,1 -block_hint,SetConstructor,159,160,0 -block_hint,SetConstructor,64,65,1 +block_hint,SetConstructor,62,63,1 +block_hint,SetConstructor,176,177,0 +block_hint,SetConstructor,66,67,1 block_hint,SetPrototypeHas,10,11,1 block_hint,SetPrototypeHas,5,6,1 block_hint,SetPrototypeHas,7,8,1 @@ -2580,35 +2661,36 @@ block_hint,TypedArrayPrototypeLength,52,53,0 block_hint,TypedArrayPrototypeLength,44,45,0 block_hint,TypedArrayPrototypeLength,28,29,0 block_hint,TypedArrayPrototypeLength,19,20,0 -block_hint,WeakMapConstructor,346,347,1 -block_hint,WeakMapConstructor,266,267,1 -block_hint,WeakMapConstructor,115,116,0 -block_hint,WeakMapConstructor,14,15,1 -block_hint,WeakMapConstructor,288,289,1 -block_hint,WeakMapConstructor,226,227,1 -block_hint,WeakMapConstructor,90,91,0 -block_hint,WeakMapConstructor,92,93,1 -block_hint,WeakMapConstructor,290,291,1 
-block_hint,WeakMapConstructor,323,324,0 -block_hint,WeakMapConstructor,337,338,0 -block_hint,WeakMapConstructor,236,237,0 +block_hint,WeakMapConstructor,351,352,1 +block_hint,WeakMapConstructor,271,272,1 block_hint,WeakMapConstructor,119,120,0 -block_hint,WeakMapConstructor,238,239,0 -block_hint,WeakMapConstructor,106,107,0 -block_hint,WeakMapConstructor,240,241,1 -block_hint,WeakMapConstructor,207,208,1 +block_hint,WeakMapConstructor,14,15,1 +block_hint,WeakMapConstructor,293,294,1 +block_hint,WeakMapConstructor,230,231,1 +block_hint,WeakMapConstructor,93,94,0 +block_hint,WeakMapConstructor,95,96,1 +block_hint,WeakMapConstructor,295,296,1 +block_hint,WeakMapConstructor,331,332,0 +block_hint,WeakMapConstructor,342,343,0 +block_hint,WeakMapConstructor,239,240,0 +block_hint,WeakMapConstructor,123,124,0 +block_hint,WeakMapConstructor,241,242,0 +block_hint,WeakMapConstructor,109,110,0 +block_hint,WeakMapConstructor,243,244,1 +block_hint,WeakMapConstructor,211,212,1 block_hint,WeakMapConstructor,28,29,1 block_hint,WeakMapConstructor,30,31,1 block_hint,WeakMapConstructor,32,33,1 -block_hint,WeakMapConstructor,95,96,0 -block_hint,WeakMapConstructor,113,114,1 -block_hint,WeakMapLookupHashIndex,10,11,1 -block_hint,WeakMapLookupHashIndex,12,13,1 -block_hint,WeakMapLookupHashIndex,14,15,0 -block_hint,WeakMapLookupHashIndex,16,17,0 +block_hint,WeakMapConstructor,98,99,0 +block_hint,WeakMapConstructor,117,118,1 +block_hint,WeakMapLookupHashIndex,9,10,1 +block_hint,WeakMapLookupHashIndex,31,32,1 +block_hint,WeakMapLookupHashIndex,11,12,0 +block_hint,WeakMapLookupHashIndex,13,14,0 block_hint,WeakMapLookupHashIndex,25,26,1 -block_hint,WeakMapLookupHashIndex,21,22,0 -block_hint,WeakMapLookupHashIndex,18,19,0 +block_hint,WeakMapLookupHashIndex,33,34,1 +block_hint,WeakMapLookupHashIndex,27,28,0 +block_hint,WeakMapLookupHashIndex,23,24,0 block_hint,WeakMapGet,12,13,1 block_hint,WeakMapGet,7,8,1 block_hint,WeakMapGet,9,10,1 @@ -2616,31 +2698,32 @@ block_hint,WeakMapGet,3,4,1 block_hint,WeakMapPrototypeHas,10,11,1 block_hint,WeakMapPrototypeHas,5,6,1 block_hint,WeakMapPrototypeHas,7,8,1 -block_hint,WeakMapPrototypeSet,17,18,1 -block_hint,WeakMapPrototypeSet,6,7,1 -block_hint,WeakMapPrototypeSet,8,9,1 -block_hint,WeakMapPrototypeSet,10,11,1 -block_hint,WeakMapPrototypeSet,12,13,1 -block_hint,WeakMapPrototypeSet,14,15,0 -block_hint,WeakMapPrototypeSet,4,5,0 -block_hint,WeakCollectionSet,18,19,0 -block_hint,WeakCollectionSet,6,7,1 -block_hint,WeakCollectionSet,14,15,0 +block_hint,WeakMapPrototypeSet,24,25,1 +block_hint,WeakMapPrototypeSet,5,6,1 +block_hint,WeakMapPrototypeSet,7,8,1 +block_hint,WeakMapPrototypeSet,13,14,1 +block_hint,WeakMapPrototypeSet,22,23,1 +block_hint,WeakMapPrototypeSet,15,16,0 +block_hint,WeakMapPrototypeSet,9,10,0 +block_hint,WeakCollectionSet,17,18,1 +block_hint,WeakCollectionSet,20,21,0 +block_hint,WeakCollectionSet,7,8,1 +block_hint,WeakCollectionSet,13,14,0 block_hint,AsyncGeneratorResolve,9,10,1 block_hint,AsyncGeneratorResolve,3,4,1 block_hint,AsyncGeneratorResolve,11,12,0 block_hint,AsyncGeneratorResolve,7,8,0 -block_hint,AsyncGeneratorYield,24,25,1 -block_hint,AsyncGeneratorYield,19,20,0 -block_hint,AsyncGeneratorYield,6,7,1 -block_hint,AsyncGeneratorYield,42,43,1 -block_hint,AsyncGeneratorYield,37,38,0 -block_hint,AsyncGeneratorYield,28,29,1 -block_hint,AsyncGeneratorYield,8,9,1 -block_hint,AsyncGeneratorYield,10,11,1 -block_hint,AsyncGeneratorYield,12,13,1 -block_hint,AsyncGeneratorYield,14,15,1 -block_hint,AsyncGeneratorYield,22,23,0 
+block_hint,AsyncGeneratorYieldWithAwait,24,25,1 +block_hint,AsyncGeneratorYieldWithAwait,19,20,0 +block_hint,AsyncGeneratorYieldWithAwait,6,7,1 +block_hint,AsyncGeneratorYieldWithAwait,42,43,1 +block_hint,AsyncGeneratorYieldWithAwait,37,38,0 +block_hint,AsyncGeneratorYieldWithAwait,28,29,1 +block_hint,AsyncGeneratorYieldWithAwait,8,9,1 +block_hint,AsyncGeneratorYieldWithAwait,10,11,1 +block_hint,AsyncGeneratorYieldWithAwait,12,13,1 +block_hint,AsyncGeneratorYieldWithAwait,14,15,1 +block_hint,AsyncGeneratorYieldWithAwait,22,23,0 block_hint,AsyncGeneratorResumeNext,18,19,0 block_hint,AsyncGeneratorResumeNext,14,15,0 block_hint,AsyncGeneratorPrototypeNext,27,28,1 @@ -2667,8 +2750,8 @@ block_hint,AsyncGeneratorAwaitUncaught,22,23,0 block_hint,AsyncGeneratorAwaitResolveClosure,8,9,1 block_hint,AsyncGeneratorAwaitResolveClosure,2,3,1 block_hint,AsyncGeneratorAwaitResolveClosure,6,7,0 -block_hint,AsyncGeneratorYieldResolveClosure,5,6,1 -block_hint,AsyncGeneratorYieldResolveClosure,2,3,1 +block_hint,AsyncGeneratorYieldWithAwaitResolveClosure,5,6,1 +block_hint,AsyncGeneratorYieldWithAwaitResolveClosure,2,3,1 block_hint,StringAdd_CheckNone,19,20,1 block_hint,StringAdd_CheckNone,58,59,0 block_hint,StringAdd_CheckNone,78,79,1 @@ -2714,108 +2797,113 @@ block_hint,SubString,139,140,0 block_hint,SubString,103,104,1 block_hint,SubString,34,35,1 block_hint,SubString,31,32,0 -block_hint,GetProperty,53,54,1 -block_hint,GetProperty,98,99,0 -block_hint,GetProperty,168,169,1 -block_hint,GetProperty,195,196,0 -block_hint,GetProperty,158,159,1 -block_hint,GetProperty,130,131,1 -block_hint,GetProperty,57,58,1 -block_hint,GetProperty,136,137,0 -block_hint,GetProperty,138,139,0 -block_hint,GetProperty,107,108,0 -block_hint,GetProperty,59,60,0 -block_hint,GetProperty,160,161,0 -block_hint,GetProperty,179,180,0 -block_hint,GetProperty,140,141,1 -block_hint,GetProperty,199,200,0 -block_hint,GetProperty,38,39,1 -block_hint,GetProperty,207,208,0 -block_hint,GetProperty,40,41,0 -block_hint,GetProperty,144,145,0 -block_hint,GetProperty,110,111,1 -block_hint,GetProperty,44,45,0 -block_hint,GetProperty,150,151,0 -block_hint,GetProperty,154,155,1 -block_hint,GetProperty,148,149,0 -block_hint,GetProperty,46,47,0 -block_hint,GetProperty,214,215,0 -block_hint,GetProperty,186,187,1 -block_hint,GetProperty,89,90,0 -block_hint,GetProperty,91,92,0 -block_hint,GetProperty,93,94,0 -block_hint,GetProperty,156,157,0 -block_hint,GetProperty,95,96,1 -block_hint,GetProperty,193,194,0 -block_hint,GetProperty,211,212,0 -block_hint,GetProperty,216,217,1 -block_hint,GetProperty,191,192,0 -block_hint,GetProperty,189,190,0 -block_hint,GetProperty,23,24,0 +block_hint,GetProperty,56,57,1 +block_hint,GetProperty,101,102,0 block_hint,GetProperty,175,176,1 -block_hint,GetProperty,101,102,1 -block_hint,GetPropertyWithReceiver,55,56,1 -block_hint,GetPropertyWithReceiver,57,58,1 -block_hint,GetPropertyWithReceiver,193,194,0 -block_hint,GetPropertyWithReceiver,167,168,1 -block_hint,GetPropertyWithReceiver,201,202,0 -block_hint,GetPropertyWithReceiver,109,110,0 -block_hint,GetPropertyWithReceiver,155,156,1 -block_hint,GetPropertyWithReceiver,135,136,1 -block_hint,GetPropertyWithReceiver,59,60,1 -block_hint,GetPropertyWithReceiver,141,142,0 -block_hint,GetPropertyWithReceiver,143,144,0 -block_hint,GetPropertyWithReceiver,111,112,0 -block_hint,GetPropertyWithReceiver,61,62,0 -block_hint,GetPropertyWithReceiver,157,158,0 -block_hint,GetPropertyWithReceiver,145,146,1 -block_hint,GetPropertyWithReceiver,206,207,0 -block_hint,GetPropertyWithReceiver,147,148,1 
-block_hint,GetPropertyWithReceiver,41,42,1 -block_hint,GetPropertyWithReceiver,219,220,0 -block_hint,GetPropertyWithReceiver,43,44,0 -block_hint,GetPropertyWithReceiver,176,177,0 -block_hint,GetPropertyWithReceiver,35,36,0 -block_hint,GetPropertyWithReceiver,216,217,1 -block_hint,GetPropertyWithReceiver,195,196,0 +block_hint,GetProperty,205,206,0 +block_hint,GetProperty,165,166,1 +block_hint,GetProperty,133,134,1 +block_hint,GetProperty,60,61,1 +block_hint,GetProperty,139,140,0 +block_hint,GetProperty,141,142,0 +block_hint,GetProperty,110,111,0 +block_hint,GetProperty,62,63,0 +block_hint,GetProperty,167,168,0 +block_hint,GetProperty,220,221,0 +block_hint,GetProperty,210,211,1 +block_hint,GetProperty,112,113,0 +block_hint,GetProperty,231,232,0 +block_hint,GetProperty,222,223,0 +block_hint,GetProperty,218,219,0 +block_hint,GetProperty,35,36,1 +block_hint,GetProperty,224,225,0 +block_hint,GetProperty,37,38,0 +block_hint,GetProperty,147,148,0 +block_hint,GetProperty,187,188,1 +block_hint,GetProperty,41,42,0 +block_hint,GetProperty,43,44,0 +block_hint,GetProperty,157,158,0 +block_hint,GetProperty,161,162,1 +block_hint,GetProperty,151,152,0 +block_hint,GetProperty,47,48,0 +block_hint,GetProperty,233,234,0 +block_hint,GetProperty,196,197,1 +block_hint,GetProperty,92,93,0 +block_hint,GetProperty,94,95,0 +block_hint,GetProperty,96,97,0 +block_hint,GetProperty,163,164,0 +block_hint,GetProperty,98,99,1 +block_hint,GetProperty,203,204,0 +block_hint,GetProperty,228,229,0 +block_hint,GetProperty,235,236,1 +block_hint,GetProperty,201,202,0 +block_hint,GetProperty,199,200,0 +block_hint,GetProperty,22,23,0 +block_hint,GetProperty,182,183,1 +block_hint,GetProperty,104,105,1 +block_hint,GetPropertyWithReceiver,58,59,1 +block_hint,GetPropertyWithReceiver,60,61,1 +block_hint,GetPropertyWithReceiver,203,204,0 block_hint,GetPropertyWithReceiver,174,175,1 -block_hint,GetPropertyWithReceiver,105,106,1 -block_hint,SetProperty,375,376,1 -block_hint,SetProperty,377,378,0 -block_hint,SetProperty,1159,1160,0 -block_hint,SetProperty,915,916,1 -block_hint,SetProperty,1020,1021,1 -block_hint,SetProperty,1022,1023,0 -block_hint,SetProperty,728,729,0 -block_hint,SetProperty,909,910,1 -block_hint,SetProperty,409,410,0 -block_hint,SetProperty,411,412,0 -block_hint,SetProperty,253,254,1 +block_hint,GetPropertyWithReceiver,211,212,0 +block_hint,GetPropertyWithReceiver,112,113,0 +block_hint,GetPropertyWithReceiver,162,163,1 +block_hint,GetPropertyWithReceiver,138,139,1 +block_hint,GetPropertyWithReceiver,62,63,1 +block_hint,GetPropertyWithReceiver,144,145,0 +block_hint,GetPropertyWithReceiver,146,147,0 +block_hint,GetPropertyWithReceiver,114,115,0 +block_hint,GetPropertyWithReceiver,64,65,0 +block_hint,GetPropertyWithReceiver,164,165,0 +block_hint,GetPropertyWithReceiver,217,218,1 +block_hint,GetPropertyWithReceiver,117,118,0 +block_hint,GetPropertyWithReceiver,238,239,0 +block_hint,GetPropertyWithReceiver,234,235,0 +block_hint,GetPropertyWithReceiver,225,226,0 +block_hint,GetPropertyWithReceiver,148,149,1 +block_hint,GetPropertyWithReceiver,38,39,1 +block_hint,GetPropertyWithReceiver,236,237,0 +block_hint,GetPropertyWithReceiver,40,41,0 +block_hint,GetPropertyWithReceiver,183,184,0 +block_hint,GetPropertyWithReceiver,34,35,0 +block_hint,GetPropertyWithReceiver,231,232,1 +block_hint,GetPropertyWithReceiver,205,206,0 +block_hint,SetProperty,379,380,1 +block_hint,SetProperty,381,382,0 +block_hint,SetProperty,1201,1202,0 +block_hint,SetProperty,925,926,1 +block_hint,SetProperty,1034,1035,1 +block_hint,SetProperty,1036,1037,0 
+block_hint,SetProperty,733,734,0 +block_hint,SetProperty,919,920,1 block_hint,SetProperty,413,414,0 -block_hint,SetProperty,625,626,1 -block_hint,SetProperty,93,94,1 -block_hint,SetProperty,95,96,1 -block_hint,SetProperty,1061,1062,0 -block_hint,SetProperty,800,801,1 -block_hint,SetProperty,802,803,1 -block_hint,SetProperty,804,805,0 -block_hint,SetProperty,105,106,1 -block_hint,SetProperty,109,110,1 -block_hint,SetProperty,425,426,1 -block_hint,SetProperty,111,112,1 -block_hint,SetProperty,107,108,1 -block_hint,CreateDataProperty,317,318,1 -block_hint,CreateDataProperty,319,320,0 -block_hint,CreateDataProperty,955,956,0 -block_hint,CreateDataProperty,772,773,1 -block_hint,CreateDataProperty,860,861,1 -block_hint,CreateDataProperty,536,537,1 -block_hint,CreateDataProperty,638,639,0 -block_hint,CreateDataProperty,640,641,1 -block_hint,CreateDataProperty,886,887,1 -block_hint,CreateDataProperty,331,332,0 +block_hint,SetProperty,415,416,0 +block_hint,SetProperty,256,257,1 +block_hint,SetProperty,417,418,0 +block_hint,SetProperty,630,631,1 +block_hint,SetProperty,92,93,1 +block_hint,SetProperty,94,95,1 +block_hint,SetProperty,1089,1090,0 +block_hint,SetProperty,808,809,1 +block_hint,SetProperty,810,811,1 +block_hint,SetProperty,812,813,0 +block_hint,SetProperty,104,105,1 +block_hint,SetProperty,108,109,1 +block_hint,SetProperty,429,430,1 +block_hint,SetProperty,110,111,1 +block_hint,SetProperty,106,107,1 +block_hint,CreateDataProperty,319,320,1 +block_hint,CreateDataProperty,321,322,0 +block_hint,CreateDataProperty,978,979,0 +block_hint,CreateDataProperty,779,780,1 +block_hint,CreateDataProperty,868,869,1 +block_hint,CreateDataProperty,539,540,1 +block_hint,CreateDataProperty,645,646,0 +block_hint,CreateDataProperty,647,648,1 +block_hint,CreateDataProperty,903,904,1 +block_hint,CreateDataProperty,333,334,0 block_hint,CreateDataProperty,55,56,1 -block_hint,CreateDataProperty,540,541,1 +block_hint,CreateDataProperty,543,544,1 block_hint,CreateDataProperty,57,58,1 block_hint,ArrayPrototypeConcat,79,80,1 block_hint,ArrayPrototypeConcat,54,55,0 @@ -3928,9 +4016,11 @@ block_hint,ParseInt,10,11,0 block_hint,NumberParseInt,3,4,1 block_hint,Add,66,67,1 block_hint,Add,24,25,0 +block_hint,Add,52,53,1 block_hint,Add,68,69,0 block_hint,Add,35,36,0 block_hint,Add,40,41,0 +block_hint,Add,29,30,1 block_hint,Subtract,24,25,0 block_hint,Subtract,9,10,0 block_hint,Subtract,22,23,0 @@ -4001,16 +4091,17 @@ block_hint,PerformPromiseThen,20,21,1 block_hint,PerformPromiseThen,115,116,1 block_hint,PromiseFulfillReactionJob,22,23,0 block_hint,PromiseFulfillReactionJob,2,3,1 -block_hint,ResolvePromise,27,28,0 block_hint,ResolvePromise,29,30,0 -block_hint,ResolvePromise,15,16,1 -block_hint,ResolvePromise,45,46,0 block_hint,ResolvePromise,31,32,0 +block_hint,ResolvePromise,15,16,1 +block_hint,ResolvePromise,47,48,0 +block_hint,ResolvePromise,33,34,0 block_hint,ResolvePromise,6,7,1 block_hint,ResolvePromise,17,18,0 -block_hint,ResolvePromise,51,52,1 -block_hint,ResolvePromise,47,48,0 -block_hint,ResolvePromise,21,22,0 +block_hint,ResolvePromise,19,20,1 +block_hint,ResolvePromise,53,54,1 +block_hint,ResolvePromise,49,50,0 +block_hint,ResolvePromise,23,24,0 block_hint,ProxyConstructor,30,31,1 block_hint,ProxyConstructor,10,11,0 block_hint,ProxyConstructor,22,23,1 @@ -4021,30 +4112,30 @@ block_hint,ProxyConstructor,7,8,1 block_hint,ProxyConstructor,17,18,1 block_hint,ProxyConstructor,5,6,1 block_hint,ProxyConstructor,12,13,1 -block_hint,ProxyGetProperty,139,140,1 -block_hint,ProxyGetProperty,33,34,0 
-block_hint,ProxyGetProperty,11,12,0 -block_hint,ProxyGetProperty,82,83,0 -block_hint,ProxyGetProperty,84,85,0 -block_hint,ProxyGetProperty,78,79,1 -block_hint,ProxyGetProperty,80,81,1 -block_hint,ProxyGetProperty,160,161,1 -block_hint,ProxyGetProperty,163,164,0 -block_hint,ProxyGetProperty,108,109,0 -block_hint,ProxyGetProperty,37,38,1 -block_hint,ProxyGetProperty,21,22,1 -block_hint,ProxyGetProperty,27,28,0 -block_hint,ProxyGetProperty,29,30,1 -block_hint,ProxyGetProperty,189,190,1 -block_hint,ProxyGetProperty,179,180,0 -block_hint,ProxyGetProperty,69,70,1 -block_hint,ProxyGetProperty,31,32,0 -block_hint,ProxyGetProperty,151,152,0 -block_hint,ProxyGetProperty,170,171,1 -block_hint,ProxyGetProperty,121,122,1 block_hint,ProxyGetProperty,153,154,1 -block_hint,ProxyGetProperty,155,156,0 -block_hint,ProxyGetProperty,53,54,0 +block_hint,ProxyGetProperty,34,35,0 +block_hint,ProxyGetProperty,10,11,0 +block_hint,ProxyGetProperty,89,90,0 +block_hint,ProxyGetProperty,91,92,0 +block_hint,ProxyGetProperty,85,86,1 +block_hint,ProxyGetProperty,87,88,1 +block_hint,ProxyGetProperty,176,177,1 +block_hint,ProxyGetProperty,180,181,0 +block_hint,ProxyGetProperty,118,119,0 +block_hint,ProxyGetProperty,40,41,1 +block_hint,ProxyGetProperty,114,115,1 +block_hint,ProxyGetProperty,24,25,0 +block_hint,ProxyGetProperty,26,27,1 +block_hint,ProxyGetProperty,208,209,1 +block_hint,ProxyGetProperty,198,199,0 +block_hint,ProxyGetProperty,149,150,1 +block_hint,ProxyGetProperty,28,29,0 +block_hint,ProxyGetProperty,167,168,0 +block_hint,ProxyGetProperty,187,188,1 +block_hint,ProxyGetProperty,131,132,1 +block_hint,ProxyGetProperty,169,170,1 +block_hint,ProxyGetProperty,171,172,0 +block_hint,ProxyGetProperty,60,61,0 block_hint,ReflectGet,20,21,1 block_hint,ReflectGet,15,16,0 block_hint,ReflectGet,5,6,1 @@ -4054,162 +4145,164 @@ block_hint,ReflectGet,9,10,0 block_hint,ReflectHas,8,9,1 block_hint,ReflectHas,5,6,1 block_hint,ReflectHas,3,4,0 -block_hint,RegExpPrototypeExec,200,201,1 -block_hint,RegExpPrototypeExec,128,129,1 -block_hint,RegExpPrototypeExec,130,131,1 block_hint,RegExpPrototypeExec,202,203,1 -block_hint,RegExpPrototypeExec,164,165,1 +block_hint,RegExpPrototypeExec,130,131,1 +block_hint,RegExpPrototypeExec,132,133,1 +block_hint,RegExpPrototypeExec,204,205,1 +block_hint,RegExpPrototypeExec,166,167,1 block_hint,RegExpPrototypeExec,16,17,1 -block_hint,RegExpPrototypeExec,146,147,1 -block_hint,RegExpPrototypeExec,148,149,0 +block_hint,RegExpPrototypeExec,148,149,1 block_hint,RegExpPrototypeExec,150,151,0 -block_hint,RegExpPrototypeExec,206,207,0 block_hint,RegExpPrototypeExec,152,153,0 +block_hint,RegExpPrototypeExec,208,209,0 +block_hint,RegExpPrototypeExec,154,155,0 block_hint,RegExpPrototypeExec,18,19,1 -block_hint,RegExpPrototypeExec,183,184,0 -block_hint,RegExpPrototypeExec,132,133,0 -block_hint,RegExpPrototypeExec,157,158,0 -block_hint,RegExpPrototypeExec,234,235,0 -block_hint,RegExpPrototypeExec,225,226,1 -block_hint,RegExpPrototypeExec,210,211,1 -block_hint,RegExpPrototypeExec,169,170,1 +block_hint,RegExpPrototypeExec,185,186,0 +block_hint,RegExpPrototypeExec,134,135,0 block_hint,RegExpPrototypeExec,159,160,0 +block_hint,RegExpPrototypeExec,236,237,0 +block_hint,RegExpPrototypeExec,227,228,1 +block_hint,RegExpPrototypeExec,212,213,1 +block_hint,RegExpPrototypeExec,171,172,1 +block_hint,RegExpPrototypeExec,161,162,0 block_hint,RegExpPrototypeExec,73,74,0 block_hint,RegExpPrototypeExec,24,25,1 -block_hint,RegExpPrototypeExec,136,137,1 -block_hint,RegExpPrototypeExec,26,27,1 -block_hint,RegExpPrototypeExec,188,189,0 
block_hint,RegExpPrototypeExec,138,139,1 -block_hint,RegExpPrototypeExec,240,241,1 -block_hint,RegExpPrototypeExec,212,213,0 -block_hint,RegExpPrototypeExec,177,178,1 +block_hint,RegExpPrototypeExec,26,27,1 +block_hint,RegExpPrototypeExec,190,191,0 +block_hint,RegExpPrototypeExec,140,141,1 +block_hint,RegExpPrototypeExec,242,243,1 +block_hint,RegExpPrototypeExec,214,215,0 +block_hint,RegExpPrototypeExec,179,180,1 block_hint,RegExpPrototypeExec,77,78,0 block_hint,RegExpPrototypeExec,34,35,1 -block_hint,RegExpPrototypeExec,142,143,1 -block_hint,RegExpPrototypeExec,114,115,1 -block_hint,RegExpPrototypeExec,154,155,1 -block_hint,RegExpMatchFast,251,252,0 -block_hint,RegExpMatchFast,292,293,1 +block_hint,RegExpPrototypeExec,144,145,1 +block_hint,RegExpPrototypeExec,116,117,1 +block_hint,RegExpPrototypeExec,156,157,1 +block_hint,RegExpMatchFast,363,364,0 +block_hint,RegExpMatchFast,293,294,1 block_hint,RegExpMatchFast,34,35,1 -block_hint,RegExpMatchFast,328,329,0 -block_hint,RegExpMatchFast,237,238,0 -block_hint,RegExpMatchFast,286,287,0 -block_hint,RegExpMatchFast,449,450,0 -block_hint,RegExpMatchFast,392,393,1 -block_hint,RegExpMatchFast,294,295,1 -block_hint,RegExpMatchFast,288,289,0 +block_hint,RegExpMatchFast,331,332,0 +block_hint,RegExpMatchFast,240,241,0 +block_hint,RegExpMatchFast,287,288,0 +block_hint,RegExpMatchFast,454,455,0 +block_hint,RegExpMatchFast,397,398,1 +block_hint,RegExpMatchFast,295,296,1 +block_hint,RegExpMatchFast,289,290,0 block_hint,RegExpMatchFast,127,128,0 -block_hint,RegExpMatchFast,239,240,1 -block_hint,RegExpMatchFast,241,242,1 +block_hint,RegExpMatchFast,242,243,1 +block_hint,RegExpMatchFast,244,245,1 block_hint,RegExpMatchFast,42,43,1 -block_hint,RegExpMatchFast,333,334,0 -block_hint,RegExpMatchFast,243,244,1 -block_hint,RegExpMatchFast,457,458,1 -block_hint,RegExpMatchFast,394,395,0 -block_hint,RegExpMatchFast,322,323,1 +block_hint,RegExpMatchFast,336,337,0 +block_hint,RegExpMatchFast,246,247,1 +block_hint,RegExpMatchFast,462,463,1 +block_hint,RegExpMatchFast,399,400,0 +block_hint,RegExpMatchFast,325,326,1 block_hint,RegExpMatchFast,131,132,0 block_hint,RegExpMatchFast,50,51,1 -block_hint,RegExpMatchFast,247,248,1 -block_hint,RegExpMatchFast,183,184,1 -block_hint,RegExpMatchFast,262,263,1 +block_hint,RegExpMatchFast,250,251,1 +block_hint,RegExpMatchFast,186,187,1 +block_hint,RegExpMatchFast,263,264,1 +block_hint,RegExpMatchFast,301,302,0 block_hint,RegExpMatchFast,84,85,1 block_hint,RegExpMatchFast,86,87,1 -block_hint,RegExpMatchFast,302,303,0 -block_hint,RegExpMatchFast,347,348,0 -block_hint,RegExpMatchFast,378,379,0 -block_hint,RegExpMatchFast,300,301,0 +block_hint,RegExpMatchFast,305,306,0 +block_hint,RegExpMatchFast,350,351,0 +block_hint,RegExpMatchFast,383,384,0 +block_hint,RegExpMatchFast,303,304,0 block_hint,RegExpMatchFast,88,89,1 -block_hint,RegExpMatchFast,342,343,0 -block_hint,RegExpMatchFast,253,254,0 -block_hint,RegExpMatchFast,278,279,0 -block_hint,RegExpMatchFast,193,194,1 -block_hint,RegExpMatchFast,451,452,0 -block_hint,RegExpMatchFast,437,438,1 -block_hint,RegExpMatchFast,390,391,1 -block_hint,RegExpMatchFast,304,305,1 -block_hint,RegExpMatchFast,280,281,0 +block_hint,RegExpMatchFast,345,346,0 +block_hint,RegExpMatchFast,254,255,0 +block_hint,RegExpMatchFast,279,280,0 +block_hint,RegExpMatchFast,196,197,1 +block_hint,RegExpMatchFast,456,457,0 +block_hint,RegExpMatchFast,442,443,1 +block_hint,RegExpMatchFast,395,396,1 +block_hint,RegExpMatchFast,307,308,1 +block_hint,RegExpMatchFast,281,282,0 block_hint,RegExpMatchFast,115,116,0 
-block_hint,RegExpMatchFast,344,345,0 -block_hint,RegExpMatchFast,255,256,0 +block_hint,RegExpMatchFast,347,348,0 +block_hint,RegExpMatchFast,256,257,0 block_hint,RegExpMatchFast,94,95,1 -block_hint,RegExpMatchFast,382,383,1 -block_hint,RegExpMatchFast,306,307,0 -block_hint,RegExpMatchFast,178,179,1 -block_hint,RegExpMatchFast,176,177,1 -block_hint,RegExpMatchFast,308,309,0 -block_hint,RegExpMatchFast,180,181,0 +block_hint,RegExpMatchFast,387,388,1 +block_hint,RegExpMatchFast,309,310,0 +block_hint,RegExpMatchFast,181,182,1 +block_hint,RegExpMatchFast,179,180,1 +block_hint,RegExpMatchFast,311,312,0 +block_hint,RegExpMatchFast,183,184,0 block_hint,RegExpMatchFast,102,103,0 block_hint,RegExpMatchFast,104,105,0 -block_hint,RegExpMatchFast,201,202,1 -block_hint,RegExpMatchFast,319,320,0 +block_hint,RegExpMatchFast,204,205,1 +block_hint,RegExpMatchFast,322,323,0 block_hint,RegExpMatchFast,106,107,1 -block_hint,RegExpMatchFast,190,191,1 -block_hint,RegExpMatchFast,349,350,0 +block_hint,RegExpMatchFast,193,194,1 +block_hint,RegExpMatchFast,352,353,0 block_hint,RegExpMatchFast,96,97,1 -block_hint,RegExpMatchFast,172,173,1 -block_hint,RegExpMatchFast,170,171,1 -block_hint,RegExpMatchFast,174,175,0 +block_hint,RegExpMatchFast,175,176,1 +block_hint,RegExpMatchFast,173,174,1 +block_hint,RegExpMatchFast,177,178,0 block_hint,RegExpMatchFast,98,99,0 block_hint,RegExpMatchFast,100,101,0 -block_hint,RegExpMatchFast,221,222,1 -block_hint,RegExpMatchFast,311,312,0 -block_hint,RegExpMatchFast,223,224,0 -block_hint,RegExpReplace,258,259,1 -block_hint,RegExpReplace,296,297,1 -block_hint,RegExpReplace,248,249,1 -block_hint,RegExpReplace,148,149,0 +block_hint,RegExpMatchFast,224,225,1 +block_hint,RegExpMatchFast,314,315,0 +block_hint,RegExpMatchFast,226,227,0 +block_hint,RegExpReplace,261,262,1 +block_hint,RegExpReplace,299,300,1 +block_hint,RegExpReplace,251,252,1 +block_hint,RegExpReplace,149,150,0 block_hint,RegExpReplace,22,23,1 -block_hint,RegExpReplace,206,207,1 -block_hint,RegExpReplace,150,151,0 +block_hint,RegExpReplace,209,210,1 +block_hint,RegExpReplace,151,152,0 block_hint,RegExpReplace,24,25,1 -block_hint,RegExpReplace,208,209,1 -block_hint,RegExpReplace,210,211,1 -block_hint,RegExpReplace,171,172,1 -block_hint,RegExpReplace,254,255,0 +block_hint,RegExpReplace,211,212,1 +block_hint,RegExpReplace,213,214,1 +block_hint,RegExpReplace,172,173,1 +block_hint,RegExpReplace,179,180,0 +block_hint,RegExpReplace,257,258,0 block_hint,RegExpReplace,50,51,1 -block_hint,RegExpReplace,226,227,0 -block_hint,RegExpReplace,162,163,0 -block_hint,RegExpReplace,180,181,0 -block_hint,RegExpReplace,108,109,1 -block_hint,RegExpReplace,372,373,0 -block_hint,RegExpReplace,356,357,1 -block_hint,RegExpReplace,290,291,1 -block_hint,RegExpReplace,200,201,1 -block_hint,RegExpReplace,182,183,0 -block_hint,RegExpReplace,80,81,0 +block_hint,RegExpReplace,229,230,0 +block_hint,RegExpReplace,163,164,0 +block_hint,RegExpReplace,183,184,0 +block_hint,RegExpReplace,109,110,1 +block_hint,RegExpReplace,375,376,0 +block_hint,RegExpReplace,359,360,1 +block_hint,RegExpReplace,293,294,1 +block_hint,RegExpReplace,203,204,1 +block_hint,RegExpReplace,185,186,0 +block_hint,RegExpReplace,81,82,0 block_hint,RegExpReplace,56,57,1 block_hint,RegExpReplace,58,59,1 block_hint,RegExpReplace,60,61,1 -block_hint,RegExpReplace,166,167,0 +block_hint,RegExpReplace,167,168,0 block_hint,RegExpReplace,62,63,1 -block_hint,RegExpReplace,230,231,1 -block_hint,RegExpReplace,168,169,0 +block_hint,RegExpReplace,233,234,1 +block_hint,RegExpReplace,169,170,0 
block_hint,RegExpReplace,64,65,1 -block_hint,RegExpReplace,377,378,1 -block_hint,RegExpReplace,368,369,1 -block_hint,RegExpReplace,323,324,0 -block_hint,RegExpReplace,282,283,0 -block_hint,RegExpReplace,215,216,0 -block_hint,RegExpReplace,99,100,1 +block_hint,RegExpReplace,380,381,1 +block_hint,RegExpReplace,371,372,1 +block_hint,RegExpReplace,326,327,0 +block_hint,RegExpReplace,285,286,0 +block_hint,RegExpReplace,218,219,0 +block_hint,RegExpReplace,100,101,1 block_hint,RegExpReplace,26,27,1 block_hint,RegExpReplace,28,29,1 -block_hint,RegExpReplace,101,102,1 +block_hint,RegExpReplace,102,103,1 block_hint,RegExpReplace,30,31,0 block_hint,RegExpReplace,32,33,1 block_hint,RegExpReplace,34,35,1 block_hint,RegExpReplace,72,73,1 block_hint,RegExpReplace,44,45,1 -block_hint,RegExpReplace,160,161,1 +block_hint,RegExpReplace,161,162,1 block_hint,RegExpReplace,46,47,1 block_hint,RegExpReplace,48,49,1 -block_hint,RegExpReplace,233,234,1 -block_hint,RegExpReplace,175,176,1 -block_hint,RegExpReplace,152,153,1 +block_hint,RegExpReplace,236,237,1 +block_hint,RegExpReplace,176,177,1 +block_hint,RegExpReplace,153,154,1 block_hint,RegExpReplace,36,37,1 -block_hint,RegExpReplace,154,155,1 +block_hint,RegExpReplace,155,156,1 block_hint,RegExpReplace,40,41,0 -block_hint,RegExpReplace,251,252,1 -block_hint,RegExpReplace,193,194,1 +block_hint,RegExpReplace,254,255,1 +block_hint,RegExpReplace,196,197,1 block_hint,RegExpReplace,42,43,1 block_hint,RegExpSearchFast,50,51,1 block_hint,RegExpSearchFast,6,7,1 @@ -4547,106 +4640,120 @@ block_hint,SymbolPrototypeToString,9,10,1 block_hint,SymbolPrototypeToString,11,12,1 block_hint,SymbolPrototypeToString,5,6,0 block_hint,SymbolPrototypeToString,7,8,1 -block_hint,CreateTypedArray,567,568,0 -block_hint,CreateTypedArray,593,594,0 -block_hint,CreateTypedArray,540,541,0 -block_hint,CreateTypedArray,451,452,0 -block_hint,CreateTypedArray,331,332,1 -block_hint,CreateTypedArray,333,334,1 -block_hint,CreateTypedArray,635,636,0 -block_hint,CreateTypedArray,487,488,1 -block_hint,CreateTypedArray,485,486,1 -block_hint,CreateTypedArray,382,383,1 -block_hint,CreateTypedArray,546,547,0 -block_hint,CreateTypedArray,616,617,0 -block_hint,CreateTypedArray,544,545,0 -block_hint,CreateTypedArray,455,456,0 -block_hint,CreateTypedArray,393,394,0 -block_hint,CreateTypedArray,395,396,0 -block_hint,CreateTypedArray,385,386,0 -block_hint,CreateTypedArray,104,105,1 -block_hint,CreateTypedArray,106,107,1 -block_hint,CreateTypedArray,645,646,1 -block_hint,CreateTypedArray,596,597,0 -block_hint,CreateTypedArray,643,644,1 -block_hint,CreateTypedArray,613,614,1 -block_hint,CreateTypedArray,489,490,0 -block_hint,CreateTypedArray,523,524,1 -block_hint,CreateTypedArray,361,362,0 -block_hint,CreateTypedArray,236,237,0 -block_hint,CreateTypedArray,299,300,0 -block_hint,CreateTypedArray,279,280,1 -block_hint,CreateTypedArray,281,282,1 +block_hint,CreateTypedArray,610,611,0 +block_hint,CreateTypedArray,638,639,0 +block_hint,CreateTypedArray,576,577,0 +block_hint,CreateTypedArray,485,486,0 +block_hint,CreateTypedArray,356,357,1 +block_hint,CreateTypedArray,358,359,1 +block_hint,CreateTypedArray,677,678,0 +block_hint,CreateTypedArray,520,521,1 +block_hint,CreateTypedArray,518,519,1 +block_hint,CreateTypedArray,407,408,1 +block_hint,CreateTypedArray,586,587,0 +block_hint,CreateTypedArray,662,663,0 +block_hint,CreateTypedArray,584,585,0 block_hint,CreateTypedArray,491,492,0 -block_hint,CreateTypedArray,525,526,1 -block_hint,CreateTypedArray,363,364,0 -block_hint,CreateTypedArray,252,253,0 
-block_hint,CreateTypedArray,301,302,0 -block_hint,CreateTypedArray,479,480,0 -block_hint,CreateTypedArray,481,482,0 -block_hint,CreateTypedArray,629,630,0 -block_hint,CreateTypedArray,496,497,1 -block_hint,CreateTypedArray,494,495,1 -block_hint,CreateTypedArray,397,398,1 -block_hint,CreateTypedArray,504,505,0 -block_hint,CreateTypedArray,498,499,0 -block_hint,CreateTypedArray,400,401,0 -block_hint,CreateTypedArray,152,153,1 -block_hint,CreateTypedArray,340,341,0 -block_hint,CreateTypedArray,154,155,1 -block_hint,CreateTypedArray,650,651,1 -block_hint,CreateTypedArray,603,604,0 -block_hint,CreateTypedArray,648,649,1 -block_hint,CreateTypedArray,619,620,1 -block_hint,CreateTypedArray,500,501,0 -block_hint,CreateTypedArray,519,520,1 -block_hint,CreateTypedArray,357,358,0 -block_hint,CreateTypedArray,204,205,0 -block_hint,CreateTypedArray,622,623,0 -block_hint,CreateTypedArray,166,167,1 -block_hint,CreateTypedArray,289,290,1 -block_hint,CreateTypedArray,291,292,1 -block_hint,CreateTypedArray,502,503,0 -block_hint,CreateTypedArray,521,522,1 -block_hint,CreateTypedArray,359,360,0 -block_hint,CreateTypedArray,220,221,0 -block_hint,CreateTypedArray,624,625,0 -block_hint,CreateTypedArray,511,512,0 -block_hint,CreateTypedArray,506,507,0 -block_hint,CreateTypedArray,462,463,0 -block_hint,CreateTypedArray,346,347,0 -block_hint,CreateTypedArray,416,417,1 -block_hint,CreateTypedArray,350,351,1 -block_hint,CreateTypedArray,465,466,0 -block_hint,CreateTypedArray,348,349,1 -block_hint,CreateTypedArray,418,419,0 -block_hint,CreateTypedArray,654,655,0 -block_hint,CreateTypedArray,605,606,0 +block_hint,CreateTypedArray,424,425,0 +block_hint,CreateTypedArray,426,427,0 +block_hint,CreateTypedArray,410,411,0 +block_hint,CreateTypedArray,105,106,1 +block_hint,CreateTypedArray,107,108,1 +block_hint,CreateTypedArray,412,413,1 +block_hint,CreateTypedArray,109,110,1 +block_hint,CreateTypedArray,111,112,1 +block_hint,CreateTypedArray,641,642,0 +block_hint,CreateTypedArray,683,684,1 +block_hint,CreateTypedArray,660,661,1 +block_hint,CreateTypedArray,522,523,0 +block_hint,CreateTypedArray,558,559,1 +block_hint,CreateTypedArray,384,385,0 +block_hint,CreateTypedArray,261,262,0 +block_hint,CreateTypedArray,416,417,0 +block_hint,CreateTypedArray,123,124,1 +block_hint,CreateTypedArray,125,126,1 +block_hint,CreateTypedArray,305,306,1 +block_hint,CreateTypedArray,307,308,1 +block_hint,CreateTypedArray,525,526,0 +block_hint,CreateTypedArray,560,561,1 +block_hint,CreateTypedArray,386,387,0 +block_hint,CreateTypedArray,277,278,0 +block_hint,CreateTypedArray,420,421,0 +block_hint,CreateTypedArray,137,138,1 +block_hint,CreateTypedArray,139,140,1 +block_hint,CreateTypedArray,512,513,0 +block_hint,CreateTypedArray,514,515,0 +block_hint,CreateTypedArray,671,672,0 +block_hint,CreateTypedArray,531,532,1 block_hint,CreateTypedArray,529,530,1 -block_hint,CreateTypedArray,527,528,1 -block_hint,CreateTypedArray,431,432,1 -block_hint,CreateTypedArray,609,610,0 -block_hint,CreateTypedArray,537,538,0 -block_hint,CreateTypedArray,445,446,0 -block_hint,CreateTypedArray,314,315,1 -block_hint,CreateTypedArray,607,608,0 +block_hint,CreateTypedArray,428,429,1 +block_hint,CreateTypedArray,541,542,0 +block_hint,CreateTypedArray,533,534,0 +block_hint,CreateTypedArray,431,432,0 +block_hint,CreateTypedArray,165,166,1 +block_hint,CreateTypedArray,365,366,0 +block_hint,CreateTypedArray,167,168,1 +block_hint,CreateTypedArray,433,434,1 +block_hint,CreateTypedArray,169,170,1 +block_hint,CreateTypedArray,171,172,1 +block_hint,CreateTypedArray,648,649,0 
+block_hint,CreateTypedArray,686,687,1 +block_hint,CreateTypedArray,665,666,1 block_hint,CreateTypedArray,535,536,0 +block_hint,CreateTypedArray,554,555,1 +block_hint,CreateTypedArray,380,381,0 +block_hint,CreateTypedArray,229,230,0 +block_hint,CreateTypedArray,437,438,0 +block_hint,CreateTypedArray,183,184,1 +block_hint,CreateTypedArray,185,186,1 +block_hint,CreateTypedArray,187,188,1 +block_hint,CreateTypedArray,318,319,1 +block_hint,CreateTypedArray,320,321,1 +block_hint,CreateTypedArray,538,539,0 +block_hint,CreateTypedArray,556,557,1 +block_hint,CreateTypedArray,382,383,0 +block_hint,CreateTypedArray,245,246,0 block_hint,CreateTypedArray,441,442,0 -block_hint,CreateTypedArray,265,266,0 -block_hint,CreateTypedArray,588,589,0 -block_hint,CreateTypedArray,321,322,0 -block_hint,CreateTypedArray,323,324,0 +block_hint,CreateTypedArray,199,200,1 +block_hint,CreateTypedArray,201,202,1 +block_hint,CreateTypedArray,548,549,0 +block_hint,CreateTypedArray,543,544,0 +block_hint,CreateTypedArray,500,501,0 block_hint,CreateTypedArray,371,372,0 -block_hint,CreateTypedArray,373,374,0 -block_hint,CreateTypedArray,316,317,1 -block_hint,CreateTypedArray,326,327,0 -block_hint,CreateTypedArray,319,320,0 -block_hint,CreateTypedArray,473,474,0 -block_hint,CreateTypedArray,517,518,1 -block_hint,CreateTypedArray,355,356,0 -block_hint,CreateTypedArray,188,189,0 -block_hint,CreateTypedArray,449,450,0 -block_hint,CreateTypedArray,273,274,0 +block_hint,CreateTypedArray,453,454,1 +block_hint,CreateTypedArray,375,376,1 +block_hint,CreateTypedArray,503,504,0 +block_hint,CreateTypedArray,373,374,1 +block_hint,CreateTypedArray,455,456,0 +block_hint,CreateTypedArray,688,689,0 +block_hint,CreateTypedArray,650,651,0 +block_hint,CreateTypedArray,564,565,1 +block_hint,CreateTypedArray,562,563,1 +block_hint,CreateTypedArray,466,467,1 +block_hint,CreateTypedArray,656,657,0 +block_hint,CreateTypedArray,574,575,0 +block_hint,CreateTypedArray,481,482,0 +block_hint,CreateTypedArray,340,341,1 +block_hint,CreateTypedArray,654,655,0 +block_hint,CreateTypedArray,572,573,0 +block_hint,CreateTypedArray,477,478,0 +block_hint,CreateTypedArray,290,291,0 +block_hint,CreateTypedArray,634,635,0 +block_hint,CreateTypedArray,347,348,0 +block_hint,CreateTypedArray,349,350,0 +block_hint,CreateTypedArray,396,397,0 +block_hint,CreateTypedArray,398,399,0 +block_hint,CreateTypedArray,342,343,1 +block_hint,CreateTypedArray,352,353,0 +block_hint,CreateTypedArray,345,346,0 +block_hint,CreateTypedArray,507,508,0 +block_hint,CreateTypedArray,552,553,1 +block_hint,CreateTypedArray,378,379,0 +block_hint,CreateTypedArray,213,214,0 +block_hint,CreateTypedArray,567,568,0 +block_hint,CreateTypedArray,391,392,0 +block_hint,CreateTypedArray,60,61,1 +block_hint,CreateTypedArray,62,63,1 block_hint,TypedArrayFrom,156,157,1 block_hint,TypedArrayFrom,140,141,0 block_hint,TypedArrayFrom,124,125,1 @@ -5083,82 +5190,83 @@ block_hint,TestTypeOfHandler,23,24,0 block_hint,TestTypeOfHandler,31,32,1 block_hint,TestTypeOfHandler,50,51,0 block_hint,TestTypeOfHandler,35,36,0 -block_hint,LdaGlobalHandler,8,9,1 -block_hint,LdaGlobalHandler,10,11,1 -block_hint,LdaGlobalHandler,12,13,1 -block_hint,LdaGlobalHandler,14,15,1 -block_hint,LdaGlobalHandler,180,181,0 -block_hint,LdaGlobalHandler,104,105,0 -block_hint,LdaGlobalHandler,108,109,1 +block_hint,LdaGlobalHandler,7,8,1 +block_hint,LdaGlobalHandler,9,10,1 +block_hint,LdaGlobalHandler,11,12,1 +block_hint,LdaGlobalHandler,13,14,1 +block_hint,LdaGlobalHandler,183,184,0 +block_hint,LdaGlobalHandler,105,106,0 
+block_hint,LdaGlobalHandler,109,110,1 block_hint,StaContextSlotHandler,5,6,1 block_hint,StaCurrentContextSlotHandler,2,3,1 -block_hint,LdaLookupGlobalSlotHandler,14,15,1 -block_hint,LdaLookupGlobalSlotHandler,124,125,0 -block_hint,LdaLookupGlobalSlotHandler,16,17,1 -block_hint,GetNamedPropertyHandler,362,363,1 -block_hint,GetNamedPropertyHandler,210,211,0 -block_hint,GetNamedPropertyHandler,75,76,0 -block_hint,GetNamedPropertyHandler,37,38,1 -block_hint,GetNamedPropertyHandler,303,304,0 -block_hint,GetNamedPropertyHandler,329,330,0 -block_hint,GetNamedPropertyHandler,212,213,1 -block_hint,GetNamedPropertyHandler,112,113,0 -block_hint,GetNamedPropertyHandler,214,215,0 -block_hint,GetNamedPropertyHandler,286,287,1 +block_hint,LdaLookupGlobalSlotHandler,13,14,1 +block_hint,LdaLookupGlobalSlotHandler,125,126,0 +block_hint,LdaLookupGlobalSlotHandler,15,16,1 +block_hint,GetNamedPropertyHandler,372,373,1 +block_hint,GetNamedPropertyHandler,216,217,0 +block_hint,GetNamedPropertyHandler,77,78,0 +block_hint,GetNamedPropertyHandler,35,36,1 +block_hint,GetNamedPropertyHandler,313,314,0 +block_hint,GetNamedPropertyHandler,339,340,0 +block_hint,GetNamedPropertyHandler,218,219,1 +block_hint,GetNamedPropertyHandler,290,291,0 +block_hint,GetNamedPropertyHandler,220,221,0 +block_hint,GetNamedPropertyHandler,294,295,1 block_hint,GetNamedPropertyHandler,39,40,0 -block_hint,GetNamedPropertyHandler,92,93,1 -block_hint,GetNamedPropertyHandler,337,338,0 -block_hint,GetNamedPropertyHandler,236,237,0 -block_hint,GetNamedPropertyHandler,148,149,0 -block_hint,GetNamedPropertyHandler,59,60,1 -block_hint,GetNamedPropertyHandler,85,86,0 -block_hint,GetNamedPropertyHandler,27,28,1 -block_hint,GetNamedPropertyHandler,138,139,0 -block_hint,GetNamedPropertyHandler,63,64,0 -block_hint,GetNamedPropertyHandler,293,294,1 -block_hint,GetNamedPropertyHandler,96,97,0 -block_hint,GetNamedPropertyHandler,242,243,1 +block_hint,GetNamedPropertyHandler,98,99,1 +block_hint,GetNamedPropertyHandler,347,348,0 +block_hint,GetNamedPropertyHandler,242,243,0 +block_hint,GetNamedPropertyHandler,154,155,0 +block_hint,GetNamedPropertyHandler,120,121,1 +block_hint,GetNamedPropertyHandler,49,50,0 +block_hint,GetNamedPropertyHandler,87,88,0 +block_hint,GetNamedPropertyHandler,25,26,1 +block_hint,GetNamedPropertyHandler,144,145,0 +block_hint,GetNamedPropertyHandler,65,66,0 +block_hint,GetNamedPropertyHandler,303,304,1 +block_hint,GetNamedPropertyHandler,102,103,0 +block_hint,GetNamedPropertyHandler,248,249,1 +block_hint,GetNamedPropertyHandler,250,251,1 block_hint,GetNamedPropertyHandler,244,245,1 -block_hint,GetNamedPropertyHandler,238,239,1 -block_hint,GetNamedPropertyHandler,240,241,1 -block_hint,GetNamedPropertyHandler,158,159,1 -block_hint,AddHandler,53,54,0 -block_hint,AddHandler,37,38,0 -block_hint,AddHandler,28,29,1 -block_hint,AddHandler,80,81,0 -block_hint,AddHandler,60,61,1 -block_hint,AddHandler,40,41,1 -block_hint,AddHandler,74,75,1 -block_hint,AddHandler,43,44,1 -block_hint,AddHandler,56,57,1 -block_hint,AddHandler,22,23,1 -block_hint,SubHandler,35,36,0 -block_hint,SubHandler,23,24,1 -block_hint,SubHandler,64,65,1 -block_hint,SubHandler,75,76,1 -block_hint,SubHandler,66,67,1 -block_hint,SubHandler,45,46,1 -block_hint,SubHandler,19,20,1 -block_hint,MulHandler,69,70,1 -block_hint,MulHandler,65,66,1 -block_hint,MulHandler,17,18,1 -block_hint,MulHandler,75,76,1 +block_hint,GetNamedPropertyHandler,246,247,1 +block_hint,GetNamedPropertyHandler,164,165,1 +block_hint,AddHandler,72,73,0 +block_hint,AddHandler,45,46,0 
+block_hint,AddHandler,32,33,1 +block_hint,AddHandler,118,119,0 +block_hint,AddHandler,81,82,1 +block_hint,AddHandler,48,49,1 +block_hint,AddHandler,103,104,1 +block_hint,AddHandler,52,53,1 +block_hint,AddHandler,75,76,1 +block_hint,AddHandler,24,25,1 +block_hint,SubHandler,42,43,0 +block_hint,SubHandler,27,28,1 +block_hint,SubHandler,78,79,1 +block_hint,SubHandler,98,99,1 +block_hint,SubHandler,80,81,1 +block_hint,SubHandler,56,57,1 +block_hint,SubHandler,21,22,1 +block_hint,MulHandler,106,107,1 +block_hint,MulHandler,98,99,1 +block_hint,MulHandler,30,31,1 +block_hint,MulHandler,112,113,1 +block_hint,MulHandler,91,92,1 block_hint,MulHandler,59,60,1 -block_hint,MulHandler,39,40,1 -block_hint,MulHandler,12,13,1 -block_hint,DivHandler,65,66,0 -block_hint,DivHandler,54,55,0 -block_hint,DivHandler,37,38,1 -block_hint,DivHandler,15,16,1 -block_hint,DivHandler,73,74,1 -block_hint,DivHandler,60,61,1 -block_hint,DivHandler,40,41,1 -block_hint,DivHandler,10,11,1 -block_hint,ModHandler,74,75,0 -block_hint,ModHandler,70,71,0 -block_hint,ModHandler,56,57,1 -block_hint,ModHandler,51,52,1 -block_hint,ModHandler,28,29,0 +block_hint,MulHandler,23,24,1 +block_hint,DivHandler,85,86,0 +block_hint,DivHandler,66,67,0 +block_hint,DivHandler,43,44,1 +block_hint,DivHandler,23,24,1 +block_hint,DivHandler,95,96,1 +block_hint,DivHandler,72,73,1 +block_hint,DivHandler,46,47,1 +block_hint,DivHandler,17,18,1 +block_hint,ModHandler,89,90,0 +block_hint,ModHandler,85,86,0 +block_hint,ModHandler,68,69,1 +block_hint,ModHandler,63,64,1 +block_hint,ModHandler,29,30,0 block_hint,ModHandler,15,16,1 block_hint,BitwiseOrHandler,42,43,0 block_hint,BitwiseOrHandler,30,31,1 @@ -5233,6 +5341,7 @@ block_hint,DecHandler,23,24,0 block_hint,DecHandler,18,19,1 block_hint,NegateHandler,26,27,1 block_hint,NegateHandler,24,25,1 +block_hint,NegateHandler,14,15,1 block_hint,ToBooleanLogicalNotHandler,15,16,0 block_hint,ToBooleanLogicalNotHandler,21,22,0 block_hint,ToBooleanLogicalNotHandler,7,8,0 @@ -5444,37 +5553,37 @@ block_hint,LdaImmutableContextSlotWideHandler,3,4,1 block_hint,LdaImmutableContextSlotWideHandler,9,10,0 block_hint,LdaImmutableContextSlotWideHandler,5,6,1 block_hint,LdaImmutableCurrentContextSlotWideHandler,2,3,1 -block_hint,LdaGlobalWideHandler,257,258,0 -block_hint,LdaGlobalWideHandler,108,109,1 +block_hint,LdaGlobalWideHandler,262,263,0 +block_hint,LdaGlobalWideHandler,110,111,1 block_hint,StaGlobalWideHandler,3,4,0 block_hint,StaCurrentContextSlotWideHandler,2,3,1 -block_hint,GetNamedPropertyWideHandler,323,324,0 -block_hint,GetNamedPropertyWideHandler,138,139,1 +block_hint,GetNamedPropertyWideHandler,331,332,0 +block_hint,GetNamedPropertyWideHandler,140,141,1 block_hint,GetKeyedPropertyWideHandler,3,4,0 block_hint,SetNamedPropertyWideHandler,3,4,0 block_hint,DefineNamedOwnPropertyWideHandler,3,4,0 block_hint,SetKeyedPropertyWideHandler,3,4,0 block_hint,DefineKeyedOwnPropertyWideHandler,3,4,0 block_hint,StaInArrayLiteralWideHandler,3,4,0 -block_hint,AddWideHandler,82,83,0 -block_hint,AddWideHandler,49,50,0 -block_hint,AddWideHandler,35,36,0 -block_hint,AddWideHandler,78,79,0 -block_hint,AddWideHandler,64,65,1 -block_hint,AddWideHandler,45,46,1 -block_hint,AddWideHandler,27,28,1 -block_hint,AddWideHandler,43,44,1 -block_hint,AddWideHandler,16,17,1 -block_hint,SubWideHandler,75,76,0 -block_hint,SubWideHandler,53,54,0 -block_hint,SubWideHandler,33,34,0 -block_hint,SubWideHandler,13,14,1 -block_hint,MulWideHandler,80,81,0 -block_hint,MulWideHandler,73,74,1 -block_hint,MulWideHandler,59,60,1 -block_hint,MulWideHandler,57,58,1 
-block_hint,MulWideHandler,30,31,1 -block_hint,MulWideHandler,10,11,1 +block_hint,AddWideHandler,120,121,0 +block_hint,AddWideHandler,60,61,0 +block_hint,AddWideHandler,42,43,0 +block_hint,AddWideHandler,107,108,0 +block_hint,AddWideHandler,76,77,1 +block_hint,AddWideHandler,53,54,1 +block_hint,AddWideHandler,31,32,1 +block_hint,AddWideHandler,51,52,1 +block_hint,AddWideHandler,18,19,1 +block_hint,SubWideHandler,108,109,0 +block_hint,SubWideHandler,65,66,0 +block_hint,SubWideHandler,40,41,0 +block_hint,SubWideHandler,15,16,1 +block_hint,MulWideHandler,128,129,0 +block_hint,MulWideHandler,106,107,1 +block_hint,MulWideHandler,83,84,1 +block_hint,MulWideHandler,81,82,1 +block_hint,MulWideHandler,43,44,1 +block_hint,MulWideHandler,19,20,1 block_hint,BitwiseOrWideHandler,28,29,0 block_hint,BitwiseOrWideHandler,20,21,1 block_hint,AddSmiWideHandler,25,26,0 @@ -5567,8 +5676,8 @@ block_hint,ForInNextWideHandler,11,12,0 block_hint,ForInNextWideHandler,2,3,1 block_hint,ForInNextWideHandler,4,5,0 block_hint,ForInNextWideHandler,9,10,1 -block_hint,LdaGlobalExtraWideHandler,257,258,0 -block_hint,LdaGlobalExtraWideHandler,108,109,1 +block_hint,LdaGlobalExtraWideHandler,262,263,0 +block_hint,LdaGlobalExtraWideHandler,110,111,1 block_hint,AddSmiExtraWideHandler,33,34,1 block_hint,AddSmiExtraWideHandler,23,24,0 block_hint,AddSmiExtraWideHandler,28,29,1 @@ -5586,720 +5695,720 @@ block_hint,BitwiseAndSmiExtraWideHandler,29,30,0 block_hint,BitwiseAndSmiExtraWideHandler,18,19,1 block_hint,CallUndefinedReceiver1ExtraWideHandler,68,69,0 block_hint,CallUndefinedReceiver1ExtraWideHandler,19,20,0 -builtin_hash,RecordWriteSaveFP,925153714 -builtin_hash,RecordWriteIgnoreFP,925153714 -builtin_hash,EphemeronKeyBarrierSaveFP,576191782 -builtin_hash,AdaptorWithBuiltinExitFrame,354449226 -builtin_hash,Call_ReceiverIsNullOrUndefined_Baseline_Compact,184201450 -builtin_hash,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,-622040840 -builtin_hash,Call_ReceiverIsAny_Baseline_Compact,-622040840 -builtin_hash,CallProxy,431038095 -builtin_hash,CallWithSpread,114885228 -builtin_hash,CallWithSpread_Baseline,-764009345 -builtin_hash,CallWithArrayLike,-906554997 -builtin_hash,ConstructWithSpread,-608478575 -builtin_hash,ConstructWithSpread_Baseline,633800479 -builtin_hash,Construct_Baseline,-357225391 -builtin_hash,FastNewObject,-181970843 -builtin_hash,FastNewClosure,-175349459 -builtin_hash,StringEqual,706401832 -builtin_hash,StringGreaterThan,-613733034 -builtin_hash,StringGreaterThanOrEqual,-355204059 -builtin_hash,StringLessThan,-355204059 -builtin_hash,StringLessThanOrEqual,-613733034 -builtin_hash,StringSubstring,293726114 -builtin_hash,OrderedHashTableHealIndex,320211480 -builtin_hash,CompileLazy,-48838662 -builtin_hash,CompileLazyDeoptimizedCode,1029930506 -builtin_hash,InstantiateAsmJs,-487292373 -builtin_hash,AllocateInYoungGeneration,669322182 -builtin_hash,AllocateRegularInYoungGeneration,-197165802 -builtin_hash,AllocateRegularInOldGeneration,-197165802 -builtin_hash,CopyFastSmiOrObjectElements,-974669866 -builtin_hash,GrowFastDoubleElements,633375032 -builtin_hash,GrowFastSmiOrObjectElements,55014026 -builtin_hash,ToNumber,1056900683 -builtin_hash,ToNumber_Baseline,-255970856 -builtin_hash,ToNumeric_Baseline,-150297448 -builtin_hash,ToNumberConvertBigInt,-115864776 -builtin_hash,Typeof,455002258 -builtin_hash,KeyedLoadIC_PolymorphicName,-147363616 -builtin_hash,KeyedStoreIC_Megamorphic,-766812467 -builtin_hash,DefineKeyedOwnIC_Megamorphic,-98359717 -builtin_hash,LoadGlobalIC_NoFeedback,-751542491 
-builtin_hash,LoadIC_FunctionPrototype,-456945758 -builtin_hash,LoadIC_StringLength,708277321 -builtin_hash,LoadIC_StringWrapperLength,813299235 -builtin_hash,LoadIC_NoFeedback,203476084 -builtin_hash,StoreIC_NoFeedback,-1067800910 -builtin_hash,DefineNamedOwnIC_NoFeedback,-679825540 -builtin_hash,KeyedLoadIC_SloppyArguments,-451779601 -builtin_hash,StoreFastElementIC_Standard,688605787 -builtin_hash,StoreFastElementIC_GrowNoTransitionHandleCOW,-981655461 -builtin_hash,StoreFastElementIC_NoTransitionHandleCOW,516590767 -builtin_hash,ElementsTransitionAndStore_Standard,-764902427 -builtin_hash,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,403149715 -builtin_hash,ElementsTransitionAndStore_NoTransitionHandleCOW,995918993 -builtin_hash,KeyedHasIC_PolymorphicName,-165442496 -builtin_hash,EnqueueMicrotask,7595399 -builtin_hash,RunMicrotasks,-338691087 -builtin_hash,HasProperty,508450008 -builtin_hash,DeleteProperty,-838129578 -builtin_hash,SetDataProperties,-464632524 -builtin_hash,ReturnReceiver,312434521 -builtin_hash,ArrayConstructor,-319898279 -builtin_hash,ArrayConstructorImpl,412703549 -builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DontOverride,850144068 -builtin_hash,ArrayNoArgumentConstructor_HoleySmi_DontOverride,850144068 -builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DisableAllocationSites,1070673895 -builtin_hash,ArrayNoArgumentConstructor_Packed_DisableAllocationSites,1070673895 -builtin_hash,ArrayNoArgumentConstructor_Holey_DisableAllocationSites,1070673895 -builtin_hash,ArrayNoArgumentConstructor_PackedDouble_DisableAllocationSites,-336123958 -builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DontOverride,227053212 -builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,873893 -builtin_hash,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,873893 -builtin_hash,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,-847176062 -builtin_hash,ArrayIncludesSmi,536907331 -builtin_hash,ArrayIncludesSmiOrObject,1001204729 -builtin_hash,ArrayIncludes,561561276 -builtin_hash,ArrayIndexOfSmi,-663968987 -builtin_hash,ArrayIndexOfSmiOrObject,-876114200 -builtin_hash,ArrayIndexOf,-253950356 -builtin_hash,ArrayPrototypePop,-292344596 -builtin_hash,ArrayPrototypePush,-675294835 -builtin_hash,CloneFastJSArray,-172971657 -builtin_hash,CloneFastJSArrayFillingHoles,-284975194 -builtin_hash,ExtractFastJSArray,-471066963 -builtin_hash,ArrayPrototypeEntries,829416546 -builtin_hash,ArrayPrototypeKeys,162456650 -builtin_hash,ArrayPrototypeValues,829416546 -builtin_hash,ArrayIteratorPrototypeNext,1046841582 -builtin_hash,AsyncFunctionEnter,608034084 -builtin_hash,AsyncFunctionResolve,-196767509 -builtin_hash,AsyncFunctionAwaitCaught,861277429 -builtin_hash,AsyncFunctionAwaitUncaught,861277429 -builtin_hash,AsyncFunctionAwaitResolveClosure,-342974575 -builtin_hash,DatePrototypeGetDate,799986656 -builtin_hash,DatePrototypeGetDay,799986656 -builtin_hash,DatePrototypeGetFullYear,799986656 -builtin_hash,DatePrototypeGetHours,799986656 -builtin_hash,DatePrototypeGetMilliseconds,-1015935702 -builtin_hash,DatePrototypeGetMinutes,799986656 -builtin_hash,DatePrototypeGetMonth,799986656 -builtin_hash,DatePrototypeGetSeconds,799986656 -builtin_hash,DatePrototypeGetTime,604056264 -builtin_hash,DatePrototypeGetTimezoneOffset,-1015935702 -builtin_hash,DatePrototypeValueOf,604056264 -builtin_hash,DatePrototypeToPrimitive,83647064 -builtin_hash,CreateIterResultObject,-837586211 -builtin_hash,CreateGeneratorObject,-731927520 
-builtin_hash,GeneratorPrototypeNext,24056276 -builtin_hash,GeneratorPrototypeReturn,704697656 -builtin_hash,SuspendGeneratorBaseline,-772427084 -builtin_hash,ResumeGeneratorBaseline,-1024670400 -builtin_hash,GlobalIsFinite,98040433 -builtin_hash,GlobalIsNaN,-131926158 -builtin_hash,LoadIC,758731508 -builtin_hash,LoadIC_Megamorphic,485927514 -builtin_hash,LoadIC_Noninlined,-900550790 -builtin_hash,LoadICTrampoline,279586909 -builtin_hash,LoadICBaseline,378721791 -builtin_hash,LoadICTrampoline_Megamorphic,279586909 -builtin_hash,LoadSuperIC,-700275115 -builtin_hash,LoadSuperICBaseline,-779741774 -builtin_hash,KeyedLoadIC,-932282799 -builtin_hash,KeyedLoadIC_Megamorphic,-52322939 -builtin_hash,KeyedLoadICTrampoline,279586909 -builtin_hash,KeyedLoadICBaseline,378721791 -builtin_hash,KeyedLoadICTrampoline_Megamorphic,279586909 -builtin_hash,StoreGlobalIC,-593770272 -builtin_hash,StoreGlobalICTrampoline,279586909 -builtin_hash,StoreGlobalICBaseline,378721791 -builtin_hash,StoreIC,-725198793 -builtin_hash,StoreICTrampoline,853716338 -builtin_hash,StoreICBaseline,-779741774 -builtin_hash,DefineNamedOwnIC,407070464 -builtin_hash,DefineNamedOwnICBaseline,-779741774 -builtin_hash,KeyedStoreIC,-781537170 -builtin_hash,KeyedStoreICTrampoline,853716338 -builtin_hash,KeyedStoreICBaseline,-779741774 -builtin_hash,DefineKeyedOwnIC,-368740640 -builtin_hash,StoreInArrayLiteralIC,870478079 -builtin_hash,StoreInArrayLiteralICBaseline,-779741774 -builtin_hash,LoadGlobalIC,-629815841 -builtin_hash,LoadGlobalICInsideTypeof,-681620395 -builtin_hash,LoadGlobalICTrampoline,742678434 -builtin_hash,LoadGlobalICBaseline,-376594265 -builtin_hash,LoadGlobalICInsideTypeofTrampoline,742678434 -builtin_hash,LoadGlobalICInsideTypeofBaseline,-376594265 -builtin_hash,LookupGlobalICBaseline,318821975 -builtin_hash,LookupGlobalICInsideTypeofBaseline,318821975 -builtin_hash,KeyedHasIC,-1007842062 -builtin_hash,KeyedHasICBaseline,378721791 -builtin_hash,KeyedHasIC_Megamorphic,508450008 -builtin_hash,IterableToList,444610848 -builtin_hash,IterableToListWithSymbolLookup,-721120479 -builtin_hash,IterableToListMayPreserveHoles,907505763 -builtin_hash,FindOrderedHashMapEntry,-772719645 -builtin_hash,MapConstructor,328612162 -builtin_hash,MapPrototypeSet,-740571272 -builtin_hash,MapPrototypeDelete,815676314 -builtin_hash,MapPrototypeGet,-99464800 -builtin_hash,MapPrototypeHas,417921745 -builtin_hash,MapPrototypeEntries,-250935376 -builtin_hash,MapPrototypeGetSize,1035731846 -builtin_hash,MapPrototypeForEach,-11074084 -builtin_hash,MapPrototypeKeys,-250935376 -builtin_hash,MapPrototypeValues,-250935376 -builtin_hash,MapIteratorPrototypeNext,-513700577 -builtin_hash,MapIteratorToList,788737747 -builtin_hash,SameValueNumbersOnly,-715209 -builtin_hash,Add_Baseline,239982805 -builtin_hash,AddSmi_Baseline,890570719 -builtin_hash,Subtract_Baseline,984085983 -builtin_hash,SubtractSmi_Baseline,745275517 -builtin_hash,Multiply_Baseline,363197242 -builtin_hash,MultiplySmi_Baseline,-291668426 -builtin_hash,Divide_Baseline,41207474 -builtin_hash,DivideSmi_Baseline,-228110709 -builtin_hash,Modulus_Baseline,895869986 -builtin_hash,ModulusSmi_Baseline,1052037814 -builtin_hash,Exponentiate_Baseline,525497385 -builtin_hash,BitwiseAnd_Baseline,834157251 -builtin_hash,BitwiseAndSmi_Baseline,-224843182 -builtin_hash,BitwiseOr_Baseline,-1000862579 -builtin_hash,BitwiseOrSmi_Baseline,-175921720 -builtin_hash,BitwiseXor_Baseline,451693864 -builtin_hash,BitwiseXorSmi_Baseline,1072796926 -builtin_hash,ShiftLeft_Baseline,-1059182958 
-builtin_hash,ShiftLeftSmi_Baseline,180208819 -builtin_hash,ShiftRight_Baseline,8908966 -builtin_hash,ShiftRightSmi_Baseline,-553470248 -builtin_hash,ShiftRightLogical_Baseline,-771447674 -builtin_hash,ShiftRightLogicalSmi_Baseline,524787408 -builtin_hash,Add_WithFeedback,-726025304 -builtin_hash,Subtract_WithFeedback,82634219 -builtin_hash,Modulus_WithFeedback,-14646246 -builtin_hash,BitwiseOr_WithFeedback,-345564281 -builtin_hash,Equal_Baseline,-851425691 -builtin_hash,StrictEqual_Baseline,609264081 -builtin_hash,LessThan_Baseline,-250082751 -builtin_hash,GreaterThan_Baseline,442646997 -builtin_hash,LessThanOrEqual_Baseline,278491596 -builtin_hash,GreaterThanOrEqual_Baseline,-526555932 -builtin_hash,Equal_WithFeedback,470211465 -builtin_hash,StrictEqual_WithFeedback,-720033793 -builtin_hash,LessThan_WithFeedback,555433500 -builtin_hash,GreaterThan_WithFeedback,-13794951 -builtin_hash,GreaterThanOrEqual_WithFeedback,38912741 -builtin_hash,BitwiseNot_Baseline,-63143721 -builtin_hash,Decrement_Baseline,-491402242 -builtin_hash,Increment_Baseline,379465907 -builtin_hash,Negate_Baseline,984080569 -builtin_hash,ObjectAssign,72200033 -builtin_hash,ObjectCreate,-611788064 -builtin_hash,ObjectEntries,-669410235 -builtin_hash,ObjectGetOwnPropertyDescriptor,671274362 -builtin_hash,ObjectGetOwnPropertyNames,12658690 -builtin_hash,ObjectIs,-340387257 -builtin_hash,ObjectKeys,-12925872 -builtin_hash,ObjectPrototypeHasOwnProperty,156674900 -builtin_hash,ObjectToString,-633754100 -builtin_hash,InstanceOf_WithFeedback,411449225 -builtin_hash,InstanceOf_Baseline,486598742 -builtin_hash,ForInEnumerate,-1069244956 -builtin_hash,ForInPrepare,362202805 -builtin_hash,ForInFilter,-306829682 -builtin_hash,RegExpConstructor,-10164395 -builtin_hash,RegExpExecAtom,-1073379799 -builtin_hash,RegExpExecInternal,891768236 -builtin_hash,FindOrderedHashSetEntry,705655709 -builtin_hash,SetConstructor,-575472839 -builtin_hash,SetPrototypeHas,417921745 -builtin_hash,SetPrototypeAdd,-541161428 -builtin_hash,SetPrototypeDelete,851985022 -builtin_hash,SetPrototypeEntries,-250935376 -builtin_hash,SetPrototypeGetSize,1035731846 -builtin_hash,SetPrototypeForEach,-130012968 -builtin_hash,SetPrototypeValues,-250935376 -builtin_hash,SetIteratorPrototypeNext,-958421279 -builtin_hash,SetOrSetIteratorToList,144945576 -builtin_hash,StringFromCharCode,-225080952 -builtin_hash,StringPrototypeReplace,-894056994 -builtin_hash,StringPrototypeSplit,-81652142 -builtin_hash,TypedArrayConstructor,536725208 -builtin_hash,TypedArrayPrototypeByteLength,667117445 -builtin_hash,TypedArrayPrototypeLength,423335186 -builtin_hash,WeakMapConstructor,211573517 -builtin_hash,WeakMapLookupHashIndex,449726121 -builtin_hash,WeakMapGet,-388873308 -builtin_hash,WeakMapPrototypeHas,-407398312 -builtin_hash,WeakMapPrototypeSet,795326571 -builtin_hash,WeakSetConstructor,-100066523 -builtin_hash,WeakSetPrototypeHas,-407398312 -builtin_hash,WeakSetPrototypeAdd,-116381667 -builtin_hash,WeakCollectionSet,-319283680 -builtin_hash,AsyncGeneratorResolve,69898511 -builtin_hash,AsyncGeneratorYield,444077127 -builtin_hash,AsyncGeneratorResumeNext,-495744369 -builtin_hash,AsyncGeneratorPrototypeNext,95683089 -builtin_hash,AsyncGeneratorAwaitUncaught,-291001587 -builtin_hash,AsyncGeneratorAwaitResolveClosure,1023878839 -builtin_hash,AsyncGeneratorYieldResolveClosure,783335474 -builtin_hash,StringAdd_CheckNone,313254412 -builtin_hash,SubString,992908207 -builtin_hash,GetProperty,-151131763 -builtin_hash,GetPropertyWithReceiver,-145418645 -builtin_hash,SetProperty,877836229 
-builtin_hash,CreateDataProperty,-606450676 -builtin_hash,ArrayPrototypeConcat,841497445 -builtin_hash,ArrayEvery,-992635840 -builtin_hash,ArrayFilterLoopLazyDeoptContinuation,923422735 -builtin_hash,ArrayFilterLoopContinuation,-871404088 -builtin_hash,ArrayFilter,135589144 -builtin_hash,ArrayPrototypeFind,-52246620 -builtin_hash,ArrayForEachLoopLazyDeoptContinuation,-87111692 -builtin_hash,ArrayForEachLoopContinuation,-432571379 -builtin_hash,ArrayForEach,1063323906 -builtin_hash,ArrayFrom,403114949 -builtin_hash,ArrayIsArray,421541474 -builtin_hash,LoadJoinElement_FastSmiOrObjectElements_0,-931014688 -builtin_hash,LoadJoinElement_FastDoubleElements_0,668207359 -builtin_hash,JoinStackPush,1045985488 -builtin_hash,JoinStackPop,294613228 -builtin_hash,ArrayPrototypeJoin,-591790002 -builtin_hash,ArrayPrototypeToString,254788114 -builtin_hash,ArrayPrototypeLastIndexOf,-623388518 -builtin_hash,ArrayMapLoopLazyDeoptContinuation,527144862 -builtin_hash,ArrayMapLoopContinuation,-248361443 -builtin_hash,ArrayMap,-977984343 -builtin_hash,ArrayReduceLoopLazyDeoptContinuation,641128240 -builtin_hash,ArrayReduceLoopContinuation,601284012 -builtin_hash,ArrayReduce,-918238503 -builtin_hash,ArrayPrototypeReverse,-50777013 -builtin_hash,ArrayPrototypeShift,763658583 -builtin_hash,ArrayPrototypeSlice,132491105 -builtin_hash,ArraySome,46741703 -builtin_hash,ArrayPrototypeSplice,532432423 -builtin_hash,ArrayPrototypeUnshift,285400235 -builtin_hash,ArrayBufferPrototypeGetByteLength,904301424 -builtin_hash,ArrayBufferIsView,-1007526052 -builtin_hash,ToInteger,-164962161 -builtin_hash,FastCreateDataProperty,-709822671 -builtin_hash,BooleanConstructor,-990386327 -builtin_hash,BooleanPrototypeToString,-439668236 -builtin_hash,ToString,-945769269 -builtin_hash,StringPrototypeToString,-494656392 -builtin_hash,StringPrototypeValueOf,-494656392 -builtin_hash,StringPrototypeCharAt,-1053868658 -builtin_hash,StringPrototypeCharCodeAt,228833317 -builtin_hash,StringPrototypeCodePointAt,-570719484 -builtin_hash,StringPrototypeConcat,408689273 -builtin_hash,StringConstructor,259187035 -builtin_hash,StringAddConvertLeft,-940497411 -builtin_hash,StringAddConvertRight,262934773 -builtin_hash,StringCharAt,40686160 -builtin_hash,FastNewClosureBaseline,274645735 -builtin_hash,FastNewFunctionContextFunction,-198047187 -builtin_hash,CreateRegExpLiteral,-338674408 -builtin_hash,CreateShallowArrayLiteral,-564919299 -builtin_hash,CreateEmptyArrayLiteral,643763718 -builtin_hash,CreateShallowObjectLiteral,1034845855 -builtin_hash,ObjectConstructor,-400745906 -builtin_hash,CreateEmptyLiteralObject,-942901622 -builtin_hash,NumberConstructor,287129549 -builtin_hash,StringToNumber,831842981 -builtin_hash,NonNumberToNumber,-93280944 -builtin_hash,NonNumberToNumeric,773024939 -builtin_hash,ToNumeric,-142083210 -builtin_hash,NumberToString,882645046 -builtin_hash,ToBoolean,303836147 -builtin_hash,ToBooleanForBaselineJump,-629829779 -builtin_hash,ToLength,1015821828 -builtin_hash,ToName,1027768228 -builtin_hash,ToObject,71126311 -builtin_hash,NonPrimitiveToPrimitive_Default,847969156 -builtin_hash,NonPrimitiveToPrimitive_Number,847969156 -builtin_hash,NonPrimitiveToPrimitive_String,847969156 -builtin_hash,OrdinaryToPrimitive_Number,424204202 -builtin_hash,OrdinaryToPrimitive_String,424204202 -builtin_hash,DataViewPrototypeGetByteLength,480775435 -builtin_hash,DataViewPrototypeGetFloat64,1003024607 -builtin_hash,DataViewPrototypeSetUint32,229410881 -builtin_hash,DataViewPrototypeSetFloat64,-938786777 
-builtin_hash,FunctionPrototypeHasInstance,505391038 -builtin_hash,FastFunctionPrototypeBind,-508566393 -builtin_hash,ForInNext,-1021384217 -builtin_hash,GetIteratorWithFeedback,51240002 -builtin_hash,GetIteratorBaseline,-216059576 -builtin_hash,CallIteratorWithFeedback,581894736 -builtin_hash,MathAbs,-187735218 -builtin_hash,MathCeil,468403420 -builtin_hash,MathFloor,824943398 -builtin_hash,MathRound,-67452399 -builtin_hash,MathPow,-187693750 -builtin_hash,MathMax,897170464 -builtin_hash,MathMin,213295587 -builtin_hash,MathAsin,788026272 -builtin_hash,MathAtan2,-181481861 -builtin_hash,MathCos,-681373097 -builtin_hash,MathExp,-410231589 -builtin_hash,MathFround,415902182 -builtin_hash,MathImul,-715703335 -builtin_hash,MathLog,941930077 -builtin_hash,MathSin,855471515 -builtin_hash,MathSign,539035638 -builtin_hash,MathSqrt,-650867412 -builtin_hash,MathTan,85134160 -builtin_hash,MathTanh,-321274619 -builtin_hash,MathRandom,827257341 -builtin_hash,NumberPrototypeToString,-583893270 -builtin_hash,NumberIsInteger,775270226 -builtin_hash,NumberIsNaN,-189048659 -builtin_hash,NumberParseFloat,-439771973 -builtin_hash,ParseInt,-192026072 -builtin_hash,NumberParseInt,82296249 -builtin_hash,Add,325215303 -builtin_hash,Subtract,52083078 -builtin_hash,Multiply,-317381366 -builtin_hash,Divide,750482944 -builtin_hash,Modulus,328987036 -builtin_hash,CreateObjectWithoutProperties,596134857 -builtin_hash,ObjectIsExtensible,-930811057 -builtin_hash,ObjectPreventExtensions,858037175 -builtin_hash,ObjectGetPrototypeOf,-202287704 -builtin_hash,ObjectSetPrototypeOf,-524999648 -builtin_hash,ObjectPrototypeToString,169720373 -builtin_hash,ObjectPrototypeValueOf,-28430309 -builtin_hash,FulfillPromise,21355213 -builtin_hash,NewPromiseCapability,-218729781 -builtin_hash,PromiseCapabilityDefaultResolve,336354486 -builtin_hash,PerformPromiseThen,278993520 -builtin_hash,PromiseAll,-190868023 -builtin_hash,PromiseAllResolveElementClosure,-452068448 -builtin_hash,PromiseConstructor,-93257640 -builtin_hash,PromisePrototypeCatch,-895785401 -builtin_hash,PromiseFulfillReactionJob,-249420439 -builtin_hash,PromiseResolveTrampoline,-927707015 -builtin_hash,PromiseResolve,557472834 -builtin_hash,ResolvePromise,-983952394 -builtin_hash,PromisePrototypeThen,866384223 -builtin_hash,PromiseResolveThenableJob,-536493053 -builtin_hash,ProxyConstructor,-754644599 -builtin_hash,ProxyGetProperty,213230890 -builtin_hash,ProxyIsExtensible,-484311481 -builtin_hash,ProxyPreventExtensions,297316441 -builtin_hash,ReflectGet,-1068630269 -builtin_hash,ReflectHas,-927707015 -builtin_hash,RegExpPrototypeExec,-1008377217 -builtin_hash,RegExpMatchFast,470663647 -builtin_hash,RegExpReplace,-181940551 -builtin_hash,RegExpPrototypeReplace,39784472 -builtin_hash,RegExpSearchFast,-745723781 -builtin_hash,RegExpPrototypeSourceGetter,-712979884 -builtin_hash,RegExpSplit,881747508 -builtin_hash,RegExpPrototypeTest,-1044390149 -builtin_hash,RegExpPrototypeTestFast,381723041 -builtin_hash,RegExpPrototypeGlobalGetter,-937075195 -builtin_hash,RegExpPrototypeIgnoreCaseGetter,-369470981 -builtin_hash,RegExpPrototypeMultilineGetter,-760493777 -builtin_hash,RegExpPrototypeHasIndicesGetter,260570818 -builtin_hash,RegExpPrototypeDotAllGetter,260570818 -builtin_hash,RegExpPrototypeStickyGetter,-823365333 -builtin_hash,RegExpPrototypeUnicodeGetter,-823365333 -builtin_hash,RegExpPrototypeFlagsGetter,-334954319 -builtin_hash,StringPrototypeEndsWith,-178713286 -builtin_hash,StringPrototypeIncludes,482244051 -builtin_hash,StringPrototypeIndexOf,-130883228 
-builtin_hash,StringPrototypeIterator,287346997 -builtin_hash,StringIteratorPrototypeNext,-1032793009 -builtin_hash,StringPrototypeMatch,-984631220 -builtin_hash,StringPrototypeSearch,-984631220 -builtin_hash,StringRepeat,407848752 -builtin_hash,StringPrototypeSlice,538867513 -builtin_hash,StringPrototypeStartsWith,-490931141 -builtin_hash,StringPrototypeSubstr,-1042470347 -builtin_hash,StringPrototypeSubstring,283811647 -builtin_hash,StringPrototypeTrim,509891784 -builtin_hash,SymbolPrototypeToString,477000612 -builtin_hash,CreateTypedArray,255685378 -builtin_hash,TypedArrayFrom,-527855119 -builtin_hash,TypedArrayPrototypeSet,-1042496168 -builtin_hash,TypedArrayPrototypeSubArray,-873399896 -builtin_hash,NewSloppyArgumentsElements,840136606 -builtin_hash,NewStrictArgumentsElements,-701611224 -builtin_hash,NewRestArgumentsElements,450207936 -builtin_hash,FastNewSloppyArguments,427487705 -builtin_hash,FastNewStrictArguments,940012797 -builtin_hash,FastNewRestArguments,408066435 -builtin_hash,StringSlowFlatten,200237548 -builtin_hash,StringIndexOf,506557026 -builtin_hash,Load_FastSmiElements_0,653973042 -builtin_hash,Load_FastObjectElements_0,653973042 -builtin_hash,Store_FastSmiElements_0,418112357 -builtin_hash,Store_FastObjectElements_0,-800846888 -builtin_hash,SortCompareDefault,304655245 -builtin_hash,SortCompareUserFn,-618954695 -builtin_hash,Copy,144972358 -builtin_hash,MergeAt,425921460 -builtin_hash,GallopLeft,-851568101 -builtin_hash,GallopRight,-1027593577 -builtin_hash,ArrayTimSort,-833804402 -builtin_hash,ArrayPrototypeSort,-985590788 -builtin_hash,StringFastLocaleCompare,-984642925 -builtin_hash,WasmInt32ToHeapNumber,-560703743 -builtin_hash,WasmTaggedNonSmiToInt32,-977713892 -builtin_hash,WasmTriggerTierUp,248716051 -builtin_hash,WasmStackGuard,534090893 -builtin_hash,CanUseSameAccessor_FastSmiElements_0,302023388 -builtin_hash,CanUseSameAccessor_FastObjectElements_0,302023388 -builtin_hash,StringPrototypeToLowerCaseIntl,525980093 -builtin_hash,StringToLowerCaseIntl,-870512333 -builtin_hash,WideHandler,964339339 -builtin_hash,ExtraWideHandler,964339339 -builtin_hash,LdarHandler,-1051441899 -builtin_hash,LdaZeroHandler,-361600616 -builtin_hash,LdaSmiHandler,-168991530 -builtin_hash,LdaUndefinedHandler,-638450171 -builtin_hash,LdaNullHandler,-638450171 -builtin_hash,LdaTheHoleHandler,-638450171 -builtin_hash,LdaTrueHandler,-1021965256 -builtin_hash,LdaFalseHandler,-1021965256 -builtin_hash,LdaConstantHandler,356409883 -builtin_hash,LdaContextSlotHandler,943881063 -builtin_hash,LdaImmutableContextSlotHandler,943881063 -builtin_hash,LdaCurrentContextSlotHandler,-382503010 -builtin_hash,LdaImmutableCurrentContextSlotHandler,-382503010 -builtin_hash,StarHandler,87231384 -builtin_hash,MovHandler,83265050 -builtin_hash,PushContextHandler,-240552696 -builtin_hash,PopContextHandler,112490181 -builtin_hash,TestReferenceEqualHandler,-82232472 -builtin_hash,TestUndetectableHandler,-510593553 -builtin_hash,TestNullHandler,68435121 -builtin_hash,TestUndefinedHandler,68435121 -builtin_hash,TestTypeOfHandler,112039968 -builtin_hash,LdaGlobalHandler,-710429991 -builtin_hash,LdaGlobalInsideTypeofHandler,449865173 -builtin_hash,StaGlobalHandler,431419910 -builtin_hash,StaContextSlotHandler,-317790092 -builtin_hash,StaCurrentContextSlotHandler,-425140012 -builtin_hash,LdaLookupGlobalSlotHandler,-213340191 -builtin_hash,LdaLookupGlobalSlotInsideTypeofHandler,841482473 -builtin_hash,StaLookupSlotHandler,712075677 -builtin_hash,GetNamedPropertyHandler,-731343419 
-builtin_hash,GetNamedPropertyFromSuperHandler,-238764505 -builtin_hash,GetKeyedPropertyHandler,533315167 -builtin_hash,SetNamedPropertyHandler,-228269698 -builtin_hash,DefineNamedOwnPropertyHandler,-228269698 -builtin_hash,SetKeyedPropertyHandler,-286024195 -builtin_hash,DefineKeyedOwnPropertyHandler,-286024195 -builtin_hash,StaInArrayLiteralHandler,-286024195 -builtin_hash,DefineKeyedOwnPropertyInLiteralHandler,816417950 -builtin_hash,AddHandler,436623080 -builtin_hash,SubHandler,828723569 -builtin_hash,MulHandler,-316424498 -builtin_hash,DivHandler,515954740 -builtin_hash,ModHandler,-366411552 -builtin_hash,ExpHandler,841779190 -builtin_hash,BitwiseOrHandler,-654281787 -builtin_hash,BitwiseXorHandler,-535984936 -builtin_hash,BitwiseAndHandler,-787882211 -builtin_hash,ShiftLeftHandler,-843937923 -builtin_hash,ShiftRightHandler,212674128 -builtin_hash,ShiftRightLogicalHandler,46895009 -builtin_hash,AddSmiHandler,445675964 -builtin_hash,SubSmiHandler,888105299 -builtin_hash,MulSmiHandler,75879268 -builtin_hash,DivSmiHandler,-1048906096 -builtin_hash,ModSmiHandler,844716305 -builtin_hash,BitwiseOrSmiHandler,950867671 -builtin_hash,BitwiseXorSmiHandler,667017295 -builtin_hash,BitwiseAndSmiHandler,16303015 -builtin_hash,ShiftLeftSmiHandler,-243955085 -builtin_hash,ShiftRightSmiHandler,247609496 -builtin_hash,ShiftRightLogicalSmiHandler,-992904191 -builtin_hash,IncHandler,-790806177 -builtin_hash,DecHandler,395099716 -builtin_hash,NegateHandler,773570671 -builtin_hash,BitwiseNotHandler,-30220634 -builtin_hash,ToBooleanLogicalNotHandler,-99917552 -builtin_hash,LogicalNotHandler,-229756855 -builtin_hash,TypeOfHandler,872105570 -builtin_hash,DeletePropertyStrictHandler,986048046 -builtin_hash,DeletePropertySloppyHandler,-712291966 -builtin_hash,GetSuperConstructorHandler,728738893 -builtin_hash,CallAnyReceiverHandler,-851436429 -builtin_hash,CallPropertyHandler,-851436429 -builtin_hash,CallProperty0Handler,250813033 -builtin_hash,CallProperty1Handler,-121823545 -builtin_hash,CallProperty2Handler,370258853 -builtin_hash,CallUndefinedReceiverHandler,-986912746 -builtin_hash,CallUndefinedReceiver0Handler,-202732647 -builtin_hash,CallUndefinedReceiver1Handler,565224727 -builtin_hash,CallUndefinedReceiver2Handler,387553770 -builtin_hash,CallWithSpreadHandler,-851436429 -builtin_hash,CallRuntimeHandler,-170619637 -builtin_hash,CallJSRuntimeHandler,409704029 -builtin_hash,InvokeIntrinsicHandler,715377671 -builtin_hash,ConstructHandler,-131706767 -builtin_hash,ConstructWithSpreadHandler,364942391 -builtin_hash,TestEqualHandler,-837108372 -builtin_hash,TestEqualStrictHandler,894153483 -builtin_hash,TestLessThanHandler,-203533412 -builtin_hash,TestGreaterThanHandler,-62684313 -builtin_hash,TestLessThanOrEqualHandler,200710478 -builtin_hash,TestGreaterThanOrEqualHandler,-469101728 -builtin_hash,TestInstanceOfHandler,-340407472 -builtin_hash,TestInHandler,-632047176 -builtin_hash,ToNameHandler,751533885 -builtin_hash,ToNumberHandler,242489604 -builtin_hash,ToNumericHandler,839640143 -builtin_hash,ToObjectHandler,751533885 -builtin_hash,ToStringHandler,-825940301 -builtin_hash,CreateRegExpLiteralHandler,774985654 -builtin_hash,CreateArrayLiteralHandler,440763826 -builtin_hash,CreateArrayFromIterableHandler,-751073497 -builtin_hash,CreateEmptyArrayLiteralHandler,825438818 -builtin_hash,CreateObjectLiteralHandler,-380204898 -builtin_hash,CreateEmptyObjectLiteralHandler,-480083872 -builtin_hash,CreateClosureHandler,877654061 -builtin_hash,CreateBlockContextHandler,88550400 
-builtin_hash,CreateCatchContextHandler,-353230330 -builtin_hash,CreateFunctionContextHandler,159733033 -builtin_hash,CreateMappedArgumentsHandler,427483474 -builtin_hash,CreateUnmappedArgumentsHandler,-700097820 -builtin_hash,CreateRestParameterHandler,-443936485 -builtin_hash,JumpLoopHandler,-343051033 -builtin_hash,JumpHandler,930291898 -builtin_hash,JumpConstantHandler,30595851 -builtin_hash,JumpIfUndefinedConstantHandler,643859045 -builtin_hash,JumpIfNotUndefinedConstantHandler,-579153610 -builtin_hash,JumpIfUndefinedOrNullConstantHandler,407521228 -builtin_hash,JumpIfTrueConstantHandler,643859045 -builtin_hash,JumpIfFalseConstantHandler,643859045 -builtin_hash,JumpIfToBooleanTrueConstantHandler,696724860 -builtin_hash,JumpIfToBooleanFalseConstantHandler,-179848636 -builtin_hash,JumpIfToBooleanTrueHandler,690028724 -builtin_hash,JumpIfToBooleanFalseHandler,-185041964 -builtin_hash,JumpIfTrueHandler,-843949956 -builtin_hash,JumpIfFalseHandler,-843949956 -builtin_hash,JumpIfNullHandler,-843949956 -builtin_hash,JumpIfNotNullHandler,1010847017 -builtin_hash,JumpIfUndefinedHandler,-843949956 -builtin_hash,JumpIfNotUndefinedHandler,1010847017 -builtin_hash,JumpIfUndefinedOrNullHandler,-517586474 -builtin_hash,JumpIfJSReceiverHandler,-9297690 -builtin_hash,SwitchOnSmiNoFeedbackHandler,-716989037 -builtin_hash,ForInEnumerateHandler,61176991 -builtin_hash,ForInPrepareHandler,748762980 -builtin_hash,ForInContinueHandler,-714020166 -builtin_hash,ForInNextHandler,1029822774 -builtin_hash,ForInStepHandler,776836572 -builtin_hash,SetPendingMessageHandler,-975756024 -builtin_hash,ThrowHandler,644164630 -builtin_hash,ReThrowHandler,644164630 -builtin_hash,ReturnHandler,258099816 -builtin_hash,ThrowReferenceErrorIfHoleHandler,-638014566 -builtin_hash,ThrowSuperNotCalledIfHoleHandler,-464286245 -builtin_hash,ThrowSuperAlreadyCalledIfNotHoleHandler,398738172 -builtin_hash,ThrowIfNotSuperConstructorHandler,405467231 -builtin_hash,SwitchOnGeneratorStateHandler,-1026917452 -builtin_hash,SuspendGeneratorHandler,-1054903459 -builtin_hash,ResumeGeneratorHandler,31675678 -builtin_hash,GetIteratorHandler,-711157277 -builtin_hash,ShortStarHandler,-568713113 -builtin_hash,LdarWideHandler,756831261 -builtin_hash,LdaSmiWideHandler,-782553530 -builtin_hash,LdaConstantWideHandler,-1037025209 -builtin_hash,LdaContextSlotWideHandler,1063635436 -builtin_hash,LdaImmutableContextSlotWideHandler,1063635436 -builtin_hash,LdaImmutableCurrentContextSlotWideHandler,847312619 -builtin_hash,StarWideHandler,40106091 -builtin_hash,MovWideHandler,-754864487 -builtin_hash,PushContextWideHandler,-948084621 -builtin_hash,PopContextWideHandler,245106357 -builtin_hash,TestReferenceEqualWideHandler,335478736 -builtin_hash,LdaGlobalWideHandler,-1010718236 -builtin_hash,LdaGlobalInsideTypeofWideHandler,-149141203 -builtin_hash,StaGlobalWideHandler,723033359 -builtin_hash,StaContextSlotWideHandler,731461442 -builtin_hash,StaCurrentContextSlotWideHandler,824213038 -builtin_hash,LdaLookupGlobalSlotWideHandler,-311844471 -builtin_hash,GetNamedPropertyWideHandler,-251017711 -builtin_hash,GetKeyedPropertyWideHandler,-256173219 -builtin_hash,SetNamedPropertyWideHandler,137651840 -builtin_hash,DefineNamedOwnPropertyWideHandler,137651840 -builtin_hash,SetKeyedPropertyWideHandler,-232580858 -builtin_hash,DefineKeyedOwnPropertyWideHandler,-232580858 -builtin_hash,StaInArrayLiteralWideHandler,-232580858 -builtin_hash,AddWideHandler,40447064 -builtin_hash,SubWideHandler,563692284 -builtin_hash,MulWideHandler,42508558 
-builtin_hash,DivWideHandler,-844098316 -builtin_hash,BitwiseOrWideHandler,-328979788 -builtin_hash,BitwiseAndWideHandler,-469583662 -builtin_hash,ShiftLeftWideHandler,-954698376 -builtin_hash,AddSmiWideHandler,683295664 -builtin_hash,SubSmiWideHandler,-413733930 -builtin_hash,MulSmiWideHandler,-838301803 -builtin_hash,DivSmiWideHandler,527308360 -builtin_hash,ModSmiWideHandler,1034309589 -builtin_hash,BitwiseOrSmiWideHandler,492037335 -builtin_hash,BitwiseXorSmiWideHandler,-42936112 -builtin_hash,BitwiseAndSmiWideHandler,-477429885 -builtin_hash,ShiftLeftSmiWideHandler,672571321 -builtin_hash,ShiftRightSmiWideHandler,-67772744 -builtin_hash,ShiftRightLogicalSmiWideHandler,127757354 -builtin_hash,IncWideHandler,-922639629 -builtin_hash,DecWideHandler,-907897874 -builtin_hash,NegateWideHandler,-988497568 -builtin_hash,CallPropertyWideHandler,-698600283 -builtin_hash,CallProperty0WideHandler,1057782406 -builtin_hash,CallProperty1WideHandler,166979495 -builtin_hash,CallProperty2WideHandler,-58239499 -builtin_hash,CallUndefinedReceiverWideHandler,-121494034 -builtin_hash,CallUndefinedReceiver0WideHandler,-36365251 -builtin_hash,CallUndefinedReceiver1WideHandler,-1052313758 -builtin_hash,CallUndefinedReceiver2WideHandler,-469182582 -builtin_hash,CallWithSpreadWideHandler,-698600283 -builtin_hash,ConstructWideHandler,411540742 -builtin_hash,TestEqualWideHandler,-902995058 -builtin_hash,TestEqualStrictWideHandler,852452310 -builtin_hash,TestLessThanWideHandler,814869973 -builtin_hash,TestGreaterThanWideHandler,18887871 -builtin_hash,TestLessThanOrEqualWideHandler,34488528 -builtin_hash,TestGreaterThanOrEqualWideHandler,702527286 -builtin_hash,TestInstanceOfWideHandler,577442592 -builtin_hash,TestInWideHandler,469604978 -builtin_hash,ToNumericWideHandler,-933767737 -builtin_hash,CreateRegExpLiteralWideHandler,-286224018 -builtin_hash,CreateArrayLiteralWideHandler,925645732 -builtin_hash,CreateEmptyArrayLiteralWideHandler,556344123 -builtin_hash,CreateObjectLiteralWideHandler,-878578517 -builtin_hash,CreateClosureWideHandler,-102315205 -builtin_hash,CreateBlockContextWideHandler,392513921 -builtin_hash,CreateFunctionContextWideHandler,179338975 -builtin_hash,JumpLoopWideHandler,-949183832 -builtin_hash,JumpWideHandler,930291898 -builtin_hash,JumpIfToBooleanTrueWideHandler,-205748918 -builtin_hash,JumpIfToBooleanFalseWideHandler,1069905826 -builtin_hash,JumpIfTrueWideHandler,948634550 -builtin_hash,JumpIfFalseWideHandler,948634550 -builtin_hash,SwitchOnSmiNoFeedbackWideHandler,633155127 -builtin_hash,ForInPrepareWideHandler,-215451327 -builtin_hash,ForInNextWideHandler,372934797 -builtin_hash,ThrowReferenceErrorIfHoleWideHandler,298664482 -builtin_hash,GetIteratorWideHandler,43434708 -builtin_hash,LdaSmiExtraWideHandler,-892533764 -builtin_hash,LdaGlobalExtraWideHandler,661412585 -builtin_hash,AddSmiExtraWideHandler,-692969189 -builtin_hash,SubSmiExtraWideHandler,435863200 -builtin_hash,MulSmiExtraWideHandler,1072184980 -builtin_hash,DivSmiExtraWideHandler,-704989643 -builtin_hash,BitwiseOrSmiExtraWideHandler,-957712250 -builtin_hash,BitwiseXorSmiExtraWideHandler,44753591 -builtin_hash,BitwiseAndSmiExtraWideHandler,-563032786 -builtin_hash,CallUndefinedReceiverExtraWideHandler,-267336492 -builtin_hash,CallUndefinedReceiver1ExtraWideHandler,93448265 -builtin_hash,CallUndefinedReceiver2ExtraWideHandler,11673012 +builtin_hash,RecordWriteSaveFP,-613048523 +builtin_hash,RecordWriteIgnoreFP,-613048523 +builtin_hash,EphemeronKeyBarrierSaveFP,-874028499 +builtin_hash,AdaptorWithBuiltinExitFrame,-50443338 
+builtin_hash,Call_ReceiverIsNullOrUndefined_Baseline_Compact,277963652 +builtin_hash,Call_ReceiverIsNotNullOrUndefined_Baseline_Compact,739975018 +builtin_hash,Call_ReceiverIsAny_Baseline_Compact,739975018 +builtin_hash,CallProxy,265720458 +builtin_hash,CallWithSpread,690518666 +builtin_hash,CallWithSpread_Baseline,14944167 +builtin_hash,CallWithArrayLike,-463192950 +builtin_hash,ConstructWithSpread,1026222363 +builtin_hash,ConstructWithSpread_Baseline,-954447059 +builtin_hash,Construct_Baseline,242132798 +builtin_hash,FastNewObject,812115752 +builtin_hash,FastNewClosure,-22842529 +builtin_hash,StringEqual,552928703 +builtin_hash,StringGreaterThan,814990350 +builtin_hash,StringGreaterThanOrEqual,-931415038 +builtin_hash,StringLessThan,-931415038 +builtin_hash,StringLessThanOrEqual,814990350 +builtin_hash,StringSubstring,679034293 +builtin_hash,OrderedHashTableHealIndex,-480837431 +builtin_hash,CompileLazy,-913572652 +builtin_hash,CompileLazyDeoptimizedCode,797435305 +builtin_hash,InstantiateAsmJs,-824208537 +builtin_hash,AllocateInYoungGeneration,-589367571 +builtin_hash,AllocateRegularInYoungGeneration,549206964 +builtin_hash,AllocateRegularInOldGeneration,549206964 +builtin_hash,CopyFastSmiOrObjectElements,-664166620 +builtin_hash,GrowFastDoubleElements,-794207344 +builtin_hash,GrowFastSmiOrObjectElements,-727031326 +builtin_hash,ToNumber,87194511 +builtin_hash,ToNumber_Baseline,-245107362 +builtin_hash,ToNumeric_Baseline,765738096 +builtin_hash,ToNumberConvertBigInt,-809735249 +builtin_hash,Typeof,554300746 +builtin_hash,KeyedLoadIC_PolymorphicName,808866465 +builtin_hash,KeyedStoreIC_Megamorphic,355428822 +builtin_hash,DefineKeyedOwnIC_Megamorphic,-254774567 +builtin_hash,LoadGlobalIC_NoFeedback,567497889 +builtin_hash,LoadIC_FunctionPrototype,440547932 +builtin_hash,LoadIC_StringLength,631981109 +builtin_hash,LoadIC_StringWrapperLength,957410129 +builtin_hash,LoadIC_NoFeedback,-673925088 +builtin_hash,StoreIC_NoFeedback,599149807 +builtin_hash,DefineNamedOwnIC_NoFeedback,-684443605 +builtin_hash,KeyedLoadIC_SloppyArguments,732273933 +builtin_hash,StoreFastElementIC_Standard,301200009 +builtin_hash,StoreFastElementIC_GrowNoTransitionHandleCOW,-894353505 +builtin_hash,StoreFastElementIC_NoTransitionHandleCOW,-684092303 +builtin_hash,ElementsTransitionAndStore_Standard,-313637466 +builtin_hash,ElementsTransitionAndStore_GrowNoTransitionHandleCOW,887654385 +builtin_hash,ElementsTransitionAndStore_NoTransitionHandleCOW,-730942180 +builtin_hash,KeyedHasIC_PolymorphicName,-900991969 +builtin_hash,EnqueueMicrotask,-201594324 +builtin_hash,RunMicrotasks,226014440 +builtin_hash,HasProperty,-179991880 +builtin_hash,DeleteProperty,-417791504 +builtin_hash,SetDataProperties,-676389083 +builtin_hash,ReturnReceiver,-253986889 +builtin_hash,ArrayConstructor,-132723945 +builtin_hash,ArrayConstructorImpl,-940010648 +builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DontOverride,-419508170 +builtin_hash,ArrayNoArgumentConstructor_HoleySmi_DontOverride,-419508170 +builtin_hash,ArrayNoArgumentConstructor_PackedSmi_DisableAllocationSites,605372040 +builtin_hash,ArrayNoArgumentConstructor_Packed_DisableAllocationSites,605372040 +builtin_hash,ArrayNoArgumentConstructor_Holey_DisableAllocationSites,605372040 +builtin_hash,ArrayNoArgumentConstructor_PackedDouble_DisableAllocationSites,-118459699 +builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DontOverride,-533026482 +builtin_hash,ArraySingleArgumentConstructor_HoleySmi_DisableAllocationSites,276667194 
+builtin_hash,ArraySingleArgumentConstructor_Holey_DisableAllocationSites,276667194 +builtin_hash,ArraySingleArgumentConstructor_HoleyDouble_DisableAllocationSites,-533601049 +builtin_hash,ArrayIncludesSmi,-836179006 +builtin_hash,ArrayIncludesSmiOrObject,162670788 +builtin_hash,ArrayIncludes,508610041 +builtin_hash,ArrayIndexOfSmi,-144958716 +builtin_hash,ArrayIndexOfSmiOrObject,-560665373 +builtin_hash,ArrayIndexOf,659016893 +builtin_hash,ArrayPrototypePop,-672159034 +builtin_hash,ArrayPrototypePush,-828516926 +builtin_hash,CloneFastJSArray,330962956 +builtin_hash,CloneFastJSArrayFillingHoles,-114093580 +builtin_hash,ExtractFastJSArray,-899029625 +builtin_hash,ArrayPrototypeEntries,-846534049 +builtin_hash,ArrayPrototypeKeys,-432117890 +builtin_hash,ArrayPrototypeValues,-846534049 +builtin_hash,ArrayIteratorPrototypeNext,167355436 +builtin_hash,AsyncFunctionEnter,-860415031 +builtin_hash,AsyncFunctionResolve,910332485 +builtin_hash,AsyncFunctionAwaitCaught,-619125883 +builtin_hash,AsyncFunctionAwaitUncaught,-619125883 +builtin_hash,AsyncFunctionAwaitResolveClosure,-441313583 +builtin_hash,DatePrototypeGetDate,596885245 +builtin_hash,DatePrototypeGetDay,596885245 +builtin_hash,DatePrototypeGetFullYear,596885245 +builtin_hash,DatePrototypeGetHours,596885245 +builtin_hash,DatePrototypeGetMilliseconds,-147735130 +builtin_hash,DatePrototypeGetMinutes,596885245 +builtin_hash,DatePrototypeGetMonth,596885245 +builtin_hash,DatePrototypeGetSeconds,596885245 +builtin_hash,DatePrototypeGetTime,842589209 +builtin_hash,DatePrototypeGetTimezoneOffset,-147735130 +builtin_hash,DatePrototypeValueOf,842589209 +builtin_hash,DatePrototypeToPrimitive,-469261030 +builtin_hash,CreateIterResultObject,-236239497 +builtin_hash,CreateGeneratorObject,-989601020 +builtin_hash,GeneratorPrototypeNext,-532167070 +builtin_hash,GeneratorPrototypeReturn,204056688 +builtin_hash,SuspendGeneratorBaseline,-716242694 +builtin_hash,ResumeGeneratorBaseline,600643426 +builtin_hash,GlobalIsFinite,-28742852 +builtin_hash,GlobalIsNaN,-414427038 +builtin_hash,LoadIC,-1028921753 +builtin_hash,LoadIC_Megamorphic,604208967 +builtin_hash,LoadIC_Noninlined,-411987614 +builtin_hash,LoadICTrampoline,800274028 +builtin_hash,LoadICBaseline,470944725 +builtin_hash,LoadICTrampoline_Megamorphic,800274028 +builtin_hash,LoadSuperIC,-145652312 +builtin_hash,LoadSuperICBaseline,-463763660 +builtin_hash,KeyedLoadIC,-400473566 +builtin_hash,KeyedLoadIC_Megamorphic,41817838 +builtin_hash,KeyedLoadICTrampoline,800274028 +builtin_hash,KeyedLoadICBaseline,470944725 +builtin_hash,KeyedLoadICTrampoline_Megamorphic,800274028 +builtin_hash,StoreGlobalIC,-985598929 +builtin_hash,StoreGlobalICTrampoline,800274028 +builtin_hash,StoreGlobalICBaseline,470944725 +builtin_hash,StoreIC,107868822 +builtin_hash,StoreICTrampoline,515324941 +builtin_hash,StoreICBaseline,-463763660 +builtin_hash,DefineNamedOwnIC,293425336 +builtin_hash,DefineNamedOwnICBaseline,-463763660 +builtin_hash,KeyedStoreIC,-634858106 +builtin_hash,KeyedStoreICTrampoline,515324941 +builtin_hash,KeyedStoreICBaseline,-463763660 +builtin_hash,DefineKeyedOwnIC,-567510982 +builtin_hash,StoreInArrayLiteralIC,336733574 +builtin_hash,StoreInArrayLiteralICBaseline,-463763660 +builtin_hash,LoadGlobalIC,-994002095 +builtin_hash,LoadGlobalICInsideTypeof,131610143 +builtin_hash,LoadGlobalICTrampoline,-356577892 +builtin_hash,LoadGlobalICBaseline,-87390287 +builtin_hash,LoadGlobalICInsideTypeofTrampoline,-356577892 +builtin_hash,LoadGlobalICInsideTypeofBaseline,-87390287 
+builtin_hash,LookupGlobalICBaseline,195819709 +builtin_hash,LookupGlobalICInsideTypeofBaseline,195819709 +builtin_hash,KeyedHasIC,-581893205 +builtin_hash,KeyedHasICBaseline,470944725 +builtin_hash,KeyedHasIC_Megamorphic,-179991880 +builtin_hash,IterableToList,-847583682 +builtin_hash,IterableToListWithSymbolLookup,639766325 +builtin_hash,IterableToListMayPreserveHoles,915672519 +builtin_hash,FindOrderedHashMapEntry,257985360 +builtin_hash,MapConstructor,173900465 +builtin_hash,MapPrototypeSet,-909373880 +builtin_hash,MapPrototypeDelete,-182536468 +builtin_hash,MapPrototypeGet,-10028336 +builtin_hash,MapPrototypeHas,-139761843 +builtin_hash,MapPrototypeEntries,-344495525 +builtin_hash,MapPrototypeGetSize,1002199563 +builtin_hash,MapPrototypeForEach,666422496 +builtin_hash,MapPrototypeKeys,-344495525 +builtin_hash,MapPrototypeValues,-344495525 +builtin_hash,MapIteratorPrototypeNext,824163271 +builtin_hash,MapIteratorToList,-171739571 +builtin_hash,SameValueNumbersOnly,-385008716 +builtin_hash,Add_Baseline,-279802821 +builtin_hash,AddSmi_Baseline,-180294218 +builtin_hash,Subtract_Baseline,422911741 +builtin_hash,SubtractSmi_Baseline,593938918 +builtin_hash,Multiply_Baseline,-390820476 +builtin_hash,MultiplySmi_Baseline,325873812 +builtin_hash,Divide_Baseline,-303206156 +builtin_hash,DivideSmi_Baseline,-760734875 +builtin_hash,Modulus_Baseline,-56419644 +builtin_hash,ModulusSmi_Baseline,-723448 +builtin_hash,Exponentiate_Baseline,-897267514 +builtin_hash,BitwiseAnd_Baseline,368212144 +builtin_hash,BitwiseAndSmi_Baseline,-1040430105 +builtin_hash,BitwiseOr_Baseline,-468458668 +builtin_hash,BitwiseOrSmi_Baseline,688726246 +builtin_hash,BitwiseXor_Baseline,-113074811 +builtin_hash,BitwiseXorSmi_Baseline,601401020 +builtin_hash,ShiftLeft_Baseline,-775732772 +builtin_hash,ShiftLeftSmi_Baseline,-78665210 +builtin_hash,ShiftRight_Baseline,748634885 +builtin_hash,ShiftRightSmi_Baseline,886941283 +builtin_hash,ShiftRightLogical_Baseline,561208446 +builtin_hash,ShiftRightLogicalSmi_Baseline,-31850172 +builtin_hash,Add_WithFeedback,-713508648 +builtin_hash,Subtract_WithFeedback,-1006518356 +builtin_hash,Modulus_WithFeedback,673708690 +builtin_hash,BitwiseOr_WithFeedback,-71811840 +builtin_hash,Equal_Baseline,-449571287 +builtin_hash,StrictEqual_Baseline,-311709296 +builtin_hash,LessThan_Baseline,-1041710075 +builtin_hash,GreaterThan_Baseline,763769306 +builtin_hash,LessThanOrEqual_Baseline,-289600196 +builtin_hash,GreaterThanOrEqual_Baseline,-964000144 +builtin_hash,Equal_WithFeedback,-804822195 +builtin_hash,StrictEqual_WithFeedback,316409561 +builtin_hash,LessThan_WithFeedback,-1041748847 +builtin_hash,GreaterThan_WithFeedback,208079969 +builtin_hash,GreaterThanOrEqual_WithFeedback,50039232 +builtin_hash,BitwiseNot_Baseline,574212378 +builtin_hash,Decrement_Baseline,740961552 +builtin_hash,Increment_Baseline,-482954167 +builtin_hash,Negate_Baseline,257429052 +builtin_hash,ObjectAssign,415745977 +builtin_hash,ObjectCreate,152352347 +builtin_hash,ObjectEntries,-267361188 +builtin_hash,ObjectGetOwnPropertyDescriptor,-1005546404 +builtin_hash,ObjectGetOwnPropertyNames,-10249982 +builtin_hash,ObjectIs,947042700 +builtin_hash,ObjectKeys,276395735 +builtin_hash,ObjectPrototypeHasOwnProperty,-366540189 +builtin_hash,ObjectToString,-680252272 +builtin_hash,InstanceOf_WithFeedback,-814385450 +builtin_hash,InstanceOf_Baseline,-567095434 +builtin_hash,ForInEnumerate,329908035 +builtin_hash,ForInPrepare,731557174 +builtin_hash,ForInFilter,884185984 +builtin_hash,RegExpConstructor,-1029370119 
+builtin_hash,RegExpExecAtom,181372809 +builtin_hash,RegExpExecInternal,317900879 +builtin_hash,FindOrderedHashSetEntry,482436035 +builtin_hash,SetConstructor,692235107 +builtin_hash,SetPrototypeHas,-139761843 +builtin_hash,SetPrototypeAdd,-596680080 +builtin_hash,SetPrototypeDelete,331633635 +builtin_hash,SetPrototypeEntries,-344495525 +builtin_hash,SetPrototypeGetSize,1002199563 +builtin_hash,SetPrototypeForEach,97244170 +builtin_hash,SetPrototypeValues,-344495525 +builtin_hash,SetIteratorPrototypeNext,-441725951 +builtin_hash,SetOrSetIteratorToList,623342942 +builtin_hash,StringFromCharCode,-123751380 +builtin_hash,StringPrototypeReplace,-921072145 +builtin_hash,StringPrototypeSplit,-419686814 +builtin_hash,TypedArrayConstructor,32466415 +builtin_hash,TypedArrayPrototypeByteLength,864895308 +builtin_hash,TypedArrayPrototypeLength,539604699 +builtin_hash,WeakMapConstructor,814764494 +builtin_hash,WeakMapLookupHashIndex,-464287185 +builtin_hash,WeakMapGet,925651553 +builtin_hash,WeakMapPrototypeHas,947465532 +builtin_hash,WeakMapPrototypeSet,-976760951 +builtin_hash,WeakSetConstructor,694246453 +builtin_hash,WeakSetPrototypeHas,947465532 +builtin_hash,WeakSetPrototypeAdd,-160318733 +builtin_hash,WeakCollectionSet,578996244 +builtin_hash,AsyncGeneratorResolve,-83028412 +builtin_hash,AsyncGeneratorYieldWithAwait,-366463177 +builtin_hash,AsyncGeneratorResumeNext,220127321 +builtin_hash,AsyncGeneratorPrototypeNext,1069549757 +builtin_hash,AsyncGeneratorAwaitUncaught,-628599896 +builtin_hash,AsyncGeneratorAwaitResolveClosure,1062097477 +builtin_hash,AsyncGeneratorYieldWithAwaitResolveClosure,793122606 +builtin_hash,StringAdd_CheckNone,113370168 +builtin_hash,SubString,895503589 +builtin_hash,GetProperty,1052862169 +builtin_hash,GetPropertyWithReceiver,1045827042 +builtin_hash,SetProperty,908643608 +builtin_hash,CreateDataProperty,-314133834 +builtin_hash,ArrayPrototypeConcat,-557766770 +builtin_hash,ArrayEvery,-740699383 +builtin_hash,ArrayFilterLoopLazyDeoptContinuation,-463893516 +builtin_hash,ArrayFilterLoopContinuation,-636224543 +builtin_hash,ArrayFilter,-1006837550 +builtin_hash,ArrayPrototypeFind,358067331 +builtin_hash,ArrayForEachLoopLazyDeoptContinuation,-227856192 +builtin_hash,ArrayForEachLoopContinuation,498815593 +builtin_hash,ArrayForEach,-465472618 +builtin_hash,ArrayFrom,559791774 +builtin_hash,ArrayIsArray,556045869 +builtin_hash,LoadJoinElement_FastSmiOrObjectElements_0,4464260 +builtin_hash,LoadJoinElement_FastDoubleElements_0,-669389930 +builtin_hash,JoinStackPush,932509525 +builtin_hash,JoinStackPop,97051696 +builtin_hash,ArrayPrototypeJoin,638420418 +builtin_hash,ArrayPrototypeToString,571363693 +builtin_hash,ArrayPrototypeLastIndexOf,-262998450 +builtin_hash,ArrayMapLoopLazyDeoptContinuation,992596139 +builtin_hash,ArrayMapLoopContinuation,852679435 +builtin_hash,ArrayMap,237015696 +builtin_hash,ArrayReduceLoopLazyDeoptContinuation,-1021360101 +builtin_hash,ArrayReduceLoopContinuation,736239909 +builtin_hash,ArrayReduce,550306639 +builtin_hash,ArrayPrototypeReverse,-848939503 +builtin_hash,ArrayPrototypeShift,510698980 +builtin_hash,ArrayPrototypeSlice,-226926113 +builtin_hash,ArraySome,616986483 +builtin_hash,ArrayPrototypeSplice,318122997 +builtin_hash,ArrayPrototypeUnshift,942293281 +builtin_hash,ArrayBufferPrototypeGetByteLength,8155127 +builtin_hash,ArrayBufferIsView,-92420774 +builtin_hash,ToInteger,-772950786 +builtin_hash,FastCreateDataProperty,683077437 +builtin_hash,BooleanConstructor,104847507 +builtin_hash,BooleanPrototypeToString,496844333 
+builtin_hash,ToString,-492204321 +builtin_hash,StringPrototypeToString,232130928 +builtin_hash,StringPrototypeValueOf,232130928 +builtin_hash,StringPrototypeCharAt,-493882295 +builtin_hash,StringPrototypeCharCodeAt,-70476469 +builtin_hash,StringPrototypeCodePointAt,958343749 +builtin_hash,StringPrototypeConcat,122908250 +builtin_hash,StringConstructor,36941296 +builtin_hash,StringAddConvertLeft,895631940 +builtin_hash,StringAddConvertRight,620894196 +builtin_hash,StringCharAt,-771156702 +builtin_hash,FastNewClosureBaseline,-345301780 +builtin_hash,FastNewFunctionContextFunction,393493853 +builtin_hash,CreateRegExpLiteral,1052274841 +builtin_hash,CreateShallowArrayLiteral,758569216 +builtin_hash,CreateEmptyArrayLiteral,-244361805 +builtin_hash,CreateShallowObjectLiteral,429596211 +builtin_hash,ObjectConstructor,792071103 +builtin_hash,CreateEmptyLiteralObject,792021411 +builtin_hash,NumberConstructor,-545912408 +builtin_hash,StringToNumber,-567475001 +builtin_hash,NonNumberToNumber,-75339598 +builtin_hash,NonNumberToNumeric,-163611573 +builtin_hash,ToNumeric,1067114169 +builtin_hash,NumberToString,808056721 +builtin_hash,ToBoolean,474893826 +builtin_hash,ToBooleanForBaselineJump,-1000387172 +builtin_hash,ToLength,-1031135247 +builtin_hash,ToName,-893589751 +builtin_hash,ToObject,-995611522 +builtin_hash,NonPrimitiveToPrimitive_Default,-741936834 +builtin_hash,NonPrimitiveToPrimitive_Number,-741936834 +builtin_hash,NonPrimitiveToPrimitive_String,-741936834 +builtin_hash,OrdinaryToPrimitive_Number,940682530 +builtin_hash,OrdinaryToPrimitive_String,940682530 +builtin_hash,DataViewPrototypeGetByteLength,-344862281 +builtin_hash,DataViewPrototypeGetFloat64,-710736378 +builtin_hash,DataViewPrototypeSetUint32,561326289 +builtin_hash,DataViewPrototypeSetFloat64,224815643 +builtin_hash,FunctionPrototypeHasInstance,-159239165 +builtin_hash,FastFunctionPrototypeBind,-835190429 +builtin_hash,ForInNext,-628108871 +builtin_hash,GetIteratorWithFeedback,412632852 +builtin_hash,GetIteratorBaseline,878549031 +builtin_hash,CallIteratorWithFeedback,-173921836 +builtin_hash,MathAbs,-418374171 +builtin_hash,MathCeil,-83433093 +builtin_hash,MathFloor,963617939 +builtin_hash,MathRound,739741009 +builtin_hash,MathPow,510691647 +builtin_hash,MathMax,45115699 +builtin_hash,MathMin,-996382942 +builtin_hash,MathAsin,261451622 +builtin_hash,MathAtan2,605332815 +builtin_hash,MathCos,515079504 +builtin_hash,MathExp,551351922 +builtin_hash,MathFround,564706237 +builtin_hash,MathImul,685265173 +builtin_hash,MathLog,-553256829 +builtin_hash,MathSin,302395292 +builtin_hash,MathSign,611819739 +builtin_hash,MathSqrt,55107225 +builtin_hash,MathTan,-332405887 +builtin_hash,MathTanh,939045985 +builtin_hash,MathRandom,-504157126 +builtin_hash,NumberPrototypeToString,145247584 +builtin_hash,NumberIsInteger,632098621 +builtin_hash,NumberIsNaN,343619286 +builtin_hash,NumberParseFloat,-745268146 +builtin_hash,ParseInt,423449565 +builtin_hash,NumberParseInt,348325306 +builtin_hash,Add,-712082634 +builtin_hash,Subtract,860006498 +builtin_hash,Multiply,966938552 +builtin_hash,Divide,501339465 +builtin_hash,Modulus,556264773 +builtin_hash,CreateObjectWithoutProperties,911390056 +builtin_hash,ObjectIsExtensible,-376770424 +builtin_hash,ObjectPreventExtensions,-675757061 +builtin_hash,ObjectGetPrototypeOf,-694816240 +builtin_hash,ObjectSetPrototypeOf,-335823538 +builtin_hash,ObjectPrototypeToString,158685312 +builtin_hash,ObjectPrototypeValueOf,-993024104 +builtin_hash,FulfillPromise,-68874675 +builtin_hash,NewPromiseCapability,-880232666 
+builtin_hash,PromiseCapabilityDefaultResolve,694927325 +builtin_hash,PerformPromiseThen,-238303189 +builtin_hash,PromiseAll,-121414633 +builtin_hash,PromiseAllResolveElementClosure,797273436 +builtin_hash,PromiseConstructor,-424149894 +builtin_hash,PromisePrototypeCatch,235262026 +builtin_hash,PromiseFulfillReactionJob,927825363 +builtin_hash,PromiseResolveTrampoline,-549629094 +builtin_hash,PromiseResolve,-366429795 +builtin_hash,ResolvePromise,526061379 +builtin_hash,PromisePrototypeThen,959282415 +builtin_hash,PromiseResolveThenableJob,-977786068 +builtin_hash,ProxyConstructor,-54504231 +builtin_hash,ProxyGetProperty,-692505715 +builtin_hash,ProxyIsExtensible,-120987472 +builtin_hash,ProxyPreventExtensions,739592105 +builtin_hash,ReflectGet,1006327680 +builtin_hash,ReflectHas,-549629094 +builtin_hash,RegExpPrototypeExec,866694176 +builtin_hash,RegExpMatchFast,556779044 +builtin_hash,RegExpReplace,1037671691 +builtin_hash,RegExpPrototypeReplace,-488505709 +builtin_hash,RegExpSearchFast,744647901 +builtin_hash,RegExpPrototypeSourceGetter,-69902772 +builtin_hash,RegExpSplit,418335022 +builtin_hash,RegExpPrototypeTest,-893509849 +builtin_hash,RegExpPrototypeTestFast,-541676085 +builtin_hash,RegExpPrototypeGlobalGetter,612394650 +builtin_hash,RegExpPrototypeIgnoreCaseGetter,-595775382 +builtin_hash,RegExpPrototypeMultilineGetter,368200363 +builtin_hash,RegExpPrototypeHasIndicesGetter,99570183 +builtin_hash,RegExpPrototypeDotAllGetter,99570183 +builtin_hash,RegExpPrototypeStickyGetter,471291660 +builtin_hash,RegExpPrototypeUnicodeGetter,471291660 +builtin_hash,RegExpPrototypeFlagsGetter,-493351549 +builtin_hash,StringPrototypeEndsWith,-140669855 +builtin_hash,StringPrototypeIncludes,-538712449 +builtin_hash,StringPrototypeIndexOf,-279080867 +builtin_hash,StringPrototypeIterator,-906814404 +builtin_hash,StringIteratorPrototypeNext,-459023719 +builtin_hash,StringPrototypeMatch,950777323 +builtin_hash,StringPrototypeSearch,950777323 +builtin_hash,StringRepeat,333496990 +builtin_hash,StringPrototypeSlice,147923310 +builtin_hash,StringPrototypeStartsWith,-916453690 +builtin_hash,StringPrototypeSubstr,93046303 +builtin_hash,StringPrototypeSubstring,-486167723 +builtin_hash,StringPrototypeTrim,-537839064 +builtin_hash,SymbolPrototypeToString,-331094885 +builtin_hash,CreateTypedArray,946007034 +builtin_hash,TypedArrayFrom,-383816322 +builtin_hash,TypedArrayPrototypeSet,183639399 +builtin_hash,TypedArrayPrototypeSubArray,-654743264 +builtin_hash,NewSloppyArgumentsElements,-733955106 +builtin_hash,NewStrictArgumentsElements,27861461 +builtin_hash,NewRestArgumentsElements,-158196826 +builtin_hash,FastNewSloppyArguments,701807193 +builtin_hash,FastNewStrictArguments,-400637158 +builtin_hash,FastNewRestArguments,771398605 +builtin_hash,StringSlowFlatten,758688335 +builtin_hash,StringIndexOf,893861646 +builtin_hash,Load_FastSmiElements_0,41377987 +builtin_hash,Load_FastObjectElements_0,41377987 +builtin_hash,Store_FastSmiElements_0,987491586 +builtin_hash,Store_FastObjectElements_0,-907039137 +builtin_hash,SortCompareDefault,-712046902 +builtin_hash,SortCompareUserFn,-498446944 +builtin_hash,Copy,1005972100 +builtin_hash,MergeAt,-238184884 +builtin_hash,GallopLeft,-228579918 +builtin_hash,GallopRight,508662767 +builtin_hash,ArrayTimSort,-584574007 +builtin_hash,ArrayPrototypeSort,-446345392 +builtin_hash,StringFastLocaleCompare,-805723901 +builtin_hash,WasmInt32ToHeapNumber,186218317 +builtin_hash,WasmTaggedNonSmiToInt32,644195797 +builtin_hash,WasmTriggerTierUp,-448026998 
+builtin_hash,WasmStackGuard,929375954 +builtin_hash,CanUseSameAccessor_FastSmiElements_0,333215288 +builtin_hash,CanUseSameAccessor_FastObjectElements_0,333215288 +builtin_hash,StringPrototypeToLowerCaseIntl,325118773 +builtin_hash,StringToLowerCaseIntl,729618594 +builtin_hash,WideHandler,-985531040 +builtin_hash,ExtraWideHandler,-985531040 +builtin_hash,LdarHandler,1066069071 +builtin_hash,LdaZeroHandler,697098880 +builtin_hash,LdaSmiHandler,-92763154 +builtin_hash,LdaUndefinedHandler,94159659 +builtin_hash,LdaNullHandler,94159659 +builtin_hash,LdaTheHoleHandler,94159659 +builtin_hash,LdaTrueHandler,66190034 +builtin_hash,LdaFalseHandler,66190034 +builtin_hash,LdaConstantHandler,-234672240 +builtin_hash,LdaContextSlotHandler,999512170 +builtin_hash,LdaImmutableContextSlotHandler,999512170 +builtin_hash,LdaCurrentContextSlotHandler,-705029165 +builtin_hash,LdaImmutableCurrentContextSlotHandler,-705029165 +builtin_hash,StarHandler,-825981541 +builtin_hash,MovHandler,-222623368 +builtin_hash,PushContextHandler,239039195 +builtin_hash,PopContextHandler,663403390 +builtin_hash,TestReferenceEqualHandler,107959616 +builtin_hash,TestUndetectableHandler,768306054 +builtin_hash,TestNullHandler,317848228 +builtin_hash,TestUndefinedHandler,317848228 +builtin_hash,TestTypeOfHandler,-585531608 +builtin_hash,LdaGlobalHandler,680542536 +builtin_hash,LdaGlobalInsideTypeofHandler,-812384965 +builtin_hash,StaGlobalHandler,-849976646 +builtin_hash,StaContextSlotHandler,-642236485 +builtin_hash,StaCurrentContextSlotHandler,515612512 +builtin_hash,LdaLookupGlobalSlotHandler,328181263 +builtin_hash,LdaLookupGlobalSlotInsideTypeofHandler,-152487163 +builtin_hash,StaLookupSlotHandler,1043986971 +builtin_hash,GetNamedPropertyHandler,-918198086 +builtin_hash,GetNamedPropertyFromSuperHandler,-605958764 +builtin_hash,GetKeyedPropertyHandler,-368783501 +builtin_hash,SetNamedPropertyHandler,512867069 +builtin_hash,DefineNamedOwnPropertyHandler,512867069 +builtin_hash,SetKeyedPropertyHandler,-529790650 +builtin_hash,DefineKeyedOwnPropertyHandler,-529790650 +builtin_hash,StaInArrayLiteralHandler,-529790650 +builtin_hash,DefineKeyedOwnPropertyInLiteralHandler,519916231 +builtin_hash,AddHandler,-1014428769 +builtin_hash,SubHandler,-971645828 +builtin_hash,MulHandler,-1072793455 +builtin_hash,DivHandler,-162323805 +builtin_hash,ModHandler,-485795098 +builtin_hash,ExpHandler,795159955 +builtin_hash,BitwiseOrHandler,-974394049 +builtin_hash,BitwiseXorHandler,580834482 +builtin_hash,BitwiseAndHandler,614318128 +builtin_hash,ShiftLeftHandler,-423182377 +builtin_hash,ShiftRightHandler,-255081510 +builtin_hash,ShiftRightLogicalHandler,735938776 +builtin_hash,AddSmiHandler,107839307 +builtin_hash,SubSmiHandler,-363881533 +builtin_hash,MulSmiHandler,169761579 +builtin_hash,DivSmiHandler,-681265328 +builtin_hash,ModSmiHandler,861935655 +builtin_hash,BitwiseOrSmiHandler,-680303745 +builtin_hash,BitwiseXorSmiHandler,576458108 +builtin_hash,BitwiseAndSmiHandler,-994511503 +builtin_hash,ShiftLeftSmiHandler,-728693655 +builtin_hash,ShiftRightSmiHandler,975905832 +builtin_hash,ShiftRightLogicalSmiHandler,686146238 +builtin_hash,IncHandler,117772531 +builtin_hash,DecHandler,-691015839 +builtin_hash,NegateHandler,212889736 +builtin_hash,BitwiseNotHandler,-960473652 +builtin_hash,ToBooleanLogicalNotHandler,-997041363 +builtin_hash,LogicalNotHandler,-404436240 +builtin_hash,TypeOfHandler,-868029172 +builtin_hash,DeletePropertyStrictHandler,-310645655 +builtin_hash,DeletePropertySloppyHandler,-884621901 
+builtin_hash,GetSuperConstructorHandler,-336144805 +builtin_hash,CallAnyReceiverHandler,-483788286 +builtin_hash,CallPropertyHandler,-483788286 +builtin_hash,CallProperty0Handler,234175094 +builtin_hash,CallProperty1Handler,354307341 +builtin_hash,CallProperty2Handler,968021051 +builtin_hash,CallUndefinedReceiverHandler,472718464 +builtin_hash,CallUndefinedReceiver0Handler,1020191467 +builtin_hash,CallUndefinedReceiver1Handler,785762305 +builtin_hash,CallUndefinedReceiver2Handler,-921863582 +builtin_hash,CallWithSpreadHandler,-483788286 +builtin_hash,CallRuntimeHandler,575543766 +builtin_hash,CallJSRuntimeHandler,-279970155 +builtin_hash,InvokeIntrinsicHandler,315814934 +builtin_hash,ConstructHandler,750653559 +builtin_hash,ConstructWithSpreadHandler,-950529667 +builtin_hash,TestEqualHandler,469957169 +builtin_hash,TestEqualStrictHandler,774972588 +builtin_hash,TestLessThanHandler,876731233 +builtin_hash,TestGreaterThanHandler,854370589 +builtin_hash,TestLessThanOrEqualHandler,-616820445 +builtin_hash,TestGreaterThanOrEqualHandler,128578007 +builtin_hash,TestInstanceOfHandler,437146777 +builtin_hash,TestInHandler,-595986293 +builtin_hash,ToNameHandler,-388837341 +builtin_hash,ToNumberHandler,172727215 +builtin_hash,ToNumericHandler,518340123 +builtin_hash,ToObjectHandler,-388837341 +builtin_hash,ToStringHandler,-736791596 +builtin_hash,CreateRegExpLiteralHandler,-387261303 +builtin_hash,CreateArrayLiteralHandler,544722821 +builtin_hash,CreateArrayFromIterableHandler,-590862374 +builtin_hash,CreateEmptyArrayLiteralHandler,-215104396 +builtin_hash,CreateObjectLiteralHandler,536615992 +builtin_hash,CreateEmptyObjectLiteralHandler,810635729 +builtin_hash,CreateClosureHandler,-899658211 +builtin_hash,CreateBlockContextHandler,-125556632 +builtin_hash,CreateCatchContextHandler,551209828 +builtin_hash,CreateFunctionContextHandler,-65684761 +builtin_hash,CreateMappedArgumentsHandler,67709625 +builtin_hash,CreateUnmappedArgumentsHandler,608258279 +builtin_hash,CreateRestParameterHandler,1042430952 +builtin_hash,JumpLoopHandler,77742379 +builtin_hash,JumpHandler,-420188660 +builtin_hash,JumpConstantHandler,-998825364 +builtin_hash,JumpIfUndefinedConstantHandler,-326209739 +builtin_hash,JumpIfNotUndefinedConstantHandler,37208057 +builtin_hash,JumpIfUndefinedOrNullConstantHandler,-104381115 +builtin_hash,JumpIfTrueConstantHandler,-326209739 +builtin_hash,JumpIfFalseConstantHandler,-326209739 +builtin_hash,JumpIfToBooleanTrueConstantHandler,-234142841 +builtin_hash,JumpIfToBooleanFalseConstantHandler,-602774868 +builtin_hash,JumpIfToBooleanTrueHandler,-297635325 +builtin_hash,JumpIfToBooleanFalseHandler,1015367976 +builtin_hash,JumpIfTrueHandler,862147447 +builtin_hash,JumpIfFalseHandler,862147447 +builtin_hash,JumpIfNullHandler,862147447 +builtin_hash,JumpIfNotNullHandler,-481058680 +builtin_hash,JumpIfUndefinedHandler,862147447 +builtin_hash,JumpIfNotUndefinedHandler,-481058680 +builtin_hash,JumpIfUndefinedOrNullHandler,14126870 +builtin_hash,JumpIfJSReceiverHandler,-723850389 +builtin_hash,SwitchOnSmiNoFeedbackHandler,-902670490 +builtin_hash,ForInEnumerateHandler,-322331924 +builtin_hash,ForInPrepareHandler,20034175 +builtin_hash,ForInContinueHandler,827732360 +builtin_hash,ForInNextHandler,119110335 +builtin_hash,ForInStepHandler,757646701 +builtin_hash,SetPendingMessageHandler,996401409 +builtin_hash,ThrowHandler,122680912 +builtin_hash,ReThrowHandler,122680912 +builtin_hash,ReturnHandler,47039723 +builtin_hash,ThrowReferenceErrorIfHoleHandler,-342650955 
+builtin_hash,ThrowSuperNotCalledIfHoleHandler,-285583864 +builtin_hash,ThrowSuperAlreadyCalledIfNotHoleHandler,-827541184 +builtin_hash,ThrowIfNotSuperConstructorHandler,1018623070 +builtin_hash,SwitchOnGeneratorStateHandler,717471818 +builtin_hash,SuspendGeneratorHandler,547514791 +builtin_hash,ResumeGeneratorHandler,-860485588 +builtin_hash,GetIteratorHandler,-6630463 +builtin_hash,ShortStarHandler,721894508 +builtin_hash,LdarWideHandler,-978392409 +builtin_hash,LdaSmiWideHandler,-366656871 +builtin_hash,LdaConstantWideHandler,972813981 +builtin_hash,LdaContextSlotWideHandler,628329787 +builtin_hash,LdaImmutableContextSlotWideHandler,628329787 +builtin_hash,LdaImmutableCurrentContextSlotWideHandler,489858159 +builtin_hash,StarWideHandler,-1038662456 +builtin_hash,MovWideHandler,483803273 +builtin_hash,PushContextWideHandler,216419588 +builtin_hash,PopContextWideHandler,272986324 +builtin_hash,TestReferenceEqualWideHandler,-4739833 +builtin_hash,LdaGlobalWideHandler,-434470564 +builtin_hash,LdaGlobalInsideTypeofWideHandler,888730933 +builtin_hash,StaGlobalWideHandler,459118950 +builtin_hash,StaContextSlotWideHandler,888275701 +builtin_hash,StaCurrentContextSlotWideHandler,-317584552 +builtin_hash,LdaLookupGlobalSlotWideHandler,1026575020 +builtin_hash,GetNamedPropertyWideHandler,664403992 +builtin_hash,GetKeyedPropertyWideHandler,322108853 +builtin_hash,SetNamedPropertyWideHandler,784668777 +builtin_hash,DefineNamedOwnPropertyWideHandler,784668777 +builtin_hash,SetKeyedPropertyWideHandler,1015904043 +builtin_hash,DefineKeyedOwnPropertyWideHandler,1015904043 +builtin_hash,StaInArrayLiteralWideHandler,1015904043 +builtin_hash,AddWideHandler,1006647977 +builtin_hash,SubWideHandler,212325320 +builtin_hash,MulWideHandler,-922622067 +builtin_hash,DivWideHandler,145054418 +builtin_hash,BitwiseOrWideHandler,805505097 +builtin_hash,BitwiseAndWideHandler,563101073 +builtin_hash,ShiftLeftWideHandler,448918085 +builtin_hash,AddSmiWideHandler,-135072104 +builtin_hash,SubSmiWideHandler,-169078418 +builtin_hash,MulSmiWideHandler,793690226 +builtin_hash,DivSmiWideHandler,-657180043 +builtin_hash,ModSmiWideHandler,335754550 +builtin_hash,BitwiseOrSmiWideHandler,-1067934836 +builtin_hash,BitwiseXorSmiWideHandler,-668709153 +builtin_hash,BitwiseAndSmiWideHandler,-90084544 +builtin_hash,ShiftLeftSmiWideHandler,-381247703 +builtin_hash,ShiftRightSmiWideHandler,-38676513 +builtin_hash,ShiftRightLogicalSmiWideHandler,-1026231042 +builtin_hash,IncWideHandler,389395178 +builtin_hash,DecWideHandler,1062128797 +builtin_hash,NegateWideHandler,375542705 +builtin_hash,CallPropertyWideHandler,479651507 +builtin_hash,CallProperty0WideHandler,402451236 +builtin_hash,CallProperty1WideHandler,864866147 +builtin_hash,CallProperty2WideHandler,672960581 +builtin_hash,CallUndefinedReceiverWideHandler,633606056 +builtin_hash,CallUndefinedReceiver0WideHandler,-782323787 +builtin_hash,CallUndefinedReceiver1WideHandler,52355318 +builtin_hash,CallUndefinedReceiver2WideHandler,297430331 +builtin_hash,CallWithSpreadWideHandler,479651507 +builtin_hash,ConstructWideHandler,-923801363 +builtin_hash,TestEqualWideHandler,745861994 +builtin_hash,TestEqualStrictWideHandler,982796365 +builtin_hash,TestLessThanWideHandler,665221830 +builtin_hash,TestGreaterThanWideHandler,776130121 +builtin_hash,TestLessThanOrEqualWideHandler,-299580558 +builtin_hash,TestGreaterThanOrEqualWideHandler,-356242933 +builtin_hash,TestInstanceOfWideHandler,406240218 +builtin_hash,TestInWideHandler,-754759119 +builtin_hash,ToNumericWideHandler,1034444948 
+builtin_hash,CreateRegExpLiteralWideHandler,1015965077 +builtin_hash,CreateArrayLiteralWideHandler,238187057 +builtin_hash,CreateEmptyArrayLiteralWideHandler,-21075025 +builtin_hash,CreateObjectLiteralWideHandler,570835533 +builtin_hash,CreateClosureWideHandler,912422636 +builtin_hash,CreateBlockContextWideHandler,499748521 +builtin_hash,CreateFunctionContextWideHandler,-887672919 +builtin_hash,JumpLoopWideHandler,714317010 +builtin_hash,JumpWideHandler,-420188660 +builtin_hash,JumpIfToBooleanTrueWideHandler,230302934 +builtin_hash,JumpIfToBooleanFalseWideHandler,237768975 +builtin_hash,JumpIfTrueWideHandler,814624851 +builtin_hash,JumpIfFalseWideHandler,814624851 +builtin_hash,SwitchOnSmiNoFeedbackWideHandler,623977068 +builtin_hash,ForInPrepareWideHandler,430965432 +builtin_hash,ForInNextWideHandler,-899950637 +builtin_hash,ThrowReferenceErrorIfHoleWideHandler,-575574526 +builtin_hash,GetIteratorWideHandler,-626454663 +builtin_hash,LdaSmiExtraWideHandler,465680004 +builtin_hash,LdaGlobalExtraWideHandler,1016564513 +builtin_hash,AddSmiExtraWideHandler,585533206 +builtin_hash,SubSmiExtraWideHandler,-88717151 +builtin_hash,MulSmiExtraWideHandler,-508453390 +builtin_hash,DivSmiExtraWideHandler,-542490757 +builtin_hash,BitwiseOrSmiExtraWideHandler,776661340 +builtin_hash,BitwiseXorSmiExtraWideHandler,276228867 +builtin_hash,BitwiseAndSmiExtraWideHandler,739058259 +builtin_hash,CallUndefinedReceiverExtraWideHandler,488508421 +builtin_hash,CallUndefinedReceiver1ExtraWideHandler,700320270 +builtin_hash,CallUndefinedReceiver2ExtraWideHandler,-7276189 diff --git a/deps/v8/tools/callstats.py.vpython b/deps/v8/tools/callstats.py.vpython deleted file mode 100644 index 11e3f34ef6f9c9..00000000000000 --- a/deps/v8/tools/callstats.py.vpython +++ /dev/null @@ -1,43 +0,0 @@ -# This is a vpython "spec" file. -# -# It describes patterns for python wheel dependencies of the python scripts in -# the callstats.py, particularly for dependencies that have compiled components -# (since pure-python dependencies can be easily vendored into third_party). -# -# When vpython is invoked, it finds this file and builds a python VirtualEnv, -# containing all of the dependencies described in this file, fetching them from -# CIPD (the "Chrome Infrastructure Package Deployer" service). Unlike `pip`, -# this never requires the end-user machine to have a working python extension -# compilation environment. All of these packages are built using: -# https://chromium.googlesource.com/infra/infra/+/master/infra/tools/dockerbuild/ -# -# All python scripts in the repo share this same spec, to avoid dependency -# fragmentation. 
-# -# If you have depot_tools installed in your $PATH, you can invoke python scripts -# in this repo by running them as you normally would run them, except -# substituting `vpython` instead of `python` on the command line, e.g.: -# vpython path/to/script.py some --arguments -# -# Read more about `vpython` and how to modify this file here: -# https://chromium.googlesource.com/infra/infra/+/master/doc/users/vpython.md - -python_version: "2.7" - -wheel: < - name: "infra/python/wheels/numpy/${vpython_platform}" - version: "version:1.11.3" -> - -wheel: < - name: "infra/python/wheels/scipy/${vpython_platform}" - version: "version:0.19.0" - match_tag: < - abi: "cp27mu" - platform: "manylinux1_i686" - > - match_tag: < - abi: "cp27mu" - platform: "manylinux1_x86_64" - > -> diff --git a/deps/v8/tools/clusterfuzz/foozzie/v8_mock.js b/deps/v8/tools/clusterfuzz/foozzie/v8_mock.js index e6fd63da3613f2..d2dc9e27b1d55b 100644 --- a/deps/v8/tools/clusterfuzz/foozzie/v8_mock.js +++ b/deps/v8/tools/clusterfuzz/foozzie/v8_mock.js @@ -85,6 +85,8 @@ var prettyPrinted = function prettyPrinted(msg) { return msg; }; // Mock performance methods. performance.now = function() { return 1.2; }; +performance.mark = function() { return undefined; }; +performance.measure = function() { return undefined; }; performance.measureMemory = function() { return []; }; // Mock readline so that test cases don't hang. diff --git a/deps/v8/tools/debug_helper/debug-helper-internal.cc b/deps/v8/tools/debug_helper/debug-helper-internal.cc index 51c8da6f278aea..d13ed9f973da0a 100644 --- a/deps/v8/tools/debug_helper/debug-helper-internal.cc +++ b/deps/v8/tools/debug_helper/debug-helper-internal.cc @@ -23,8 +23,10 @@ bool IsPointerCompressed(uintptr_t address) { uintptr_t EnsureDecompressed(uintptr_t address, uintptr_t any_uncompressed_ptr) { if (!COMPRESS_POINTERS_BOOL || !IsPointerCompressed(address)) return address; - return i::DecompressTaggedAny(any_uncompressed_ptr, - static_cast<i::Tagged_t>(address)); + // TODO(v8:11880): ExternalCodeCompressionScheme might be needed here for + // decompressing Code pointers from external code space. + return i::V8HeapCompressionScheme::DecompressTaggedAny( + any_uncompressed_ptr, static_cast<i::Tagged_t>(address)); } d::PropertyKind GetArrayKind(d::MemoryAccessResult mem_result) { diff --git a/deps/v8/tools/debug_helper/gen-heap-constants.py b/deps/v8/tools/debug_helper/gen-heap-constants.py index 3ea5be68210573..d78276a86f625a 100644 --- a/deps/v8/tools/debug_helper/gen-heap-constants.py +++ b/deps/v8/tools/debug_helper/gen-heap-constants.py @@ -64,13 +64,16 @@ def iterate_maps(target_space, camel_space_name): out = out + ' if (heap_addresses->any_heap_pointer == 0) {\n' out = out + ' heap_addresses->any_heap_pointer = any_uncompressed_ptr;\n' out = out + ' }\n' + # If we ever try to apply this to CodeSpace we might need to use + # ExternalCodeCompressionScheme instead of V8HeapCompressionScheme for + # decompressing external code pointers below. 
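For orientation, the V8HeapCompressionScheme calls these two hunks switch to can be pictured as splicing a 32-bit compressed value onto the cage base recovered from any uncompressed in-cage pointer. A minimal Python sketch of that arithmetic, assuming a 4GB-aligned pointer-compression cage (an assumption of this illustration, not something the generated code asserts):

def decompress_tagged_pointer(any_uncompressed_ptr, compressed):
    """Rough sketch of V8-style tagged-pointer decompression.

    Assumes a 4GB-aligned pointer-compression cage; this approximates,
    rather than reproduces, the V8HeapCompressionScheme implementation.
    """
    base = any_uncompressed_ptr & ~0xFFFFFFFF  # cage base: clear the low 32 bits
    return base + (compressed & 0xFFFFFFFF)    # re-attach the compressed offset

# Example: decompress_tagged_pointer(0x550012345678, 0xABCDEF) == 0x550000ABCDEF

This is also why the generator threads any_uncompressed_ptr through every emitted call: a compressed value is meaningless on its own and only becomes a usable address relative to a live in-cage pointer.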
expected_spaces = set(['map_space', 'read_only_space', 'old_space']) for offset, space_name in v8heapconst.HEAP_FIRST_PAGES.items(): if (space_name in expected_spaces): out = out + ' if (heap_addresses->' + space_name + '_first_page == 0) {\n' out = out + ' heap_addresses->' + space_name + \ - '_first_page = i::DecompressTaggedPointer(any_uncompressed_ptr, ' + \ - str(offset) + ');\n' + '_first_page = i::V8HeapCompressionScheme::DecompressTaggedPointer(' + \ + 'any_uncompressed_ptr, ' + str(offset) + ');\n' out = out + ' }\n' out = out + '}\n' diff --git a/deps/v8/tools/dev/gm.py b/deps/v8/tools/dev/gm.py index 58a729045aecd9..245a05389e0ad2 100755 --- a/deps/v8/tools/dev/gm.py +++ b/deps/v8/tools/dev/gm.py @@ -46,6 +46,7 @@ ] # Arches that get built/run when you don't specify any. DEFAULT_ARCHES = ["ia32", "x64", "arm", "arm64"] +SANDBOX_SUPPORTED_ARCHES = ["x64", "arm64"] # Modes that this script understands. MODES = { "release": "release", @@ -345,12 +346,18 @@ def GetSpecialCompiler(self): return ["clang_base_path = \"/usr\"", "clang_use_chrome_plugins = false"] return [] + def GetSandboxFlag(self): + if self.arch in SANDBOX_SUPPORTED_ARCHES: + return ["v8_enable_sandbox = true"] + return [] + def GetGnArgs(self): # Use only substring before first '-' as the actual mode mode = re.match("([^-]+)", self.mode).group(1) template = ARGS_TEMPLATES[mode] - arch_specific = (self.GetTargetCpu() + self.GetV8TargetCpu() + - self.GetTargetOS() + self.GetSpecialCompiler()) + arch_specific = ( + self.GetTargetCpu() + self.GetV8TargetCpu() + self.GetTargetOS() + + self.GetSpecialCompiler() + self.GetSandboxFlag()) return template % "\n".join(arch_specific) def Build(self): diff --git a/deps/v8/tools/gdbinit b/deps/v8/tools/gdbinit index f45c4165d61fb8..fee237b5372d2d 100644 --- a/deps/v8/tools/gdbinit +++ b/deps/v8/tools/gdbinit @@ -259,3 +259,64 @@ gdb.events.new_objfile.connect(newobj_handler) gdb.execute("set environment V8_GDBINIT_SOURCED=1") end + +### CppGC helpers. + +# Print compressed pointer. +define cpcp +call _cppgc_internal_Decompress_Compressed_Pointer((unsigned)($arg0)) +end +document cpcp +Prints compressed pointer (raw value) after decompression. +Usage: cpcp compressed_pointer +end + +# Print member. +define cpm +call _cppgc_internal_Print_Member((cppgc::internal::MemberBase*)(&$arg0)) +end +document cpm +Prints member, compressed or not. +Usage: cpm member +end + +# Pretty printer for cppgc::Member. 
+python + +import re + + +class CppGCMemberPrinter(object): + """Print cppgc Member types.""" + + def __init__(self, val, category, pointee_type): + self.val = val + self.category = category + self.pointee_type = pointee_type + + def to_string(self): + pointer = gdb.parse_and_eval( + "_cppgc_internal_Print_Member((cppgc::internal::MemberBase*){})".format( + self.val.address)) + return "{}Member<{}> pointing to {}".format( + '' if self.category is None else self.category, self.pointee_type, + pointer) + + def display_hint(self): + return "{}Member<{}>".format('' if self.category is None else self.category, + self.pointee_type) + + +def cppgc_pretty_printers(val): + typename = val.type.name or val.type.tag or str(val.type) + regex = re.compile("^(cppgc|blink)::(Weak|Untraced)?Member<(.*)>$") + match = regex.match(typename) + if match is None: + return None + return CppGCMemberPrinter( + val, category=match.group(2), pointee_type=match.group(3)) + + +gdb.pretty_printers.append(cppgc_pretty_printers) + +end diff --git a/deps/v8/tools/gen-postmortem-metadata.py b/deps/v8/tools/gen-postmortem-metadata.py index 4ffa36560e7a57..7f5f7864ae7316 100644 --- a/deps/v8/tools/gen-postmortem-metadata.py +++ b/deps/v8/tools/gen-postmortem-metadata.py @@ -94,6 +94,17 @@ { 'name': 'CodeKindFieldMask', 'value': 'Code::KindField::kMask' }, { 'name': 'CodeKindFieldShift', 'value': 'Code::KindField::kShift' }, + { 'name': 'DeoptimizationDataInlinedFunctionCountIndex', + 'value': 'DeoptimizationData::kInlinedFunctionCountIndex' }, + { 'name': 'DeoptimizationDataLiteralArrayIndex', + 'value': 'DeoptimizationData::kLiteralArrayIndex' }, + { 'name': 'DeoptimizationDataOptimizationIdIndex', + 'value': 'DeoptimizationData::kOptimizationIdIndex' }, + { 'name': 'DeoptimizationDataSharedFunctionInfoIndex', + 'value': 'DeoptimizationData::kSharedFunctionInfoIndex' }, + { 'name': 'DeoptimizationDataInliningPositionsIndex', + 'value': 'DeoptimizationData::kInliningPositionsIndex' }, + { 'name': 'CodeKindBytecodeHandler', 'value': 'static_cast<int>(CodeKind::BYTECODE_HANDLER)' }, { 'name': 'CodeKindInterpretedFunction', @@ -266,13 +277,13 @@ 'FixedArray, data, uintptr_t, kHeaderSize', 'BytecodeArray, data, uintptr_t, kHeaderSize', 'JSArrayBuffer, backing_store, uintptr_t, kBackingStoreOffset', - 'JSArrayBuffer, byte_length, size_t, kByteLengthOffset', - 'JSArrayBufferView, byte_length, size_t, kByteLengthOffset', - 'JSArrayBufferView, byte_offset, size_t, kByteOffsetOffset', + 'JSArrayBuffer, byte_length, size_t, kRawByteLengthOffset', + 'JSArrayBufferView, byte_length, size_t, kRawByteLengthOffset', + 'JSArrayBufferView, byte_offset, size_t, kRawByteOffsetOffset', 'JSDate, value, Object, kValueOffset', 'JSRegExp, source, Object, kSourceOffset', 'JSTypedArray, external_pointer, uintptr_t, kExternalPointerOffset', - 'JSTypedArray, length, Object, kLengthOffset', + 'JSTypedArray, length, Object, kRawLengthOffset', 'Map, instance_size_in_words, char, kInstanceSizeInWordsOffset', 'Map, inobject_properties_start_or_constructor_function_index, char, kInobjectPropertiesStartOrConstructorFunctionIndexOffset', 'Map, instance_type, uint16_t, kInstanceTypeOffset', @@ -299,6 +310,7 @@ 'Code, flags, uint32_t, kFlagsOffset', 'Code, instruction_start, uintptr_t, kHeaderSize', 'Code, instruction_size, int, kInstructionSizeOffset', + 'Code, deoptimization_data, FixedArray, kDeoptimizationDataOrInterpreterDataOffset', 'String, length, int32_t, kLengthOffset', 'DescriptorArray, header_size, uintptr_t, kHeaderSize', 'ConsString, first, String, 
kFirstOffset', @@ -307,7 +319,7 @@ 'ThinString, actual, String, kActualOffset', 'Symbol, name, Object, kDescriptionOffset', 'FixedArrayBase, length, SMI, kLengthOffset', -]; +] # # The following is a whitelist of classes we expect to find when scanning the diff --git a/deps/v8/tools/heap-stats/categories.js b/deps/v8/tools/heap-stats/categories.js index e4e570c4b40bd0..24f880cd264890 100644 --- a/deps/v8/tools/heap-stats/categories.js +++ b/deps/v8/tools/heap-stats/categories.js @@ -53,6 +53,7 @@ export const CATEGORIES = new Map([ 'JS_COLLATOR_TYPE', 'JS_DATE_TIME_FORMAT_TYPE', 'JS_DISPLAY_NAMES_TYPE', + 'JS_DURATION_FORMAT_TYPE', 'JS_LIST_FORMAT_TYPE', 'JS_LOCALE_TYPE', 'JS_NUMBER_FORMAT_TYPE', diff --git a/deps/v8/tools/release/mergeinfo.py b/deps/v8/tools/release/mergeinfo.py index d824f4738a98fe..7136463afb2659 100755 --- a/deps/v8/tools/release/mergeinfo.py +++ b/deps/v8/tools/release/mergeinfo.py @@ -25,9 +25,9 @@ def git_execute(working_dir, args, verbose=False): print("Executing git command:" + str(command)) p = Popen(args=command, stdin=PIPE, stdout=PIPE, stderr=PIPE) output, err = p.communicate() - rc = p.returncode - if rc != 0: + if p.returncode != 0: raise Exception(err) + output = output.decode('utf-8') if verbose: print("Git return value: " + output) return output @@ -74,7 +74,8 @@ def get_branches_for_commit(git_working_dir, hash_to_search): hash_to_search, '-a']).strip() branches = branches.splitlines() - return map(str.strip, branches) + return {branch.strip() for branch in branches} + def is_lkgr(branches): return 'remotes/origin/lkgr' in branches @@ -89,7 +90,7 @@ def get_first_canary(branches): def get_first_v8_version(branches): version_re = re.compile("remotes/origin/[0-9]+\.[0-9]+\.[0-9]+") - versions = filter(lambda branch: version_re.match(branch), branches) + versions = [branch for branch in branches if version_re.match(branch)] if len(versions) == 0: return "--" version = versions[0].split("/")[-1] diff --git a/deps/v8/tools/testrunner/local/variants.py b/deps/v8/tools/testrunner/local/variants.py index bbd76e1b55e67a..f44e445eca46a1 100644 --- a/deps/v8/tools/testrunner/local/variants.py +++ b/deps/v8/tools/testrunner/local/variants.py @@ -80,7 +80,7 @@ ], "sparkplug": ["--jitless", "--no-sparkplug"], "concurrent_sparkplug": ["--jitless"], - "maglev": ["--jitless"], + "maglev": ["--jitless", "--no-maglev"], "stress_maglev": ["--jitless"], "always_sparkplug": ["--jitless", "--no-sparkplug"], "code_serializer": [ diff --git a/deps/v8/tools/testrunner/num_fuzzer.py b/deps/v8/tools/testrunner/num_fuzzer.py index 9265e32e1293ba..68f85c9466fa35 100755 --- a/deps/v8/tools/testrunner/num_fuzzer.py +++ b/deps/v8/tools/testrunner/num_fuzzer.py @@ -77,9 +77,6 @@ def _add_parser_options(self, parser): parser.add_option("--stress-deopt", default=0, type="int", help="probability [0-10] of adding --deopt-every-n-times " "flag to the test") - parser.add_option("--stress-deopt-min", default=1, type="int", - help="extends --stress-deopt to have minimum interval " - "between deopt points") parser.add_option("--stress-interrupt-budget", default=0, type="int", help="probability [0-10] of adding the --interrupt-budget " "flag to the test") diff --git a/deps/v8/tools/testrunner/objects/testcase.py b/deps/v8/tools/testrunner/objects/testcase.py index 1463f474ff5745..58780913792252 100644 --- a/deps/v8/tools/testrunner/objects/testcase.py +++ b/deps/v8/tools/testrunner/objects/testcase.py @@ -447,9 +447,13 @@ def cmp(x, y): (other.suite.name, other.name, other.variant) ) - def 
__str__(self): + @property + def full_name(self): return self.suite.name + '/' + self.name + def __str__(self): + return self.full_name + class D8TestCase(TestCase): def get_shell(self): diff --git a/deps/v8/tools/testrunner/standard_runner_test.py b/deps/v8/tools/testrunner/standard_runner_test.py index 5254b90041ff0d..64154e018248a8 100644 --- a/deps/v8/tools/testrunner/standard_runner_test.py +++ b/deps/v8/tools/testrunner/standard_runner_test.py @@ -28,6 +28,8 @@ sys.path.append(TOOLS_ROOT) from testrunner import standard_runner from testrunner import num_fuzzer +from testrunner.testproc import base +from testrunner.testproc import fuzzer from testrunner.utils.test_utils import ( temp_base, TestRunnerTest, @@ -511,17 +513,60 @@ def testRunnerFlags(self): result.has_returncode(0) + +class FakeTimeoutProc(base.TestProcObserver): + """Fake of the total-timeout observer that just stops after counting + "count" number of test or result events. + """ + def __init__(self, count): + super(FakeTimeoutProc, self).__init__() + self._n = 0 + self._count = count + + def _on_next_test(self, test): + self.__on_event() + + def _on_result_for(self, test, result): + self.__on_event() + + def __on_event(self): + if self._n >= self._count: + self.stop() + self._n += 1 + + class NumFuzzerTest(TestRunnerTest): def get_runner_class(self): return num_fuzzer.NumFuzzer def testNumFuzzer(self): - result = self.run_tests( - '--command-prefix', sys.executable, - '--outdir', 'out/build', - ) - result.has_returncode(0) - result.stdout_includes('>>> Autodetected') + fuzz_flags = [ + f'{flag}=1' for flag in self.get_runner_options() + if flag.startswith('--stress-') + ] + self.assertEqual(len(fuzz_flags), len(fuzzer.FUZZERS)) + for fuzz_flag in fuzz_flags: + # The fake timeout observer above will stop after processing the 10th + # test. This still executes an 11th. Each test causes a test- and a + # result event internally. We test both paths here. + for event_count in (19, 20): + with self.subTest(f'fuzz_flag={fuzz_flag} event_count={event_count}'): + with patch( + 'testrunner.testproc.timeout.TimeoutProc.create', + lambda x: FakeTimeoutProc(event_count)): + result = self.run_tests( + '--command-prefix', sys.executable, + '--outdir', 'out/build', + '--variants=default', + '--fuzzer-random-seed=12345', + '--total-timeout-sec=60', + fuzz_flag, + '--progress=verbose', + 'sweet/bananas', + ) + result.has_returncode(0) + result.stdout_includes('>>> Autodetected') + result.stdout_includes('11 tests ran') + class OtherTest(TestRunnerTest): def testStatusFilePresubmit(self): @@ -531,5 +576,6 @@ def testStatusFilePresubmit(self): self.assertTrue(statusfile.PresubmitCheck( os.path.join(basedir, 'test', 'sweet', 'sweet.status'))) + if __name__ == '__main__': unittest.main() diff --git a/deps/v8/tools/testrunner/testdata/testroot1/out/build/d8_mocked.py b/deps/v8/tools/testrunner/testdata/testroot1/out/build/d8_mocked.py index d67e0304f63932..67f2f8ec5e4e42 100644 --- a/deps/v8/tools/testrunner/testdata/testroot1/out/build/d8_mocked.py +++ b/deps/v8/tools/testrunner/testdata/testroot1/out/build/d8_mocked.py @@ -13,7 +13,18 @@ args = ' '.join(sys.argv[1:]) print(args) + # Let all berries fail. if 'berries' in args: sys.exit(1) + +# Produce dummy results if some analysis flags for numfuzz are present.
+if '--print-deopt-stress' in args: + print('=== Stress deopt counter: 12345') + +if '--fuzzer-gc-analysis' in args: + print('### Maximum marking limit reached = 3.70') + print('### Maximum new space size reached = 7.30') + print('### Allocations = 9107, hash = 0x0000001b') + sys.exit(0) diff --git a/deps/v8/tools/testrunner/testproc/fuzzer.py b/deps/v8/tools/testrunner/testproc/fuzzer.py index 2d7fab6653a6c5..ca7f3b71b9c296 100644 --- a/deps/v8/tools/testrunner/testproc/fuzzer.py +++ b/deps/v8/tools/testrunner/testproc/fuzzer.py @@ -17,8 +17,7 @@ (0.1, '--force-slow-path'), (0.2, '--future'), (0.1, '--interrupt-budget=100'), - # TODO(almuthanna): enable again when the FYI bots are greener - # (0.1, '--interrupt-budget-for-maglev=100'), + (0.1, '--interrupt-budget-for-maglev=100'), (0.1, '--liftoff'), (0.1, '--maglev'), (0.1, '--minor-mc'), @@ -54,6 +53,9 @@ (0.1, '--turbo-force-mid-tier-regalloc'), ] +MIN_DEOPT = 1 +MAX_DEOPT = 10**9 + def random_extra_flags(rng): """Returns a random list of flags chosen from the configurations in @@ -194,7 +196,7 @@ def _create_analysis_subtest(self, test): def _result_for(self, test, subtest, result): if not self._disable_analysis: - if result is not None: + if result is not None and subtest.procid.endswith('Fuzzer-analysis'): # Analysis phase, for fuzzing we drop the result. if result.has_unexpected_output: self._send_result(test, None) @@ -356,21 +358,15 @@ def create_flags_generator(self, rng, test, analysis_value): class DeoptAnalyzer(Analyzer): - MAX_DEOPT=1000000000 - - def __init__(self, min_interval): - super(DeoptAnalyzer, self).__init__() - self._min = min_interval - def get_analysis_flags(self): - return ['--deopt-every-n-times=%d' % self.MAX_DEOPT, + return ['--deopt-every-n-times=%d' % MAX_DEOPT, '--print-deopt-stress'] def do_analysis(self, result): for line in reversed(result.output.stdout.splitlines()): if line.startswith('=== Stress deopt counter: '): - counter = self.MAX_DEOPT - int(line.split(' ')[-1]) - if counter < self._min: + counter = MAX_DEOPT - int(line.split(' ')[-1]) + if counter < MIN_DEOPT: # Skip this test since we won't generate any meaningful interval with # given minimum. 
return None @@ -378,17 +374,13 @@ def do_analysis(self, result): class DeoptFuzzer(Fuzzer): - def __init__(self, min_interval): - super(DeoptFuzzer, self).__init__() - self._min = min_interval - def create_flags_generator(self, rng, test, analysis_value): while True: if analysis_value: value = analysis_value // 2 else: value = 10000 - interval = rng.randint(self._min, max(value, self._min)) + interval = rng.randint(MIN_DEOPT, max(value, MIN_DEOPT)) yield ['--deopt-every-n-times=%d' % interval] diff --git a/deps/v8/tools/testrunner/testproc/indicators.py b/deps/v8/tools/testrunner/testproc/indicators.py index 394742fc6b10f3..1ae04a64c486f9 100644 --- a/deps/v8/tools/testrunner/testproc/indicators.py +++ b/deps/v8/tools/testrunner/testproc/indicators.py @@ -14,7 +14,7 @@ def print_failure_header(test, is_flaky=False): - text = [str(test)] + text = [test.full_name] if test.output_proc.negative: text.append('[negative]') if is_flaky: @@ -24,6 +24,23 @@ def print_failure_header(test, is_flaky=False): print(output.encode(encoding, errors='replace').decode(encoding)) +def formatted_result_output(result): + lines = [] + if result.output.stderr: + lines.append("--- stderr ---") + lines.append(result.output.stderr.strip()) + if result.output.stdout: + lines.append("--- stdout ---") + lines.append(result.output.stdout.strip()) + lines.append("Command: %s" % result.cmd.to_string()) + if result.output.HasCrashed(): + lines.append("exit code: %s" % result.output.exit_code_string) + lines.append("--- CRASHED ---") + if result.output.HasTimedOut(): + lines.append("--- TIMEOUT ---") + return '\n'.join(lines) + + class ProgressIndicator(): def __init__(self, context, options, test_count): @@ -68,19 +85,7 @@ def finished(self): for test, result, is_flaky in self._failed: flaky += int(is_flaky) print_failure_header(test, is_flaky=is_flaky) - if result.output.stderr: - print("--- stderr ---") - print(result.output.stderr.strip()) - if result.output.stdout: - print("--- stdout ---") - print(result.output.stdout.strip()) - print("Command: %s" % result.cmd.to_string()) - if result.output.HasCrashed(): - print("exit code: %s" % result.output.exit_code_string) - print("--- CRASHED ---") - crashed += 1 - if result.output.HasTimedOut(): - print("--- TIMEOUT ---") + print(formatted_result_output(result)) if len(self._failed) == 0: print("===") print("=== All tests succeeded") @@ -230,7 +235,7 @@ def on_test_result(self, test, result): else: self._passed += 1 - self._print_progress(str(test)) + self._print_progress(test.full_name) if result.has_unexpected_output: output = result.output stdout = output.stdout.strip() @@ -358,10 +363,7 @@ def __init__(self, context, options, test_count, framework_name): self.test_count = 0 def on_test_result(self, test, result): - if result.is_rerun: - self.process_results(test, result.results) - else: - self.process_results(test, [result]) + self.process_results(test, result.as_list) def process_results(self, test, results): for run, result in enumerate(results): @@ -376,7 +378,7 @@ def process_results(self, test, results): if not result.has_unexpected_output and run == 0: continue - record = self._test_record(test, result, output, run) + record = self._test_record(test, result, run) record.update({ "result": test.output_proc.get_outcome(output), "stdout": output.stdout, @@ -392,30 +394,22 @@ def result_value(test, result, output): return "" return test.output_proc.get_outcome(output) - record = self._test_record(test, result, output, run) - record.update({ - "result": 
result_value(test, result, output), - "marked_slow": test.is_slow, - }) + record = self._test_record(test, result, run) + record.update( + result=result_value(test, result, output), + marked_slow=test.is_slow, + ) self.tests.add(record) self.duration_sum += record['duration'] self.test_count += 1 - def _test_record(self, test, result, output, run): - return { - "name": str(test), - "flags": result.cmd.args, - "command": result.cmd.to_string(relative=True), - "run": run + 1, - "exit_code": output.exit_code, - "expected": test.expected_outcomes, - "duration": output.duration, - "random_seed": test.random_seed, - "target_name": test.get_shell(), - "variant": test.variant, - "variant_flags": test.variant_flags, - "framework_name": self.framework_name, - } + def _test_record(self, test, result, run): + record = util.base_test_record(test, result, run) + record.update( + framework_name=self.framework_name, + command=result.cmd.to_string(relative=True), + ) + return record def finished(self): duration_mean = None @@ -423,10 +417,10 @@ def finished(self): duration_mean = self.duration_sum / self.test_count result = { - "results": self.results, - "slowest_tests": self.tests.as_list(), - "duration_mean": duration_mean, - "test_total": self.test_count, + 'results': self.results, + 'slowest_tests': self.tests.as_list(), + 'duration_mean': duration_mean, + 'test_total': self.test_count, } with open(self.options.json_test_results, "w") as f: diff --git a/deps/v8/tools/testrunner/testproc/progress.py b/deps/v8/tools/testrunner/testproc/progress.py index 789adf053f1c28..319cfa1af1b671 100644 --- a/deps/v8/tools/testrunner/testproc/progress.py +++ b/deps/v8/tools/testrunner/testproc/progress.py @@ -6,6 +6,7 @@ from . import base from testrunner.local import utils from testrunner.testproc.indicators import JsonTestProgressIndicator, PROGRESS_INDICATORS +from testrunner.testproc.resultdb import ResultDBIndicator class ResultsTracker(base.TestProcObserver): @@ -66,7 +67,7 @@ def __init__(self, context, options, framework_name, test_count): 0, JsonTestProgressIndicator(context, options, test_count, framework_name)) - + self.procs.append(ResultDBIndicator(context, options, test_count)) self._requirement = max(proc._requirement for proc in self.procs) def _on_result_for(self, test, result): diff --git a/deps/v8/tools/testrunner/testproc/result.py b/deps/v8/tools/testrunner/testproc/result.py index 18983d418083e7..5436e48be7d097 100644 --- a/deps/v8/tools/testrunner/testproc/result.py +++ b/deps/v8/tools/testrunner/testproc/result.py @@ -16,6 +16,10 @@ def is_grouped(self): def is_rerun(self): return False + @property + def as_list(self): + return [self] + class Result(ResultBase): """Result created by the output processor.""" @@ -112,5 +116,9 @@ def __init__(self, results): def is_rerun(self): return True + @property + def as_list(self): + return self.results + def status(self): return ' '.join(r.status() for r in self.results) diff --git a/deps/v8/tools/testrunner/testproc/resultdb.py b/deps/v8/tools/testrunner/testproc/resultdb.py new file mode 100644 index 00000000000000..c01d2591506480 --- /dev/null +++ b/deps/v8/tools/testrunner/testproc/resultdb.py @@ -0,0 +1,95 @@ +# Copyright 2022 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import json +import logging +import pprint +import requests +import os + +from . 
import base +from .indicators import ( + formatted_result_output, + ProgressIndicator, +) +from .util import ( + base_test_record, + extract_tags, + strip_ascii_control_characters, +) + + +class ResultDBIndicator(ProgressIndicator): + + def __init__(self, context, options, test_count): + super(ResultDBIndicator, self).__init__(context, options, test_count) + self._requirement = base.DROP_PASS_OUTPUT + self.rpc = ResultDB_RPC() + + def on_test_result(self, test, result): + for run, sub_result in enumerate(result.as_list): + self.send_result(test, sub_result, run) + + def send_result(self, test, result, run): + # We need to recalculate the observed (but lost) test behaviour. + # `result.has_unexpected_output` indicates that the run behaviour of the + # test did not match the expected behaviour, irrespective of passing or + # failing. + result_expected = not result.has_unexpected_output + test_should_pass = not test.is_fail + run_passed = (result_expected == test_should_pass) + rdb_result = { + 'testId': strip_ascii_control_characters(test.full_name), + 'status': 'PASS' if run_passed else 'FAIL', + 'expected': result_expected, + } + + if result.output and result.output.duration: + rdb_result.update(duration=f'{result.output.duration}ms') + if result.has_unexpected_output: + formatted_output = formatted_result_output(result) + sanitized = strip_ascii_control_characters(formatted_output) + # TODO(liviurau): do we have a better presentation for this data? + # Protobuf strings can have len == 2**32. + rdb_result.update(summaryHtml=f'<pre>{sanitized}</pre>') + record = base_test_record(test, result, run) + rdb_result.update(tags=extract_tags(record)) + self.rpc.send(rdb_result) + + +class ResultDB_RPC: + + def __init__(self): + self.session = None + luci_context = os.environ.get('LUCI_CONTEXT') + # TODO(liviurau): use a factory method and return None in the absence of + # necessary context. + if not luci_context: + logging.warning( + f'No LUCI_CONTEXT found. No results will be sent to ResultDB.') + return + with open(luci_context, mode="r", encoding="utf-8") as f: + config = json.load(f) + sink = config.get('result_sink', None) + if not sink: + logging.warning( + f'No ResultDB sink found. 
No results will be sent to ResultDB.') + return + self.session = requests.Session() + self.session.headers = { + 'Authorization': f'ResultSink {sink.get("auth_token")}', + } + self.url = f'http://{sink.get("address")}/prpc/luci.resultsink.v1.Sink/ReportTestResults' + + def send(self, result): + if self.session: + payload = dict(testResults=[result]) + try: + self.session.post(self.url, json=payload).raise_for_status() + except Exception as e: + logging.error(f'Request failed: {payload}') + raise e + + def __del__(self): + if self.session: + self.session.close() diff --git a/deps/v8/tools/testrunner/testproc/util.py b/deps/v8/tools/testrunner/testproc/util.py index 316c5ba3d8e086..5e6b8fd2c7cca5 100644 --- a/deps/v8/tools/testrunner/testproc/util.py +++ b/deps/v8/tools/testrunner/testproc/util.py @@ -7,6 +7,7 @@ import logging import os import platform +import re import signal import subprocess @@ -53,6 +54,43 @@ def kill_processes_linux(): logging.exception('Failed to kill process') +def strip_ascii_control_characters(unicode_string): + return re.sub(r'[^\x20-\x7E]', '?', str(unicode_string)) + + +def base_test_record(test, result, run): + record = { + 'name': test.full_name, + 'flags': result.cmd.args, + 'run': run + 1, + 'expected': test.expected_outcomes, + 'random_seed': test.random_seed, + 'target_name': test.get_shell(), + 'variant': test.variant, + 'variant_flags': test.variant_flags, + } + if result.output: + record.update( + exit_code=result.output.exit_code, + duration=result.output.duration, + ) + return record + + +def extract_tags(record): + tags = [] + for k, v in record.items(): + if type(v) == list: + tags += [sanitized_kv_dict(k, e) for e in v] + else: + tags.append(sanitized_kv_dict(k, v)) + return tags + + +def sanitized_kv_dict(k, v): + return dict(key=k, value=strip_ascii_control_characters(v)) + + +class FixedSizeTopList(): """Utility collection for gathering a fixed number of elements with the biggest value for the given key. 
It employs a heap from which we pop the diff --git a/deps/v8/tools/testrunner/utils/augmented_options.py b/deps/v8/tools/testrunner/utils/augmented_options.py index db6aef4b5e79f6..0af04031c5ece0 100644 --- a/deps/v8/tools/testrunner/utils/augmented_options.py +++ b/deps/v8/tools/testrunner/utils/augmented_options.py @@ -57,9 +57,9 @@ def shard_info(self): def fuzzer_configs(self): fuzzers = [] - def add(name, prob, *args): + def add(name, prob): if prob: - fuzzers.append(fuzzer.create_fuzzer_config(name, prob, *args)) + fuzzers.append(fuzzer.create_fuzzer_config(name, prob)) add('compaction', self.stress_compaction) add('interrupt', self.stress_interrupt_budget) @@ -69,7 +69,7 @@ def add(name, prob, *args): add('stack', self.stress_stack_size) add('threads', self.stress_thread_pool_size) add('delay', self.stress_delay_tasks) - add('deopt', self.stress_deopt, self.stress_deopt_min) + add('deopt', self.stress_deopt) return fuzzers def fuzzer_tests_count(self): diff --git a/deps/v8/tools/testrunner/utils/test_utils.py b/deps/v8/tools/testrunner/utils/test_utils.py index 4891d07266ad5b..f61beaa4fdb18a 100644 --- a/deps/v8/tools/testrunner/utils/test_utils.py +++ b/deps/v8/tools/testrunner/utils/test_utils.py @@ -183,9 +183,16 @@ def resolve_arg(arg): json_out = clean_json_output(json_out_path, basedir) return TestResult(stdout.getvalue(), stderr.getvalue(), code, json_out, self) - def get_runner_class(): - """Implement to return the runner class""" - return None + def get_runner_options(self, baseroot='testroot1'): + """Returns a list of all flags parsed by the test runner.""" + with temp_base(baseroot=baseroot) as basedir: + runner = self.get_runner_class()(basedir=basedir) + parser = runner._create_parser() + return [i.get_opt_string() for i in parser.option_list] + + def get_runner_class(): + """Implement to return the runner class""" + return None class FakeOSContext(DefaultOSContext): diff --git a/deps/v8/tools/v8heapconst.py b/deps/v8/tools/v8heapconst.py index 34bcad23fe343f..6009e736ff22a0 100644 --- a/deps/v8/tools/v8heapconst.py +++ b/deps/v8/tools/v8heapconst.py @@ -241,347 +241,350 @@ 2111: "JS_DATE_TYPE", 2112: "JS_DATE_TIME_FORMAT_TYPE", 2113: "JS_DISPLAY_NAMES_TYPE", - 2114: "JS_ERROR_TYPE", - 2115: "JS_EXTERNAL_OBJECT_TYPE", - 2116: "JS_FINALIZATION_REGISTRY_TYPE", - 2117: "JS_LIST_FORMAT_TYPE", - 2118: "JS_LOCALE_TYPE", - 2119: "JS_MESSAGE_OBJECT_TYPE", - 2120: "JS_NUMBER_FORMAT_TYPE", - 2121: "JS_PLURAL_RULES_TYPE", - 2122: "JS_REG_EXP_TYPE", - 2123: "JS_REG_EXP_STRING_ITERATOR_TYPE", - 2124: "JS_RELATIVE_TIME_FORMAT_TYPE", - 2125: "JS_SEGMENT_ITERATOR_TYPE", - 2126: "JS_SEGMENTER_TYPE", - 2127: "JS_SEGMENTS_TYPE", - 2128: "JS_SHADOW_REALM_TYPE", - 2129: "JS_SHARED_ARRAY_TYPE", - 2130: "JS_SHARED_STRUCT_TYPE", - 2131: "JS_STRING_ITERATOR_TYPE", - 2132: "JS_TEMPORAL_CALENDAR_TYPE", - 2133: "JS_TEMPORAL_DURATION_TYPE", - 2134: "JS_TEMPORAL_INSTANT_TYPE", - 2135: "JS_TEMPORAL_PLAIN_DATE_TYPE", - 2136: "JS_TEMPORAL_PLAIN_DATE_TIME_TYPE", - 2137: "JS_TEMPORAL_PLAIN_MONTH_DAY_TYPE", - 2138: "JS_TEMPORAL_PLAIN_TIME_TYPE", - 2139: "JS_TEMPORAL_PLAIN_YEAR_MONTH_TYPE", - 2140: "JS_TEMPORAL_TIME_ZONE_TYPE", - 2141: "JS_TEMPORAL_ZONED_DATE_TIME_TYPE", - 2142: "JS_V8_BREAK_ITERATOR_TYPE", - 2143: "JS_WEAK_REF_TYPE", - 2144: "WASM_EXCEPTION_PACKAGE_TYPE", - 2145: "WASM_GLOBAL_OBJECT_TYPE", - 2146: "WASM_INSTANCE_OBJECT_TYPE", - 2147: "WASM_MEMORY_OBJECT_TYPE", - 2148: "WASM_MODULE_OBJECT_TYPE", - 2149: "WASM_SUSPENDER_OBJECT_TYPE", - 2150: "WASM_TABLE_OBJECT_TYPE", - 2151: 
"WASM_TAG_OBJECT_TYPE", - 2152: "WASM_VALUE_OBJECT_TYPE", + 2114: "JS_DURATION_FORMAT_TYPE", + 2115: "JS_ERROR_TYPE", + 2116: "JS_EXTERNAL_OBJECT_TYPE", + 2117: "JS_FINALIZATION_REGISTRY_TYPE", + 2118: "JS_LIST_FORMAT_TYPE", + 2119: "JS_LOCALE_TYPE", + 2120: "JS_MESSAGE_OBJECT_TYPE", + 2121: "JS_NUMBER_FORMAT_TYPE", + 2122: "JS_PLURAL_RULES_TYPE", + 2123: "JS_RAW_JSON_TYPE", + 2124: "JS_REG_EXP_TYPE", + 2125: "JS_REG_EXP_STRING_ITERATOR_TYPE", + 2126: "JS_RELATIVE_TIME_FORMAT_TYPE", + 2127: "JS_SEGMENT_ITERATOR_TYPE", + 2128: "JS_SEGMENTER_TYPE", + 2129: "JS_SEGMENTS_TYPE", + 2130: "JS_SHADOW_REALM_TYPE", + 2131: "JS_SHARED_ARRAY_TYPE", + 2132: "JS_SHARED_STRUCT_TYPE", + 2133: "JS_STRING_ITERATOR_TYPE", + 2134: "JS_TEMPORAL_CALENDAR_TYPE", + 2135: "JS_TEMPORAL_DURATION_TYPE", + 2136: "JS_TEMPORAL_INSTANT_TYPE", + 2137: "JS_TEMPORAL_PLAIN_DATE_TYPE", + 2138: "JS_TEMPORAL_PLAIN_DATE_TIME_TYPE", + 2139: "JS_TEMPORAL_PLAIN_MONTH_DAY_TYPE", + 2140: "JS_TEMPORAL_PLAIN_TIME_TYPE", + 2141: "JS_TEMPORAL_PLAIN_YEAR_MONTH_TYPE", + 2142: "JS_TEMPORAL_TIME_ZONE_TYPE", + 2143: "JS_TEMPORAL_ZONED_DATE_TIME_TYPE", + 2144: "JS_V8_BREAK_ITERATOR_TYPE", + 2145: "JS_WEAK_REF_TYPE", + 2146: "WASM_EXCEPTION_PACKAGE_TYPE", + 2147: "WASM_GLOBAL_OBJECT_TYPE", + 2148: "WASM_INSTANCE_OBJECT_TYPE", + 2149: "WASM_MEMORY_OBJECT_TYPE", + 2150: "WASM_MODULE_OBJECT_TYPE", + 2151: "WASM_SUSPENDER_OBJECT_TYPE", + 2152: "WASM_TABLE_OBJECT_TYPE", + 2153: "WASM_TAG_OBJECT_TYPE", + 2154: "WASM_VALUE_OBJECT_TYPE", } # List of known V8 maps. KNOWN_MAPS = { - ("read_only_space", 0x02139): (247, "MetaMap"), - ("read_only_space", 0x02161): (131, "NullMap"), - ("read_only_space", 0x02189): (229, "StrongDescriptorArrayMap"), - ("read_only_space", 0x021b1): (265, "WeakArrayListMap"), - ("read_only_space", 0x021f5): (155, "EnumCacheMap"), - ("read_only_space", 0x02229): (176, "FixedArrayMap"), - ("read_only_space", 0x02275): (8, "OneByteInternalizedStringMap"), - ("read_only_space", 0x022c1): (244, "FreeSpaceMap"), - ("read_only_space", 0x022e9): (243, "OnePointerFillerMap"), - ("read_only_space", 0x02311): (243, "TwoPointerFillerMap"), - ("read_only_space", 0x02339): (131, "UninitializedMap"), - ("read_only_space", 0x023b1): (131, "UndefinedMap"), - ("read_only_space", 0x023f5): (130, "HeapNumberMap"), - ("read_only_space", 0x02429): (131, "TheHoleMap"), - ("read_only_space", 0x02489): (131, "BooleanMap"), - ("read_only_space", 0x0252d): (191, "ByteArrayMap"), - ("read_only_space", 0x02555): (176, "FixedCOWArrayMap"), - ("read_only_space", 0x0257d): (177, "HashTableMap"), - ("read_only_space", 0x025a5): (128, "SymbolMap"), - ("read_only_space", 0x025cd): (40, "OneByteStringMap"), - ("read_only_space", 0x025f5): (253, "ScopeInfoMap"), - ("read_only_space", 0x0261d): (254, "SharedFunctionInfoMap"), - ("read_only_space", 0x02645): (237, "CodeMap"), - ("read_only_space", 0x0266d): (236, "CellMap"), - ("read_only_space", 0x02695): (252, "GlobalPropertyCellMap"), - ("read_only_space", 0x026bd): (204, "ForeignMap"), - ("read_only_space", 0x026e5): (233, "TransitionArrayMap"), - ("read_only_space", 0x0270d): (45, "ThinOneByteStringMap"), - ("read_only_space", 0x02735): (242, "FeedbackVectorMap"), - ("read_only_space", 0x0276d): (131, "ArgumentsMarkerMap"), - ("read_only_space", 0x027cd): (131, "ExceptionMap"), - ("read_only_space", 0x02829): (131, "TerminationExceptionMap"), - ("read_only_space", 0x02891): (131, "OptimizedOutMap"), - ("read_only_space", 0x028f1): (131, "StaleRegisterMap"), - ("read_only_space", 0x02951): (190, 
"ScriptContextTableMap"), - ("read_only_space", 0x02979): (188, "ClosureFeedbackCellArrayMap"), - ("read_only_space", 0x029a1): (241, "FeedbackMetadataArrayMap"), - ("read_only_space", 0x029c9): (176, "ArrayListMap"), - ("read_only_space", 0x029f1): (129, "BigIntMap"), - ("read_only_space", 0x02a19): (189, "ObjectBoilerplateDescriptionMap"), - ("read_only_space", 0x02a41): (192, "BytecodeArrayMap"), - ("read_only_space", 0x02a69): (238, "CodeDataContainerMap"), - ("read_only_space", 0x02a91): (239, "CoverageInfoMap"), - ("read_only_space", 0x02ab9): (193, "FixedDoubleArrayMap"), - ("read_only_space", 0x02ae1): (179, "GlobalDictionaryMap"), - ("read_only_space", 0x02b09): (157, "ManyClosuresCellMap"), - ("read_only_space", 0x02b31): (248, "MegaDomHandlerMap"), - ("read_only_space", 0x02b59): (176, "ModuleInfoMap"), - ("read_only_space", 0x02b81): (180, "NameDictionaryMap"), - ("read_only_space", 0x02ba9): (157, "NoClosuresCellMap"), - ("read_only_space", 0x02bd1): (182, "NumberDictionaryMap"), - ("read_only_space", 0x02bf9): (157, "OneClosureCellMap"), - ("read_only_space", 0x02c21): (183, "OrderedHashMapMap"), - ("read_only_space", 0x02c49): (184, "OrderedHashSetMap"), - ("read_only_space", 0x02c71): (181, "NameToIndexHashTableMap"), - ("read_only_space", 0x02c99): (186, "RegisteredSymbolTableMap"), - ("read_only_space", 0x02cc1): (185, "OrderedNameDictionaryMap"), - ("read_only_space", 0x02ce9): (250, "PreparseDataMap"), - ("read_only_space", 0x02d11): (251, "PropertyArrayMap"), - ("read_only_space", 0x02d39): (234, "AccessorInfoMap"), - ("read_only_space", 0x02d61): (235, "SideEffectCallHandlerInfoMap"), - ("read_only_space", 0x02d89): (235, "SideEffectFreeCallHandlerInfoMap"), - ("read_only_space", 0x02db1): (235, "NextCallSideEffectFreeCallHandlerInfoMap"), - ("read_only_space", 0x02dd9): (187, "SimpleNumberDictionaryMap"), - ("read_only_space", 0x02e01): (223, "SmallOrderedHashMapMap"), - ("read_only_space", 0x02e29): (224, "SmallOrderedHashSetMap"), - ("read_only_space", 0x02e51): (225, "SmallOrderedNameDictionaryMap"), - ("read_only_space", 0x02e79): (230, "SourceTextModuleMap"), - ("read_only_space", 0x02ea1): (258, "SwissNameDictionaryMap"), - ("read_only_space", 0x02ec9): (231, "SyntheticModuleMap"), - ("read_only_space", 0x02ef1): (259, "WasmApiFunctionRefMap"), - ("read_only_space", 0x02f19): (220, "WasmCapiFunctionDataMap"), - ("read_only_space", 0x02f41): (221, "WasmExportedFunctionDataMap"), - ("read_only_space", 0x02f69): (261, "WasmInternalFunctionMap"), - ("read_only_space", 0x02f91): (222, "WasmJSFunctionDataMap"), - ("read_only_space", 0x02fb9): (262, "WasmResumeDataMap"), - ("read_only_space", 0x02fe1): (264, "WasmTypeInfoMap"), - ("read_only_space", 0x03009): (260, "WasmContinuationObjectMap"), - ("read_only_space", 0x03031): (232, "WeakFixedArrayMap"), - ("read_only_space", 0x03059): (178, "EphemeronHashTableMap"), - ("read_only_space", 0x03081): (240, "EmbedderDataArrayMap"), - ("read_only_space", 0x030a9): (266, "WeakCellMap"), - ("read_only_space", 0x030d1): (32, "StringMap"), - ("read_only_space", 0x030f9): (41, "ConsOneByteStringMap"), - ("read_only_space", 0x03121): (33, "ConsStringMap"), - ("read_only_space", 0x03149): (37, "ThinStringMap"), - ("read_only_space", 0x03171): (35, "SlicedStringMap"), - ("read_only_space", 0x03199): (43, "SlicedOneByteStringMap"), - ("read_only_space", 0x031c1): (34, "ExternalStringMap"), - ("read_only_space", 0x031e9): (42, "ExternalOneByteStringMap"), - ("read_only_space", 0x03211): (50, "UncachedExternalStringMap"), - 
("read_only_space", 0x03239): (0, "InternalizedStringMap"), - ("read_only_space", 0x03261): (2, "ExternalInternalizedStringMap"), - ("read_only_space", 0x03289): (10, "ExternalOneByteInternalizedStringMap"), - ("read_only_space", 0x032b1): (18, "UncachedExternalInternalizedStringMap"), - ("read_only_space", 0x032d9): (26, "UncachedExternalOneByteInternalizedStringMap"), - ("read_only_space", 0x03301): (58, "UncachedExternalOneByteStringMap"), - ("read_only_space", 0x03329): (104, "SharedOneByteStringMap"), - ("read_only_space", 0x03351): (96, "SharedStringMap"), - ("read_only_space", 0x03379): (106, "SharedExternalOneByteStringMap"), - ("read_only_space", 0x033a1): (98, "SharedExternalStringMap"), - ("read_only_space", 0x033c9): (122, "SharedUncachedExternalOneByteStringMap"), - ("read_only_space", 0x033f1): (114, "SharedUncachedExternalStringMap"), - ("read_only_space", 0x03419): (109, "SharedThinOneByteStringMap"), - ("read_only_space", 0x03441): (101, "SharedThinStringMap"), - ("read_only_space", 0x03469): (131, "SelfReferenceMarkerMap"), - ("read_only_space", 0x03491): (131, "BasicBlockCountersMarkerMap"), - ("read_only_space", 0x034d5): (146, "ArrayBoilerplateDescriptionMap"), - ("read_only_space", 0x035d5): (159, "InterceptorInfoMap"), - ("read_only_space", 0x07455): (132, "PromiseFulfillReactionJobTaskMap"), - ("read_only_space", 0x0747d): (133, "PromiseRejectReactionJobTaskMap"), - ("read_only_space", 0x074a5): (134, "CallableTaskMap"), - ("read_only_space", 0x074cd): (135, "CallbackTaskMap"), - ("read_only_space", 0x074f5): (136, "PromiseResolveThenableJobTaskMap"), - ("read_only_space", 0x0751d): (139, "FunctionTemplateInfoMap"), - ("read_only_space", 0x07545): (140, "ObjectTemplateInfoMap"), - ("read_only_space", 0x0756d): (141, "AccessCheckInfoMap"), - ("read_only_space", 0x07595): (142, "AccessorPairMap"), - ("read_only_space", 0x075bd): (143, "AliasedArgumentsEntryMap"), - ("read_only_space", 0x075e5): (144, "AllocationMementoMap"), - ("read_only_space", 0x0760d): (147, "AsmWasmDataMap"), - ("read_only_space", 0x07635): (148, "AsyncGeneratorRequestMap"), - ("read_only_space", 0x0765d): (149, "BreakPointMap"), - ("read_only_space", 0x07685): (150, "BreakPointInfoMap"), - ("read_only_space", 0x076ad): (151, "CachedTemplateObjectMap"), - ("read_only_space", 0x076d5): (152, "CallSiteInfoMap"), - ("read_only_space", 0x076fd): (153, "ClassPositionsMap"), - ("read_only_space", 0x07725): (154, "DebugInfoMap"), - ("read_only_space", 0x0774d): (156, "ErrorStackDataMap"), - ("read_only_space", 0x07775): (158, "FunctionTemplateRareDataMap"), - ("read_only_space", 0x0779d): (160, "InterpreterDataMap"), - ("read_only_space", 0x077c5): (161, "ModuleRequestMap"), - ("read_only_space", 0x077ed): (162, "PromiseCapabilityMap"), - ("read_only_space", 0x07815): (163, "PromiseOnStackMap"), - ("read_only_space", 0x0783d): (164, "PromiseReactionMap"), - ("read_only_space", 0x07865): (165, "PropertyDescriptorObjectMap"), - ("read_only_space", 0x0788d): (166, "PrototypeInfoMap"), - ("read_only_space", 0x078b5): (167, "RegExpBoilerplateDescriptionMap"), - ("read_only_space", 0x078dd): (168, "ScriptMap"), - ("read_only_space", 0x07905): (169, "ScriptOrModuleMap"), - ("read_only_space", 0x0792d): (170, "SourceTextModuleInfoEntryMap"), - ("read_only_space", 0x07955): (171, "StackFrameInfoMap"), - ("read_only_space", 0x0797d): (172, "TemplateObjectDescriptionMap"), - ("read_only_space", 0x079a5): (173, "Tuple2Map"), - ("read_only_space", 0x079cd): (174, "WasmExceptionTagMap"), - ("read_only_space", 
0x079f5): (175, "WasmIndirectFunctionTableMap"), - ("read_only_space", 0x07a1d): (195, "SloppyArgumentsElementsMap"), - ("read_only_space", 0x07a45): (228, "DescriptorArrayMap"), - ("read_only_space", 0x07a6d): (217, "UncompiledDataWithoutPreparseDataMap"), - ("read_only_space", 0x07a95): (215, "UncompiledDataWithPreparseDataMap"), - ("read_only_space", 0x07abd): (218, "UncompiledDataWithoutPreparseDataWithJobMap"), - ("read_only_space", 0x07ae5): (216, "UncompiledDataWithPreparseDataAndJobMap"), - ("read_only_space", 0x07b0d): (249, "OnHeapBasicBlockProfilerDataMap"), - ("read_only_space", 0x07b35): (196, "TurbofanBitsetTypeMap"), - ("read_only_space", 0x07b5d): (200, "TurbofanUnionTypeMap"), - ("read_only_space", 0x07b85): (199, "TurbofanRangeTypeMap"), - ("read_only_space", 0x07bad): (197, "TurbofanHeapConstantTypeMap"), - ("read_only_space", 0x07bd5): (198, "TurbofanOtherNumberConstantTypeMap"), - ("read_only_space", 0x07bfd): (245, "InternalClassMap"), - ("read_only_space", 0x07c25): (256, "SmiPairMap"), - ("read_only_space", 0x07c4d): (255, "SmiBoxMap"), - ("read_only_space", 0x07c75): (201, "ExportedSubClassBaseMap"), - ("read_only_space", 0x07c9d): (202, "ExportedSubClassMap"), - ("read_only_space", 0x07cc5): (226, "AbstractInternalClassSubclass1Map"), - ("read_only_space", 0x07ced): (227, "AbstractInternalClassSubclass2Map"), - ("read_only_space", 0x07d15): (194, "InternalClassWithSmiElementsMap"), - ("read_only_space", 0x07d3d): (246, "InternalClassWithStructElementsMap"), - ("read_only_space", 0x07d65): (203, "ExportedSubClass2Map"), - ("read_only_space", 0x07d8d): (257, "SortStateMap"), - ("read_only_space", 0x07db5): (263, "WasmStringViewIterMap"), - ("read_only_space", 0x07ddd): (145, "AllocationSiteWithWeakNextMap"), - ("read_only_space", 0x07e05): (145, "AllocationSiteWithoutWeakNextMap"), - ("read_only_space", 0x07ed1): (137, "LoadHandler1Map"), - ("read_only_space", 0x07ef9): (137, "LoadHandler2Map"), - ("read_only_space", 0x07f21): (137, "LoadHandler3Map"), - ("read_only_space", 0x07f49): (138, "StoreHandler0Map"), - ("read_only_space", 0x07f71): (138, "StoreHandler1Map"), - ("read_only_space", 0x07f99): (138, "StoreHandler2Map"), - ("read_only_space", 0x07fc1): (138, "StoreHandler3Map"), - ("map_space", 0x02139): (2115, "ExternalMap"), - ("map_space", 0x02161): (2119, "JSMessageObjectMap"), + ("read_only_space", 0x02141): (247, "MetaMap"), + ("read_only_space", 0x02169): (131, "NullMap"), + ("read_only_space", 0x02191): (229, "StrongDescriptorArrayMap"), + ("read_only_space", 0x021b9): (265, "WeakArrayListMap"), + ("read_only_space", 0x021fd): (155, "EnumCacheMap"), + ("read_only_space", 0x02231): (176, "FixedArrayMap"), + ("read_only_space", 0x0227d): (8, "OneByteInternalizedStringMap"), + ("read_only_space", 0x022c9): (244, "FreeSpaceMap"), + ("read_only_space", 0x022f1): (243, "OnePointerFillerMap"), + ("read_only_space", 0x02319): (243, "TwoPointerFillerMap"), + ("read_only_space", 0x02341): (131, "UninitializedMap"), + ("read_only_space", 0x023b9): (131, "UndefinedMap"), + ("read_only_space", 0x023fd): (130, "HeapNumberMap"), + ("read_only_space", 0x02431): (131, "TheHoleMap"), + ("read_only_space", 0x02491): (131, "BooleanMap"), + ("read_only_space", 0x02535): (191, "ByteArrayMap"), + ("read_only_space", 0x0255d): (176, "FixedCOWArrayMap"), + ("read_only_space", 0x02585): (177, "HashTableMap"), + ("read_only_space", 0x025ad): (128, "SymbolMap"), + ("read_only_space", 0x025d5): (40, "OneByteStringMap"), + ("read_only_space", 0x025fd): (253, "ScopeInfoMap"), + 
("read_only_space", 0x02625): (254, "SharedFunctionInfoMap"), + ("read_only_space", 0x0264d): (237, "CodeMap"), + ("read_only_space", 0x02675): (236, "CellMap"), + ("read_only_space", 0x0269d): (252, "GlobalPropertyCellMap"), + ("read_only_space", 0x026c5): (204, "ForeignMap"), + ("read_only_space", 0x026ed): (233, "TransitionArrayMap"), + ("read_only_space", 0x02715): (45, "ThinOneByteStringMap"), + ("read_only_space", 0x0273d): (242, "FeedbackVectorMap"), + ("read_only_space", 0x02775): (131, "ArgumentsMarkerMap"), + ("read_only_space", 0x027d5): (131, "ExceptionMap"), + ("read_only_space", 0x02831): (131, "TerminationExceptionMap"), + ("read_only_space", 0x02899): (131, "OptimizedOutMap"), + ("read_only_space", 0x028f9): (131, "StaleRegisterMap"), + ("read_only_space", 0x02959): (190, "ScriptContextTableMap"), + ("read_only_space", 0x02981): (188, "ClosureFeedbackCellArrayMap"), + ("read_only_space", 0x029a9): (241, "FeedbackMetadataArrayMap"), + ("read_only_space", 0x029d1): (176, "ArrayListMap"), + ("read_only_space", 0x029f9): (129, "BigIntMap"), + ("read_only_space", 0x02a21): (189, "ObjectBoilerplateDescriptionMap"), + ("read_only_space", 0x02a49): (192, "BytecodeArrayMap"), + ("read_only_space", 0x02a71): (238, "CodeDataContainerMap"), + ("read_only_space", 0x02a99): (239, "CoverageInfoMap"), + ("read_only_space", 0x02ac1): (193, "FixedDoubleArrayMap"), + ("read_only_space", 0x02ae9): (179, "GlobalDictionaryMap"), + ("read_only_space", 0x02b11): (157, "ManyClosuresCellMap"), + ("read_only_space", 0x02b39): (248, "MegaDomHandlerMap"), + ("read_only_space", 0x02b61): (176, "ModuleInfoMap"), + ("read_only_space", 0x02b89): (180, "NameDictionaryMap"), + ("read_only_space", 0x02bb1): (157, "NoClosuresCellMap"), + ("read_only_space", 0x02bd9): (182, "NumberDictionaryMap"), + ("read_only_space", 0x02c01): (157, "OneClosureCellMap"), + ("read_only_space", 0x02c29): (183, "OrderedHashMapMap"), + ("read_only_space", 0x02c51): (184, "OrderedHashSetMap"), + ("read_only_space", 0x02c79): (181, "NameToIndexHashTableMap"), + ("read_only_space", 0x02ca1): (186, "RegisteredSymbolTableMap"), + ("read_only_space", 0x02cc9): (185, "OrderedNameDictionaryMap"), + ("read_only_space", 0x02cf1): (250, "PreparseDataMap"), + ("read_only_space", 0x02d19): (251, "PropertyArrayMap"), + ("read_only_space", 0x02d41): (234, "AccessorInfoMap"), + ("read_only_space", 0x02d69): (235, "SideEffectCallHandlerInfoMap"), + ("read_only_space", 0x02d91): (235, "SideEffectFreeCallHandlerInfoMap"), + ("read_only_space", 0x02db9): (235, "NextCallSideEffectFreeCallHandlerInfoMap"), + ("read_only_space", 0x02de1): (187, "SimpleNumberDictionaryMap"), + ("read_only_space", 0x02e09): (223, "SmallOrderedHashMapMap"), + ("read_only_space", 0x02e31): (224, "SmallOrderedHashSetMap"), + ("read_only_space", 0x02e59): (225, "SmallOrderedNameDictionaryMap"), + ("read_only_space", 0x02e81): (230, "SourceTextModuleMap"), + ("read_only_space", 0x02ea9): (258, "SwissNameDictionaryMap"), + ("read_only_space", 0x02ed1): (231, "SyntheticModuleMap"), + ("read_only_space", 0x02ef9): (259, "WasmApiFunctionRefMap"), + ("read_only_space", 0x02f21): (220, "WasmCapiFunctionDataMap"), + ("read_only_space", 0x02f49): (221, "WasmExportedFunctionDataMap"), + ("read_only_space", 0x02f71): (261, "WasmInternalFunctionMap"), + ("read_only_space", 0x02f99): (222, "WasmJSFunctionDataMap"), + ("read_only_space", 0x02fc1): (262, "WasmResumeDataMap"), + ("read_only_space", 0x02fe9): (264, "WasmTypeInfoMap"), + ("read_only_space", 0x03011): (260, 
"WasmContinuationObjectMap"), + ("read_only_space", 0x03039): (232, "WeakFixedArrayMap"), + ("read_only_space", 0x03061): (178, "EphemeronHashTableMap"), + ("read_only_space", 0x03089): (240, "EmbedderDataArrayMap"), + ("read_only_space", 0x030b1): (266, "WeakCellMap"), + ("read_only_space", 0x030d9): (32, "StringMap"), + ("read_only_space", 0x03101): (41, "ConsOneByteStringMap"), + ("read_only_space", 0x03129): (33, "ConsStringMap"), + ("read_only_space", 0x03151): (37, "ThinStringMap"), + ("read_only_space", 0x03179): (35, "SlicedStringMap"), + ("read_only_space", 0x031a1): (43, "SlicedOneByteStringMap"), + ("read_only_space", 0x031c9): (34, "ExternalStringMap"), + ("read_only_space", 0x031f1): (42, "ExternalOneByteStringMap"), + ("read_only_space", 0x03219): (50, "UncachedExternalStringMap"), + ("read_only_space", 0x03241): (0, "InternalizedStringMap"), + ("read_only_space", 0x03269): (2, "ExternalInternalizedStringMap"), + ("read_only_space", 0x03291): (10, "ExternalOneByteInternalizedStringMap"), + ("read_only_space", 0x032b9): (18, "UncachedExternalInternalizedStringMap"), + ("read_only_space", 0x032e1): (26, "UncachedExternalOneByteInternalizedStringMap"), + ("read_only_space", 0x03309): (58, "UncachedExternalOneByteStringMap"), + ("read_only_space", 0x03331): (104, "SharedOneByteStringMap"), + ("read_only_space", 0x03359): (96, "SharedStringMap"), + ("read_only_space", 0x03381): (106, "SharedExternalOneByteStringMap"), + ("read_only_space", 0x033a9): (98, "SharedExternalStringMap"), + ("read_only_space", 0x033d1): (122, "SharedUncachedExternalOneByteStringMap"), + ("read_only_space", 0x033f9): (114, "SharedUncachedExternalStringMap"), + ("read_only_space", 0x03421): (109, "SharedThinOneByteStringMap"), + ("read_only_space", 0x03449): (101, "SharedThinStringMap"), + ("read_only_space", 0x03471): (131, "SelfReferenceMarkerMap"), + ("read_only_space", 0x03499): (131, "BasicBlockCountersMarkerMap"), + ("read_only_space", 0x034dd): (146, "ArrayBoilerplateDescriptionMap"), + ("read_only_space", 0x035dd): (159, "InterceptorInfoMap"), + ("read_only_space", 0x075c9): (132, "PromiseFulfillReactionJobTaskMap"), + ("read_only_space", 0x075f1): (133, "PromiseRejectReactionJobTaskMap"), + ("read_only_space", 0x07619): (134, "CallableTaskMap"), + ("read_only_space", 0x07641): (135, "CallbackTaskMap"), + ("read_only_space", 0x07669): (136, "PromiseResolveThenableJobTaskMap"), + ("read_only_space", 0x07691): (139, "FunctionTemplateInfoMap"), + ("read_only_space", 0x076b9): (140, "ObjectTemplateInfoMap"), + ("read_only_space", 0x076e1): (141, "AccessCheckInfoMap"), + ("read_only_space", 0x07709): (142, "AccessorPairMap"), + ("read_only_space", 0x07731): (143, "AliasedArgumentsEntryMap"), + ("read_only_space", 0x07759): (144, "AllocationMementoMap"), + ("read_only_space", 0x07781): (147, "AsmWasmDataMap"), + ("read_only_space", 0x077a9): (148, "AsyncGeneratorRequestMap"), + ("read_only_space", 0x077d1): (149, "BreakPointMap"), + ("read_only_space", 0x077f9): (150, "BreakPointInfoMap"), + ("read_only_space", 0x07821): (151, "CachedTemplateObjectMap"), + ("read_only_space", 0x07849): (152, "CallSiteInfoMap"), + ("read_only_space", 0x07871): (153, "ClassPositionsMap"), + ("read_only_space", 0x07899): (154, "DebugInfoMap"), + ("read_only_space", 0x078c1): (156, "ErrorStackDataMap"), + ("read_only_space", 0x078e9): (158, "FunctionTemplateRareDataMap"), + ("read_only_space", 0x07911): (160, "InterpreterDataMap"), + ("read_only_space", 0x07939): (161, "ModuleRequestMap"), + ("read_only_space", 0x07961): 
(162, "PromiseCapabilityMap"), + ("read_only_space", 0x07989): (163, "PromiseOnStackMap"), + ("read_only_space", 0x079b1): (164, "PromiseReactionMap"), + ("read_only_space", 0x079d9): (165, "PropertyDescriptorObjectMap"), + ("read_only_space", 0x07a01): (166, "PrototypeInfoMap"), + ("read_only_space", 0x07a29): (167, "RegExpBoilerplateDescriptionMap"), + ("read_only_space", 0x07a51): (168, "ScriptMap"), + ("read_only_space", 0x07a79): (169, "ScriptOrModuleMap"), + ("read_only_space", 0x07aa1): (170, "SourceTextModuleInfoEntryMap"), + ("read_only_space", 0x07ac9): (171, "StackFrameInfoMap"), + ("read_only_space", 0x07af1): (172, "TemplateObjectDescriptionMap"), + ("read_only_space", 0x07b19): (173, "Tuple2Map"), + ("read_only_space", 0x07b41): (174, "WasmExceptionTagMap"), + ("read_only_space", 0x07b69): (175, "WasmIndirectFunctionTableMap"), + ("read_only_space", 0x07b91): (195, "SloppyArgumentsElementsMap"), + ("read_only_space", 0x07bb9): (228, "DescriptorArrayMap"), + ("read_only_space", 0x07be1): (217, "UncompiledDataWithoutPreparseDataMap"), + ("read_only_space", 0x07c09): (215, "UncompiledDataWithPreparseDataMap"), + ("read_only_space", 0x07c31): (218, "UncompiledDataWithoutPreparseDataWithJobMap"), + ("read_only_space", 0x07c59): (216, "UncompiledDataWithPreparseDataAndJobMap"), + ("read_only_space", 0x07c81): (249, "OnHeapBasicBlockProfilerDataMap"), + ("read_only_space", 0x07ca9): (196, "TurbofanBitsetTypeMap"), + ("read_only_space", 0x07cd1): (200, "TurbofanUnionTypeMap"), + ("read_only_space", 0x07cf9): (199, "TurbofanRangeTypeMap"), + ("read_only_space", 0x07d21): (197, "TurbofanHeapConstantTypeMap"), + ("read_only_space", 0x07d49): (198, "TurbofanOtherNumberConstantTypeMap"), + ("read_only_space", 0x07d71): (245, "InternalClassMap"), + ("read_only_space", 0x07d99): (256, "SmiPairMap"), + ("read_only_space", 0x07dc1): (255, "SmiBoxMap"), + ("read_only_space", 0x07de9): (201, "ExportedSubClassBaseMap"), + ("read_only_space", 0x07e11): (202, "ExportedSubClassMap"), + ("read_only_space", 0x07e39): (226, "AbstractInternalClassSubclass1Map"), + ("read_only_space", 0x07e61): (227, "AbstractInternalClassSubclass2Map"), + ("read_only_space", 0x07e89): (194, "InternalClassWithSmiElementsMap"), + ("read_only_space", 0x07eb1): (246, "InternalClassWithStructElementsMap"), + ("read_only_space", 0x07ed9): (203, "ExportedSubClass2Map"), + ("read_only_space", 0x07f01): (257, "SortStateMap"), + ("read_only_space", 0x07f29): (263, "WasmStringViewIterMap"), + ("read_only_space", 0x07f51): (145, "AllocationSiteWithWeakNextMap"), + ("read_only_space", 0x07f79): (145, "AllocationSiteWithoutWeakNextMap"), + ("read_only_space", 0x08045): (137, "LoadHandler1Map"), + ("read_only_space", 0x0806d): (137, "LoadHandler2Map"), + ("read_only_space", 0x08095): (137, "LoadHandler3Map"), + ("read_only_space", 0x080bd): (138, "StoreHandler0Map"), + ("read_only_space", 0x080e5): (138, "StoreHandler1Map"), + ("read_only_space", 0x0810d): (138, "StoreHandler2Map"), + ("read_only_space", 0x08135): (138, "StoreHandler3Map"), + ("old_space", 0x043a5): (2116, "ExternalMap"), + ("old_space", 0x043d5): (2120, "JSMessageObjectMap"), } # List of known V8 objects. 
KNOWN_OBJECTS = { - ("read_only_space", 0x021d9): "EmptyWeakArrayList", - ("read_only_space", 0x021e5): "EmptyDescriptorArray", - ("read_only_space", 0x0221d): "EmptyEnumCache", - ("read_only_space", 0x02251): "EmptyFixedArray", - ("read_only_space", 0x02259): "NullValue", - ("read_only_space", 0x02361): "UninitializedValue", - ("read_only_space", 0x023d9): "UndefinedValue", - ("read_only_space", 0x0241d): "NanValue", - ("read_only_space", 0x02451): "TheHoleValue", - ("read_only_space", 0x0247d): "HoleNanValue", - ("read_only_space", 0x024b1): "TrueValue", - ("read_only_space", 0x024f1): "FalseValue", - ("read_only_space", 0x02521): "empty_string", - ("read_only_space", 0x0275d): "EmptyScopeInfo", - ("read_only_space", 0x02795): "ArgumentsMarker", - ("read_only_space", 0x027f5): "Exception", - ("read_only_space", 0x02851): "TerminationException", - ("read_only_space", 0x028b9): "OptimizedOut", - ("read_only_space", 0x02919): "StaleRegister", - ("read_only_space", 0x034b9): "EmptyPropertyArray", - ("read_only_space", 0x034c1): "EmptyByteArray", - ("read_only_space", 0x034c9): "EmptyObjectBoilerplateDescription", - ("read_only_space", 0x034fd): "EmptyArrayBoilerplateDescription", - ("read_only_space", 0x03509): "EmptyClosureFeedbackCellArray", - ("read_only_space", 0x03511): "EmptySlowElementDictionary", - ("read_only_space", 0x03535): "EmptyOrderedHashMap", - ("read_only_space", 0x03549): "EmptyOrderedHashSet", - ("read_only_space", 0x0355d): "EmptyFeedbackMetadata", - ("read_only_space", 0x03569): "EmptyPropertyDictionary", - ("read_only_space", 0x03591): "EmptyOrderedPropertyDictionary", - ("read_only_space", 0x035a9): "EmptySwissPropertyDictionary", - ("read_only_space", 0x035fd): "NoOpInterceptorInfo", - ("read_only_space", 0x03625): "EmptyArrayList", - ("read_only_space", 0x03631): "EmptyWeakFixedArray", - ("read_only_space", 0x03639): "InfinityValue", - ("read_only_space", 0x03645): "MinusZeroValue", - ("read_only_space", 0x03651): "MinusInfinityValue", - ("read_only_space", 0x0365d): "SingleCharacterStringTable", - ("read_only_space", 0x04a65): "SelfReferenceMarker", - ("read_only_space", 0x04aa5): "BasicBlockCountersMarker", - ("read_only_space", 0x04ae9): "OffHeapTrampolineRelocationInfo", - ("read_only_space", 0x04af5): "GlobalThisBindingScopeInfo", - ("read_only_space", 0x04b25): "EmptyFunctionScopeInfo", - ("read_only_space", 0x04b49): "NativeScopeInfo", - ("read_only_space", 0x04b61): "HashSeed", - ("old_space", 0x04235): "ArgumentsIteratorAccessor", - ("old_space", 0x0424d): "ArrayLengthAccessor", - ("old_space", 0x04265): "BoundFunctionLengthAccessor", - ("old_space", 0x0427d): "BoundFunctionNameAccessor", - ("old_space", 0x04295): "ErrorStackAccessor", - ("old_space", 0x042ad): "FunctionArgumentsAccessor", - ("old_space", 0x042c5): "FunctionCallerAccessor", - ("old_space", 0x042dd): "FunctionNameAccessor", - ("old_space", 0x042f5): "FunctionLengthAccessor", - ("old_space", 0x0430d): "FunctionPrototypeAccessor", - ("old_space", 0x04325): "SharedArrayLengthAccessor", - ("old_space", 0x0433d): "StringLengthAccessor", - ("old_space", 0x04355): "ValueUnavailableAccessor", - ("old_space", 0x0436d): "WrappedFunctionLengthAccessor", - ("old_space", 0x04385): "WrappedFunctionNameAccessor", - ("old_space", 0x0439d): "InvalidPrototypeValidityCell", - ("old_space", 0x043a5): "EmptyScript", - ("old_space", 0x043e9): "ManyClosuresCell", - ("old_space", 0x043f5): "ArrayConstructorProtector", - ("old_space", 0x04409): "NoElementsProtector", - ("old_space", 0x0441d): "MegaDOMProtector", - 
("old_space", 0x04431): "IsConcatSpreadableProtector", - ("old_space", 0x04445): "ArraySpeciesProtector", - ("old_space", 0x04459): "TypedArraySpeciesProtector", - ("old_space", 0x0446d): "PromiseSpeciesProtector", - ("old_space", 0x04481): "RegExpSpeciesProtector", - ("old_space", 0x04495): "StringLengthProtector", - ("old_space", 0x044a9): "ArrayIteratorProtector", - ("old_space", 0x044bd): "ArrayBufferDetachingProtector", - ("old_space", 0x044d1): "PromiseHookProtector", - ("old_space", 0x044e5): "PromiseResolveProtector", - ("old_space", 0x044f9): "MapIteratorProtector", - ("old_space", 0x0450d): "PromiseThenProtector", - ("old_space", 0x04521): "SetIteratorProtector", - ("old_space", 0x04535): "StringIteratorProtector", - ("old_space", 0x04549): "StringSplitCache", - ("old_space", 0x04951): "RegExpMultipleCache", - ("old_space", 0x04d59): "BuiltinsConstantsTable", - ("old_space", 0x051ad): "AsyncFunctionAwaitRejectSharedFun", - ("old_space", 0x051d1): "AsyncFunctionAwaitResolveSharedFun", - ("old_space", 0x051f5): "AsyncGeneratorAwaitRejectSharedFun", - ("old_space", 0x05219): "AsyncGeneratorAwaitResolveSharedFun", - ("old_space", 0x0523d): "AsyncGeneratorYieldResolveSharedFun", - ("old_space", 0x05261): "AsyncGeneratorReturnResolveSharedFun", - ("old_space", 0x05285): "AsyncGeneratorReturnClosedRejectSharedFun", - ("old_space", 0x052a9): "AsyncGeneratorReturnClosedResolveSharedFun", - ("old_space", 0x052cd): "AsyncIteratorValueUnwrapSharedFun", - ("old_space", 0x052f1): "PromiseAllResolveElementSharedFun", - ("old_space", 0x05315): "PromiseAllSettledResolveElementSharedFun", - ("old_space", 0x05339): "PromiseAllSettledRejectElementSharedFun", - ("old_space", 0x0535d): "PromiseAnyRejectElementSharedFun", - ("old_space", 0x05381): "PromiseCapabilityDefaultRejectSharedFun", - ("old_space", 0x053a5): "PromiseCapabilityDefaultResolveSharedFun", - ("old_space", 0x053c9): "PromiseCatchFinallySharedFun", - ("old_space", 0x053ed): "PromiseGetCapabilitiesExecutorSharedFun", - ("old_space", 0x05411): "PromiseThenFinallySharedFun", - ("old_space", 0x05435): "PromiseThrowerFinallySharedFun", - ("old_space", 0x05459): "PromiseValueThunkFinallySharedFun", - ("old_space", 0x0547d): "ProxyRevokeSharedFun", - ("old_space", 0x054a1): "ShadowRealmImportValueFulfilledSFI", - ("old_space", 0x054c5): "SourceTextModuleExecuteAsyncModuleFulfilledSFI", - ("old_space", 0x054e9): "SourceTextModuleExecuteAsyncModuleRejectedSFI", + ("read_only_space", 0x021e1): "EmptyWeakArrayList", + ("read_only_space", 0x021ed): "EmptyDescriptorArray", + ("read_only_space", 0x02225): "EmptyEnumCache", + ("read_only_space", 0x02259): "EmptyFixedArray", + ("read_only_space", 0x02261): "NullValue", + ("read_only_space", 0x02369): "UninitializedValue", + ("read_only_space", 0x023e1): "UndefinedValue", + ("read_only_space", 0x02425): "NanValue", + ("read_only_space", 0x02459): "TheHoleValue", + ("read_only_space", 0x02485): "HoleNanValue", + ("read_only_space", 0x024b9): "TrueValue", + ("read_only_space", 0x024f9): "FalseValue", + ("read_only_space", 0x02529): "empty_string", + ("read_only_space", 0x02765): "EmptyScopeInfo", + ("read_only_space", 0x0279d): "ArgumentsMarker", + ("read_only_space", 0x027fd): "Exception", + ("read_only_space", 0x02859): "TerminationException", + ("read_only_space", 0x028c1): "OptimizedOut", + ("read_only_space", 0x02921): "StaleRegister", + ("read_only_space", 0x034c1): "EmptyPropertyArray", + ("read_only_space", 0x034c9): "EmptyByteArray", + ("read_only_space", 0x034d1): 
"EmptyObjectBoilerplateDescription", + ("read_only_space", 0x03505): "EmptyArrayBoilerplateDescription", + ("read_only_space", 0x03511): "EmptyClosureFeedbackCellArray", + ("read_only_space", 0x03519): "EmptySlowElementDictionary", + ("read_only_space", 0x0353d): "EmptyOrderedHashMap", + ("read_only_space", 0x03551): "EmptyOrderedHashSet", + ("read_only_space", 0x03565): "EmptyFeedbackMetadata", + ("read_only_space", 0x03571): "EmptyPropertyDictionary", + ("read_only_space", 0x03599): "EmptyOrderedPropertyDictionary", + ("read_only_space", 0x035b1): "EmptySwissPropertyDictionary", + ("read_only_space", 0x03605): "NoOpInterceptorInfo", + ("read_only_space", 0x0362d): "EmptyArrayList", + ("read_only_space", 0x03639): "EmptyWeakFixedArray", + ("read_only_space", 0x03641): "InfinityValue", + ("read_only_space", 0x0364d): "MinusZeroValue", + ("read_only_space", 0x03659): "MinusInfinityValue", + ("read_only_space", 0x03665): "SingleCharacterStringTable", + ("read_only_space", 0x04a6d): "SelfReferenceMarker", + ("read_only_space", 0x04aad): "BasicBlockCountersMarker", + ("read_only_space", 0x04af1): "OffHeapTrampolineRelocationInfo", + ("read_only_space", 0x04afd): "GlobalThisBindingScopeInfo", + ("read_only_space", 0x04b2d): "EmptyFunctionScopeInfo", + ("read_only_space", 0x04b51): "NativeScopeInfo", + ("read_only_space", 0x04b69): "HashSeed", + ("old_space", 0x0423d): "ArgumentsIteratorAccessor", + ("old_space", 0x04255): "ArrayLengthAccessor", + ("old_space", 0x0426d): "BoundFunctionLengthAccessor", + ("old_space", 0x04285): "BoundFunctionNameAccessor", + ("old_space", 0x0429d): "ErrorStackAccessor", + ("old_space", 0x042b5): "FunctionArgumentsAccessor", + ("old_space", 0x042cd): "FunctionCallerAccessor", + ("old_space", 0x042e5): "FunctionNameAccessor", + ("old_space", 0x042fd): "FunctionLengthAccessor", + ("old_space", 0x04315): "FunctionPrototypeAccessor", + ("old_space", 0x0432d): "SharedArrayLengthAccessor", + ("old_space", 0x04345): "StringLengthAccessor", + ("old_space", 0x0435d): "ValueUnavailableAccessor", + ("old_space", 0x04375): "WrappedFunctionLengthAccessor", + ("old_space", 0x0438d): "WrappedFunctionNameAccessor", + ("old_space", 0x043a5): "ExternalMap", + ("old_space", 0x043cd): "InvalidPrototypeValidityCell", + ("old_space", 0x043d5): "JSMessageObjectMap", + ("old_space", 0x043fd): "EmptyScript", + ("old_space", 0x04441): "ManyClosuresCell", + ("old_space", 0x0444d): "ArrayConstructorProtector", + ("old_space", 0x04461): "NoElementsProtector", + ("old_space", 0x04475): "MegaDOMProtector", + ("old_space", 0x04489): "IsConcatSpreadableProtector", + ("old_space", 0x0449d): "ArraySpeciesProtector", + ("old_space", 0x044b1): "TypedArraySpeciesProtector", + ("old_space", 0x044c5): "PromiseSpeciesProtector", + ("old_space", 0x044d9): "RegExpSpeciesProtector", + ("old_space", 0x044ed): "StringLengthProtector", + ("old_space", 0x04501): "ArrayIteratorProtector", + ("old_space", 0x04515): "ArrayBufferDetachingProtector", + ("old_space", 0x04529): "PromiseHookProtector", + ("old_space", 0x0453d): "PromiseResolveProtector", + ("old_space", 0x04551): "MapIteratorProtector", + ("old_space", 0x04565): "PromiseThenProtector", + ("old_space", 0x04579): "SetIteratorProtector", + ("old_space", 0x0458d): "StringIteratorProtector", + ("old_space", 0x045a1): "StringSplitCache", + ("old_space", 0x049a9): "RegExpMultipleCache", + ("old_space", 0x04db1): "BuiltinsConstantsTable", + ("old_space", 0x05211): "AsyncFunctionAwaitRejectSharedFun", + ("old_space", 0x05235): 
"AsyncFunctionAwaitResolveSharedFun", + ("old_space", 0x05259): "AsyncGeneratorAwaitRejectSharedFun", + ("old_space", 0x0527d): "AsyncGeneratorAwaitResolveSharedFun", + ("old_space", 0x052a1): "AsyncGeneratorYieldWithAwaitResolveSharedFun", + ("old_space", 0x052c5): "AsyncGeneratorReturnResolveSharedFun", + ("old_space", 0x052e9): "AsyncGeneratorReturnClosedRejectSharedFun", + ("old_space", 0x0530d): "AsyncGeneratorReturnClosedResolveSharedFun", + ("old_space", 0x05331): "AsyncIteratorValueUnwrapSharedFun", + ("old_space", 0x05355): "PromiseAllResolveElementSharedFun", + ("old_space", 0x05379): "PromiseAllSettledResolveElementSharedFun", + ("old_space", 0x0539d): "PromiseAllSettledRejectElementSharedFun", + ("old_space", 0x053c1): "PromiseAnyRejectElementSharedFun", + ("old_space", 0x053e5): "PromiseCapabilityDefaultRejectSharedFun", + ("old_space", 0x05409): "PromiseCapabilityDefaultResolveSharedFun", + ("old_space", 0x0542d): "PromiseCatchFinallySharedFun", + ("old_space", 0x05451): "PromiseGetCapabilitiesExecutorSharedFun", + ("old_space", 0x05475): "PromiseThenFinallySharedFun", + ("old_space", 0x05499): "PromiseThrowerFinallySharedFun", + ("old_space", 0x054bd): "PromiseValueThunkFinallySharedFun", + ("old_space", 0x054e1): "ProxyRevokeSharedFun", + ("old_space", 0x05505): "ShadowRealmImportValueFulfilledSFI", + ("old_space", 0x05529): "SourceTextModuleExecuteAsyncModuleFulfilledSFI", + ("old_space", 0x0554d): "SourceTextModuleExecuteAsyncModuleRejectedSFI", } # Lower 32 bits of first page addresses for various heap spaces. HEAP_FIRST_PAGES = { 0x000c0000: "old_space", - 0x00100000: "map_space", 0x00000000: "read_only_space", } diff --git a/deps/v8/tools/wasm/module-inspector.cc b/deps/v8/tools/wasm/module-inspector.cc index a31de1446ca76a..267542b24ed76d 100644 --- a/deps/v8/tools/wasm/module-inspector.cc +++ b/deps/v8/tools/wasm/module-inspector.cc @@ -9,7 +9,6 @@ #include "include/libplatform/libplatform.h" #include "include/v8-initialization.h" -#include "src/wasm/function-body-decoder-impl.h" #include "src/wasm/module-decoder-impl.h" #include "src/wasm/names-provider.h" #include "src/wasm/string-builder-multiline.h" @@ -235,7 +234,6 @@ class ExtendedFunctionDis : public FunctionBodyDisassembler { // Decode and print locals. uint32_t locals_length; - InitializeLocalsFromSig(); DecodeLocals(pc_, &locals_length); if (failed()) { // TODO(jkummerow): Better error handling. diff --git a/deps/v8/tools/whitespace.txt b/deps/v8/tools/whitespace.txt index 60b58be703c2d3..34d64218ba44b4 100644 --- a/deps/v8/tools/whitespace.txt +++ b/deps/v8/tools/whitespace.txt @@ -7,7 +7,7 @@ A Smi balks into a war and says: The doubles heard this and started to unbox. The Smi looked at them when a crazy v8-autoroll account showed up... The autoroller bought a round of Himbeerbrause. Suddenly....... -The bartender starts to shake the bottles............................ +The bartender starts to shake the bottles............................. I can't add trailing whitespaces, so I'm adding this line............ I'm starting to think that just adding trailing whitespaces might not be bad. 
diff --git a/test/parallel/test-v8-stats.js b/test/parallel/test-v8-stats.js index 2eaa3c5b060914..d7360db03309eb 100644 --- a/test/parallel/test-v8-stats.js +++ b/test/parallel/test-v8-stats.js @@ -47,6 +47,8 @@ const expectedHeapSpaces = [ 'new_space', 'old_space', 'read_only_space', + 'shared_large_object_space', + 'shared_space', ]; const heapSpaceStatistics = v8.getHeapSpaceStatistics(); const actualHeapSpaceNames = heapSpaceStatistics.map((s) => s.space_name); diff --git a/test/v8-updates/test-trace-gc-flag.js b/test/v8-updates/test-trace-gc-flag.js index d84e359525127c..0fe9beb52a3503 100644 --- a/test/v8-updates/test-trace-gc-flag.js +++ b/test/v8-updates/test-trace-gc-flag.js @@ -24,7 +24,7 @@ const fixtures = require('../common/fixtures'); scavengeRegex, scavengeRegex, scavengeRegex, - /\bMark-sweep\b/, + /\bMark-Compact\b/, ]; lines.forEach((line, index) => { assert.match(line, expectedOutput[index]);
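
Returning to the testproc/fuzzer.py change earlier in this patch: with the per-fuzzer minimum interval removed, the deopt analysis and flag generation depend only on the module-level MIN_DEOPT and MAX_DEOPT constants. The worked sketch below assumes an illustrative d8 output line (a real analysis run passes --deopt-every-n-times=10**9 together with --print-deopt-stress); the seed is arbitrary.

    import random

    MIN_DEOPT = 1
    MAX_DEOPT = 10**9

    # The stress counter starts at MAX_DEOPT and counts down, so the
    # difference is the number of deopt points the test exercised.
    line = '=== Stress deopt counter: 999999000'
    deopt_points = MAX_DEOPT - int(line.split(' ')[-1])  # 1000
    assert deopt_points >= MIN_DEOPT  # otherwise DeoptAnalyzer skips the test

    # DeoptFuzzer halves the analysis value and draws an interval from
    # [MIN_DEOPT, value]; without an analysis value it falls back to 10000.
    rng = random.Random(12345)
    value = deopt_points // 2
    interval = rng.randint(MIN_DEOPT, max(value, MIN_DEOPT))
    print(f'--deopt-every-n-times={interval}')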