Skip to content

Commit cf49ebb

Browse files
committed
deps: V8: cherry-pick 53784bdb8f01
Original commit message:

    [liftoff] Handle constant memory indexes specially

    This adds detection for constant memory indexes which can statically
    be proven to be in-bounds (because the effective offset is within the
    minimum memory size). In these cases, we can skip the bounds check and
    the out-of-line code for the trap-handler. This often saves 1-2% of
    code size.

    [email protected]

    Bug: v8:11802
    Change-Id: I0ee094e6f1f5d132af1d6a8a7c539a4af6c3cb5e
    Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2919827
    Commit-Queue: Clemens Backes <[email protected]>
    Reviewed-by: Andreas Haas <[email protected]>
    Cr-Commit-Position: refs/heads/master@{#74825}

Refs: v8/v8@53784bd
PR-URL: #39337
Reviewed-By: Matteo Collina <[email protected]>
Reviewed-By: James M Snell <[email protected]>
1 parent 3d351b2 commit cf49ebb

File tree

2 files changed

+99
-43
lines changed

2 files changed

+99
-43
lines changed

Diff for: common.gypi

+1-1
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636

3737
# Reset this number to 0 on major V8 upgrades.
3838
# Increment by one for each non-official patch applied to deps/v8.
39-
'v8_embedder_string': '-node.14',
39+
'v8_embedder_string': '-node.15',
4040

4141
##### V8 defaults for Node.js #####
4242

Diff for: deps/v8/src/wasm/baseline/liftoff-compiler.cc

+98-42
Original file line numberDiff line numberDiff line change
@@ -2767,33 +2767,73 @@ class LiftoffCompiler {
27672767
return index;
27682768
}
27692769

2770+
bool IndexStaticallyInBounds(const LiftoffAssembler::VarState& index_slot,
2771+
int access_size, uintptr_t* offset) {
2772+
if (!index_slot.is_const()) return false;
2773+
2774+
// Potentially zero extend index (which is a 32-bit constant).
2775+
const uintptr_t index = static_cast<uint32_t>(index_slot.i32_const());
2776+
const uintptr_t effective_offset = index + *offset;
2777+
2778+
if (effective_offset < index // overflow
2779+
|| !base::IsInBounds<uintptr_t>(effective_offset, access_size,
2780+
env_->min_memory_size)) {
2781+
return false;
2782+
}
2783+
2784+
*offset = effective_offset;
2785+
return true;
2786+
}
2787+
27702788
void LoadMem(FullDecoder* decoder, LoadType type,
27712789
const MemoryAccessImmediate<validate>& imm,
27722790
const Value& index_val, Value* result) {
27732791
ValueKind kind = type.value_type().kind();
2792+
RegClass rc = reg_class_for(kind);
27742793
if (!CheckSupportedType(decoder, kind, "load")) return;
2775-
LiftoffRegister full_index = __ PopToRegister();
2776-
Register index = BoundsCheckMem(decoder, type.size(), imm.offset,
2777-
full_index, {}, kDontForceCheck);
2778-
if (index == no_reg) return;
27792794

27802795
uintptr_t offset = imm.offset;
2781-
LiftoffRegList pinned = LiftoffRegList::ForRegs(index);
2782-
index = AddMemoryMasking(index, &offset, &pinned);
2783-
DEBUG_CODE_COMMENT("load from memory");
2784-
Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2785-
LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
2786-
RegClass rc = reg_class_for(kind);
2787-
LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
2788-
uint32_t protected_load_pc = 0;
2789-
__ Load(value, addr, index, offset, type, pinned, &protected_load_pc, true);
2790-
if (env_->use_trap_handler) {
2791-
AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
2792-
protected_load_pc);
2796+
Register index = no_reg;
2797+
2798+
// Only look at the slot, do not pop it yet (will happen in PopToRegister
2799+
// below, if this is not a statically-in-bounds index).
2800+
auto& index_slot = __ cache_state()->stack_state.back();
2801+
if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) {
2802+
__ cache_state()->stack_state.pop_back();
2803+
DEBUG_CODE_COMMENT("load from memory (constant offset)");
2804+
LiftoffRegList pinned;
2805+
Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2806+
LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
2807+
LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
2808+
__ Load(value, mem, no_reg, offset, type, pinned, nullptr, true);
2809+
__ PushRegister(kind, value);
2810+
} else {
2811+
LiftoffRegister full_index = __ PopToRegister();
2812+
index = BoundsCheckMem(decoder, type.size(), offset, full_index, {},
2813+
kDontForceCheck);
2814+
if (index == no_reg) return;
2815+
2816+
DEBUG_CODE_COMMENT("load from memory");
2817+
LiftoffRegList pinned = LiftoffRegList::ForRegs(index);
2818+
index = AddMemoryMasking(index, &offset, &pinned);
2819+
2820+
// Load the memory start address only now to reduce register pressure
2821+
// (important on ia32).
2822+
Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2823+
LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
2824+
LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
2825+
2826+
uint32_t protected_load_pc = 0;
2827+
__ Load(value, mem, index, offset, type, pinned, &protected_load_pc,
2828+
true);
2829+
if (env_->use_trap_handler) {
2830+
AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
2831+
protected_load_pc);
2832+
}
2833+
__ PushRegister(kind, value);
27932834
}
2794-
__ PushRegister(kind, value);
27952835

2796-
if (FLAG_trace_wasm_memory) {
2836+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
27972837
TraceMemoryOperation(false, type.mem_type().representation(), index,
27982838
offset, decoder->position());
27992839
}
@@ -2836,7 +2876,7 @@ class LiftoffCompiler {
28362876
}
28372877
__ PushRegister(kS128, value);
28382878

2839-
if (FLAG_trace_wasm_memory) {
2879+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
28402880
// Again load extend is different.
28412881
MachineRepresentation mem_rep =
28422882
transform == LoadTransformationKind::kExtend
@@ -2878,7 +2918,7 @@ class LiftoffCompiler {
28782918

28792919
__ PushRegister(kS128, result);
28802920

2881-
if (FLAG_trace_wasm_memory) {
2921+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
28822922
TraceMemoryOperation(false, type.mem_type().representation(), index,
28832923
offset, decoder->position());
28842924
}
@@ -2889,29 +2929,45 @@ class LiftoffCompiler {
28892929
const Value& index_val, const Value& value_val) {
28902930
ValueKind kind = type.value_type().kind();
28912931
if (!CheckSupportedType(decoder, kind, "store")) return;
2932+
28922933
LiftoffRegList pinned;
28932934
LiftoffRegister value = pinned.set(__ PopToRegister());
2894-
LiftoffRegister full_index = __ PopToRegister(pinned);
2895-
Register index = BoundsCheckMem(decoder, type.size(), imm.offset,
2896-
full_index, pinned, kDontForceCheck);
2897-
if (index == no_reg) return;
28982935

28992936
uintptr_t offset = imm.offset;
2900-
pinned.set(index);
2901-
index = AddMemoryMasking(index, &offset, &pinned);
2902-
DEBUG_CODE_COMMENT("store to memory");
2903-
Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2904-
LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
2905-
uint32_t protected_store_pc = 0;
2906-
LiftoffRegList outer_pinned;
2907-
if (FLAG_trace_wasm_memory) outer_pinned.set(index);
2908-
__ Store(addr, index, offset, value, type, outer_pinned,
2909-
&protected_store_pc, true);
2910-
if (env_->use_trap_handler) {
2911-
AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
2912-
protected_store_pc);
2937+
Register index = no_reg;
2938+
2939+
auto& index_slot = __ cache_state()->stack_state.back();
2940+
if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) {
2941+
__ cache_state()->stack_state.pop_back();
2942+
DEBUG_CODE_COMMENT("store to memory (constant offset)");
2943+
Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2944+
LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
2945+
__ Store(mem, no_reg, offset, value, type, pinned, nullptr, true);
2946+
} else {
2947+
LiftoffRegister full_index = __ PopToRegister(pinned);
2948+
index = BoundsCheckMem(decoder, type.size(), imm.offset, full_index,
2949+
pinned, kDontForceCheck);
2950+
if (index == no_reg) return;
2951+
2952+
pinned.set(index);
2953+
index = AddMemoryMasking(index, &offset, &pinned);
2954+
DEBUG_CODE_COMMENT("store to memory");
2955+
uint32_t protected_store_pc = 0;
2956+
// Load the memory start address only now to reduce register pressure
2957+
// (important on ia32).
2958+
Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
2959+
LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
2960+
LiftoffRegList outer_pinned;
2961+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) outer_pinned.set(index);
2962+
__ Store(mem, index, offset, value, type, outer_pinned,
2963+
&protected_store_pc, true);
2964+
if (env_->use_trap_handler) {
2965+
AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
2966+
protected_store_pc);
2967+
}
29132968
}
2914-
if (FLAG_trace_wasm_memory) {
2969+
2970+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
29152971
TraceMemoryOperation(true, type.mem_rep(), index, offset,
29162972
decoder->position());
29172973
}
@@ -2940,7 +2996,7 @@ class LiftoffCompiler {
29402996
AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
29412997
protected_store_pc);
29422998
}
2943-
if (FLAG_trace_wasm_memory) {
2999+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
29443000
TraceMemoryOperation(true, type.mem_rep(), index, offset,
29453001
decoder->position());
29463002
}
@@ -4156,9 +4212,9 @@ class LiftoffCompiler {
41564212
Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
41574213
LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
41584214
LiftoffRegList outer_pinned;
4159-
if (FLAG_trace_wasm_memory) outer_pinned.set(index);
4215+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) outer_pinned.set(index);
41604216
__ AtomicStore(addr, index, offset, value, type, outer_pinned);
4161-
if (FLAG_trace_wasm_memory) {
4217+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
41624218
TraceMemoryOperation(true, type.mem_rep(), index, offset,
41634219
decoder->position());
41644220
}
@@ -4184,7 +4240,7 @@ class LiftoffCompiler {
41844240
__ AtomicLoad(value, addr, index, offset, type, pinned);
41854241
__ PushRegister(kind, value);
41864242

4187-
if (FLAG_trace_wasm_memory) {
4243+
if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
41884244
TraceMemoryOperation(false, type.mem_type().representation(), index,
41894245
offset, decoder->position());
41904246
}

0 commit comments

Comments (0)