Skip to content

Commit d57c889

Browse files
rsc
authored and aclements committed
runtime: wait to update arena_used until after mapping bitmap
This avoids a race with gcmarkwb_m that was leading to faults. Fixes #10212. Change-Id: I6fcf8d09f2692227063ce29152cb57366ea22487 Reviewed-on: https://go-review.googlesource.com/10816 Run-TryBot: Russ Cox <[email protected]> Reviewed-by: Austin Clements <[email protected]>
1 parent a788c91 commit d57c889

File tree

3 files changed

+24
-11
lines changed

3 files changed

+24
-11
lines changed

src/runtime/malloc.go

+6-6
Original file line numberDiff line numberDiff line change
@@ -418,9 +418,9 @@ func mHeap_SysAlloc(h *mheap, n uintptr) unsafe.Pointer {
418418
// Keep taking from our reservation.
419419
p := h.arena_used
420420
sysMap((unsafe.Pointer)(p), n, h.arena_reserved, &memstats.heap_sys)
421-
h.arena_used += n
422-
mHeap_MapBits(h)
423-
mHeap_MapSpans(h)
421+
mHeap_MapBits(h, p+n)
422+
mHeap_MapSpans(h, p+n)
423+
h.arena_used = p+n
424424
if raceenabled {
425425
racemapshadow((unsafe.Pointer)(p), n)
426426
}
@@ -454,12 +454,12 @@ func mHeap_SysAlloc(h *mheap, n uintptr) unsafe.Pointer {
454454
p_end := p + p_size
455455
p += -p & (_PageSize - 1)
456456
if uintptr(p)+n > uintptr(h.arena_used) {
457-
h.arena_used = p + n
457+
mHeap_MapBits(h, p+n)
458+
mHeap_MapSpans(h, p+n)
459+
h.arena_used = p+n
458460
if p_end > h.arena_end {
459461
h.arena_end = p_end
460462
}
461-
mHeap_MapBits(h)
462-
mHeap_MapSpans(h)
463463
if raceenabled {
464464
racemapshadow((unsafe.Pointer)(p), n)
465465
}

src/runtime/mbitmap.go

+7-2
Original file line numberDiff line numberDiff line change
@@ -118,15 +118,20 @@ func subtract1(p *byte) *byte {
118118

119119
// mHeap_MapBits is called each time arena_used is extended.
120120
// It maps any additional bitmap memory needed for the new arena memory.
121+
// It must be called with the expected new value of arena_used,
122+
// *before* h.arena_used has been updated.
123+
// Waiting to update arena_used until after the memory has been mapped
124+
// avoids faults when other threads try to access the bitmap immediately
125+
// after observing the change to arena_used.
121126
//
122127
//go:nowritebarrier
123-
func mHeap_MapBits(h *mheap) {
128+
func mHeap_MapBits(h *mheap, arena_used uintptr) {
124129
// Caller has added extra mappings to the arena.
125130
// Add extra mappings of bitmap words as needed.
126131
// We allocate extra bitmap pieces in chunks of bitmapChunk.
127132
const bitmapChunk = 8192
128133

129-
n := (mheap_.arena_used - mheap_.arena_start) / heapBitmapScale
134+
n := (arena_used - mheap_.arena_start) / heapBitmapScale
130135
n = round(n, bitmapChunk)
131136
n = round(n, _PhysPageSize)
132137
if h.bitmap_mapped >= n {

src/runtime/mheap.go

+11-3
Original file line numberDiff line numberDiff line change
@@ -279,10 +279,18 @@ func mHeap_Init(h *mheap, spans_size uintptr) {
279279
sp.cap = int(spans_size / ptrSize)
280280
}
281281

282-
func mHeap_MapSpans(h *mheap) {
282+
// mHeap_MapSpans makes sure that the spans are mapped
283+
// up to the new value of arena_used.
284+
//
285+
// It must be called with the expected new value of arena_used,
286+
// *before* h.arena_used has been updated.
287+
// Waiting to update arena_used until after the memory has been mapped
288+
// avoids faults when other threads try to access the bitmap immediately
289+
// after observing the change to arena_used.
290+
func mHeap_MapSpans(h *mheap, arena_used uintptr) {
283291
// Map spans array, PageSize at a time.
284-
n := uintptr(unsafe.Pointer(h.arena_used))
285-
n -= uintptr(unsafe.Pointer(h.arena_start))
292+
n := arena_used
293+
n -= h.arena_start
286294
n = n / _PageSize * ptrSize
287295
n = round(n, _PhysPageSize)
288296
if h.spans_mapped >= n {

0 commit comments

Comments
 (0)