diff --git a/src/runtime/mheap.go b/src/runtime/mheap.go
index 610011863b..cd4634448c 100644
--- a/src/runtime/mheap.go
+++ b/src/runtime/mheap.go
@@ -100,13 +100,11 @@ type mheap struct {
 	// accounting for current progress. If we could only adjust
 	// the slope, it would create a discontinuity in debt if any
 	// progress has already been made.
-	pagesInUse         atomic.Uint64 // pages of spans in stats mSpanInUse
-	pagesSwept         atomic.Uint64 // pages swept this cycle
-	pagesSweptBasis    atomic.Uint64 // pagesSwept to use as the origin of the sweep ratio
-	sweepHeapLiveBasis uint64        // value of gcController.heapLive to use as the origin of sweep ratio; written with lock, read without
-	sweepPagesPerByte  float64       // proportional sweep ratio; written with lock, read without
-	// TODO(austin): pagesInUse should be a uintptr, but the 386
-	// compiler can't 8-byte align fields.
+	pagesInUse         atomic.Uintptr // pages of spans in stats mSpanInUse
+	pagesSwept         atomic.Uint64  // pages swept this cycle
+	pagesSweptBasis    atomic.Uint64  // pagesSwept to use as the origin of the sweep ratio
+	sweepHeapLiveBasis uint64         // value of gcController.heapLive to use as the origin of sweep ratio; written with lock, read without
+	sweepPagesPerByte  float64        // proportional sweep ratio; written with lock, read without

 	// Page reclaimer state

@@ -1379,7 +1377,7 @@ HaveSpan:
 		atomic.Or8(&arena.pageInUse[pageIdx], pageMask)

 		// Update related page sweeper stats.
-		h.pagesInUse.Add(int64(npages))
+		h.pagesInUse.Add(npages)
 	}

 	// Make sure the newly allocated span will be observed
@@ -1529,7 +1527,7 @@ func (h *mheap) freeSpanLocked(s *mspan, typ spanAllocType) {
 		print("mheap.freeSpanLocked - span ", s, " ptr ", hex(s.base()), " allocCount ", s.allocCount, " sweepgen ", s.sweepgen, "/", h.sweepgen, "\n")
 		throw("mheap.freeSpanLocked - invalid free")
 	}
-	h.pagesInUse.Add(-int64(s.npages))
+	h.pagesInUse.Add(-s.npages)

 	// Clear in-use bit in arena page bitmap.
 	arena, pageIdx, pageMask := pageIndexOf(s.base())