var runtime.memstats
267 uses
runtime (current package)
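memstats (declared at mstats.go#L170, listed below) is the runtime's unexported global statistics record; user code reads a consistent copy of it through the public runtime.ReadMemStats API, which is populated from the mstats.go fields listed below. A minimal sketch of taking such a snapshot from user code (standard library only; the printed fields are illustrative, not part of this listing):

	package main

	import (
		"fmt"
		"runtime"
	)

	func main() {
		var m runtime.MemStats
		// ReadMemStats briefly stops the world and copies the runtime's
		// internal memstats into m.
		runtime.ReadMemStats(&m)
		fmt.Printf("HeapAlloc=%d HeapSys=%d NumGC=%d PauseTotalNs=%d\n",
			m.HeapAlloc, m.HeapSys, m.NumGC, m.PauseTotalNs)
	}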
heapdump.go#L712: sysFree(unsafe.Pointer(&tmpbuf[0]), uintptr(len(tmpbuf)), &memstats.other_sys)
heapdump.go#L730: sysFree(unsafe.Pointer(&tmpbuf[0]), uintptr(len(tmpbuf)), &memstats.other_sys)
heapdump.go#L733: p := sysAlloc(n, &memstats.other_sys)
iface.go#L66: m = (*itab)(persistentalloc(unsafe.Sizeof(itab{})+uintptr(len(inter.mhdr)-1)*sys.PtrSize, 0, &memstats.other_sys))
malloc.go#L432: memstats.by_size[i].size = uint32(class_to_size[i])
malloc.go#L635: v = h.arena.alloc(n, heapArenaBytes, &memstats.heap_sys)
malloc.go#L729: sysMap(v, size, &memstats.heap_sys)
malloc.go#L748: r = (*heapArena)(h.heapArenaAlloc.alloc(unsafe.Sizeof(*r), sys.PtrSize, &memstats.gcMiscSys))
malloc.go#L750: r = (*heapArena)(persistentalloc(unsafe.Sizeof(*r), sys.PtrSize, &memstats.gcMiscSys))
malloc.go#L762: newArray := (*notInHeap)(persistentalloc(size, sys.PtrSize, &memstats.gcMiscSys))
malloc.go#L935: return persistentalloc(size, align, &memstats.other_sys)
malloc.go#L1355: persistent.base = (*notInHeap)(sysAlloc(persistentChunkSize, &memstats.other_sys))
malloc.go#L1380: if sysStat != &memstats.other_sys {
malloc.go#L1382: memstats.other_sys.add(-int64(size))
mbitmap.go#L1015: debugPtrmask.data = (*byte)(persistentalloc(1<<20, 1, &memstats.other_sys))
mbitmap.go#L1553: x := (*[1 << 30]byte)(persistentalloc(n+1, 1, &memstats.buckhash_sys))[:n+1]
mcache.go#L177: stats := memstats.heapStats.acquire()
mcache.go#L179: memstats.heapStats.release()
mcache.go#L183: atomic.Xadd64(&memstats.heap_live, int64(s.npages*pageSize)-int64(usedBytes))
mcache.go#L187: atomic.Xadd64(&memstats.tinyallocs, int64(c.tinyAllocs))
mcache.go#L193: atomic.Xadd64(&memstats.heap_scan, int64(c.scanAlloc))
mcache.go#L228: stats := memstats.heapStats.acquire()
mcache.go#L231: memstats.heapStats.release()
mcache.go#L234: atomic.Xadd64(&memstats.heap_live, int64(npages*pageSize))
mcache.go#L253: atomic.Xadd64(&memstats.heap_scan, int64(c.scanAlloc))
mcache.go#L262: stats := memstats.heapStats.acquire()
mcache.go#L264: memstats.heapStats.release()
mcache.go#L273: atomic.Xadd64(&memstats.heap_live, -int64(n)*int64(s.elemsize))
mcache.go#L283: atomic.Xadd64(&memstats.tinyallocs, int64(c.tinyAllocs))
mcheckmark.go#L46: bitmap = (*checkmarksMap)(persistentalloc(unsafe.Sizeof(*bitmap), 0, &memstats.gcMiscSys))
metrics.go#L133: hist.counts[0] = atomic.Load64(&memstats.gcPauseDist.underflow)
metrics.go#L134: for i := range memstats.gcPauseDist.counts {
metrics.go#L135: hist.counts[i+1] = atomic.Load64(&memstats.gcPauseDist.counts[i])
metrics.go#L328: memstats.heapStats.read(&a.heapStatsDelta)
metrics.go#L363: a.stacksSys = memstats.stacks_sys.load()
metrics.go#L364: a.buckHashSys = memstats.buckhash_sys.load()
metrics.go#L365: a.gcMiscSys = memstats.gcMiscSys.load()
metrics.go#L366: a.otherSys = memstats.other_sys.load()
metrics.go#L367: a.heapGoal = atomic.Load64(&memstats.next_gc)
metrics.go#L368: a.gcCyclesDone = uint64(memstats.numgc)
metrics.go#L369: a.gcCyclesForced = uint64(memstats.numforcedgc)
metrics.go#L373: a.mSpanSys = memstats.mspan_sys.load()
metrics.go#L375: a.mCacheSys = memstats.mcache_sys.load()
mfinal.go#L91: finc = (*finblock)(persistentalloc(_FinBlockSize, 0, &memstats.gcMiscSys))
mgc.go#L181: memstats.triggerRatio = 7 / 8.0
mgc.go#L186: memstats.heap_marked = uint64(float64(heapminimum) / (1 + memstats.triggerRatio))
mgc.go#L221: memstats.enablegc = true // now that runtime is initialized, GC is okay
mgc.go#L236: gcSetTriggerRatio(memstats.triggerRatio)
mgc.go#L448: if memstats.next_gc < memstats.heap_live+1024*1024 {
mgc.go#L449: memstats.next_gc = memstats.heap_live + 1024*1024
mgc.go#L494: " (scan ", memstats.heap_scan>>20, " MB in ",
mgc.go#L496: memstats.next_gc>>20, " MB)",
mgc.go#L530: live := atomic.Load64(&memstats.heap_live)
mgc.go#L531: scan := atomic.Load64(&memstats.heap_scan)
mgc.go#L536: heapGoal := int64(atomic.Load64(&memstats.next_gc))
mgc.go#L609: return memstats.triggerRatio
mgc.go#L629: actualGrowthRatio := float64(memstats.heap_live)/float64(memstats.heap_marked) - 1
mgc.go#L639: triggerError := goalGrowthRatio - memstats.triggerRatio - utilization/gcGoalUtilization*(actualGrowthRatio-memstats.triggerRatio)
mgc.go#L643: triggerRatio := memstats.triggerRatio + triggerGain*triggerError
mgc.go#L648: H_m_prev := memstats.heap_marked
mgc.go#L649: h_t := memstats.triggerRatio
mgc.go#L650: H_T := memstats.gc_trigger
mgc.go#L652: H_a := memstats.heap_live
mgc.go#L831: goal = memstats.heap_marked + memstats.heap_marked*uint64(gcpercent)/100
mgc.go#L869: memstats.triggerRatio = triggerRatio
mgc.go#L877: trigger = uint64(float64(memstats.heap_marked) * (1 + triggerRatio))
mgc.go#L886: sweepMin := atomic.Load64(&memstats.heap_live) + sweepMinHeapDistance
mgc.go#L895: print("runtime: next_gc=", memstats.next_gc, " heap_marked=", memstats.heap_marked, " heap_live=", memstats.heap_live, " initialHeapLive=", work.initialHeapLive, "triggerRatio=", triggerRatio, " minTrigger=", minTrigger, "\n")
mgc.go#L907: memstats.gc_trigger = trigger
mgc.go#L908: atomic.Store64(&memstats.next_gc, goal)
mgc.go#L927: heapLiveBasis := atomic.Load64(&memstats.heap_live)
mgc.go#L967: egogc := float64(atomic.Load64(&memstats.next_gc)-memstats.heap_marked) / float64(memstats.heap_marked)
mgc.go#L1263: if !memstats.enablegc || panicking != 0 || gcphase != _GCoff {
mgc.go#L1272: return memstats.heap_live >= memstats.gc_trigger
mgc.go#L1277: lastgc := int64(atomic.Load64(&memstats.last_gc_nanotime))
mgc.go#L1368: work.heap0 = atomic.Load64(&memstats.heap_live)
mgc.go#L1391: work.heapGoal = memstats.next_gc
mgc.go#L1446: memstats.gcPauseDist.record(now - work.pauseStart)
mgc.go#L1594: memstats.gcPauseDist.record(now - work.pauseStart)
mgc.go#L1632: work.heap1 = memstats.heap_live
mgc.go#L1695: memstats.last_next_gc = memstats.next_gc
mgc.go#L1696: memstats.last_heap_inuse = memstats.heap_inuse
mgc.go#L1707: memstats.gcPauseDist.record(now - work.pauseStart)
mgc.go#L1708: atomic.Store64(&memstats.last_gc_unix, uint64(unixNow)) // must be Unix time to make sense to user
mgc.go#L1709: atomic.Store64(&memstats.last_gc_nanotime, uint64(now)) // monotonic time for us
mgc.go#L1710: memstats.pause_ns[memstats.numgc%uint32(len(memstats.pause_ns))] = uint64(work.pauseNS)
mgc.go#L1711: memstats.pause_end[memstats.numgc%uint32(len(memstats.pause_end))] = uint64(unixNow)
mgc.go#L1712: memstats.pause_total_ns += uint64(work.pauseNS)
mgc.go#L1725: memstats.gc_cpu_fraction = float64(work.totaltime) / float64(totalCpu)
mgc.go#L1732: memstats.numforcedgc++
mgc.go#L1737: memstats.numgc++
mgc.go#L1774: util := int(memstats.gc_cpu_fraction * 100)
mgc.go#L1778: print("gc ", memstats.numgc,
mgc.go#L2139: memstats.heap_marked = work.bytesMarked
mgc.go#L2149: memstats.heap_scan += uint64(c.scanAlloc)
mgc.go#L2156: memstats.heap_live = work.bytesMarked
mgc.go#L2157: memstats.heap_scan = uint64(gcController.scanWork)
mgc.go#L2252: work.initialHeapLive = atomic.Load64(&memstats.heap_live)
mgcscavenge.go#L103: return memstats.heap_sys.load() - atomic.Load64(&memstats.heap_released)
mgcscavenge.go#L121: if memstats.last_next_gc == 0 {
mgcscavenge.go#L126: goalRatio := float64(atomic.Load64(&memstats.next_gc)) / float64(memstats.last_next_gc)
mgcscavenge.go#L127: retainedGoal := uint64(float64(memstats.last_heap_inuse) * goalRatio)
mgcscavenge.go#L432: atomic.Load64(&memstats.heap_released)>>10, " KiB total, ",
mgcscavenge.go#L433: (atomic.Load64(&memstats.heap_inuse)*100)/heapRetained(), "% util",
mgcscavenge.go#L733: atomic.Xadd64(&memstats.heap_released, nbytes)
mgcscavenge.go#L736: stats := memstats.heapStats.acquire()
mgcscavenge.go#L739: memstats.heapStats.release()
mgcsweep.go#L267: print("pacer: sweep done at heap size ", memstats.heap_live>>20, "MB; allocated ", (memstats.heap_live-mheap_.sweepHeapLiveBasis)>>20, "MB during sweep; swept ", mheap_.pagesSwept, " pages at ", sweepRatio, " pages/byte\n")
mgcsweep.go#L506: stats := memstats.heapStats.acquire()
mgcsweep.go#L508: memstats.heapStats.release()
mgcsweep.go#L553: stats := memstats.heapStats.acquire()
mgcsweep.go#L556: memstats.heapStats.release()
mgcsweep.go#L648: newHeapLive := uintptr(atomic.Load64(&memstats.heap_live)-mheap_.sweepHeapLiveBasis) + spanBytes
mheap.go#L507: sp.array = sysAlloc(uintptr(n)*sys.PtrSize, &memstats.other_sys)
mheap.go#L519: sysFree(unsafe.Pointer(&oldAllspans[0]), uintptr(cap(oldAllspans))*unsafe.Sizeof(oldAllspans[0]), &memstats.other_sys)
mheap.go#L705: h.spanalloc.init(unsafe.Sizeof(mspan{}), recordspan, unsafe.Pointer(h), &memstats.mspan_sys)
mheap.go#L706: h.cachealloc.init(unsafe.Sizeof(mcache{}), nil, nil, &memstats.mcache_sys)
mheap.go#L707: h.specialfinalizeralloc.init(unsafe.Sizeof(specialfinalizer{}), nil, nil, &memstats.other_sys)
mheap.go#L708: h.specialprofilealloc.init(unsafe.Sizeof(specialprofile{}), nil, nil, &memstats.other_sys)
mheap.go#L709: h.arenaHintAlloc.init(unsafe.Sizeof(arenaHint{}), nil, nil, &memstats.other_sys)
mheap.go#L726: h.pages.init(&h.lock, &memstats.gcMiscSys)
mheap.go#L1272: atomic.Xadd64(&memstats.heap_released, -int64(scav))
mheap.go#L1276: atomic.Xadd64(&memstats.heap_inuse, int64(nbytes))
mheap.go#L1280: memstats.heap_sys.add(-int64(nbytes))
mheap.go#L1283: stats := memstats.heapStats.acquire()
mheap.go#L1296: memstats.heapStats.release()
mheap.go#L1348: print("runtime: out of memory: cannot allocate ", ask, "-byte block (", memstats.heap_sys, " in use)\n")
mheap.go#L1375: atomic.Xadd64(&memstats.heap_released, int64(asize))
mheap.go#L1376: stats := memstats.heapStats.acquire()
mheap.go#L1378: memstats.heapStats.release()
mheap.go#L1467: atomic.Xadd64(&memstats.heap_inuse, -int64(nbytes))
mheap.go#L1471: memstats.heap_sys.add(int64(nbytes))
mheap.go#L1474: stats := memstats.heapStats.acquire()
mheap.go#L1485: memstats.heapStats.release()
mheap.go#L2028: result = (*gcBitsArena)(sysAlloc(gcBitsChunkBytes, &memstats.gcMiscSys))
mprof.go#L173: b := (*bucket)(persistentalloc(size, 0, &memstats.buckhash_sys))
mprof.go#L207: buckhash = (*[buckHashSize]*bucket)(sysAlloc(unsafe.Sizeof(*buckhash), &memstats.buckhash_sys))
mspanset.go#L105: newSpine := persistentalloc(newCap*sys.PtrSize, cpu.CacheLineSize, &memstats.gcMiscSys)
mspanset.go#L286: return (*spanSetBlock)(persistentalloc(unsafe.Sizeof(spanSetBlock{}), cpu.CacheLineSize, &memstats.gcMiscSys))
mstats.go#L170: var memstats mstats
mstats.go#L446: if offset := unsafe.Offsetof(memstats.heap_live); offset%8 != 0 {
mstats.go#L450: if offset := unsafe.Offsetof(memstats.heapStats); offset%8 != 0 {
mstats.go#L454: if offset := unsafe.Offsetof(memstats.gcPauseDist); offset%8 != 0 {
mstats.go#L485: stats.Alloc = memstats.alloc
mstats.go#L486: stats.TotalAlloc = memstats.total_alloc
mstats.go#L487: stats.Sys = memstats.sys
mstats.go#L488: stats.Mallocs = memstats.nmalloc
mstats.go#L489: stats.Frees = memstats.nfree
mstats.go#L490: stats.HeapAlloc = memstats.alloc
mstats.go#L491: stats.HeapSys = memstats.heap_sys.load()
mstats.go#L508: stats.HeapIdle = memstats.heap_sys.load() - memstats.heap_inuse
mstats.go#L509: stats.HeapInuse = memstats.heap_inuse
mstats.go#L510: stats.HeapReleased = memstats.heap_released
mstats.go#L511: stats.HeapObjects = memstats.heap_objects
mstats.go#L512: stats.StackInuse = memstats.stacks_inuse
mstats.go#L515: stats.StackSys = memstats.stacks_inuse + memstats.stacks_sys.load()
mstats.go#L516: stats.MSpanInuse = memstats.mspan_inuse
mstats.go#L517: stats.MSpanSys = memstats.mspan_sys.load()
mstats.go#L518: stats.MCacheInuse = memstats.mcache_inuse
mstats.go#L519: stats.MCacheSys = memstats.mcache_sys.load()
mstats.go#L520: stats.BuckHashSys = memstats.buckhash_sys.load()
mstats.go#L524: stats.GCSys = memstats.gcMiscSys.load() + memstats.gcWorkBufInUse + memstats.gcProgPtrScalarBitsInUse
mstats.go#L525: stats.OtherSys = memstats.other_sys.load()
mstats.go#L526: stats.NextGC = memstats.next_gc
mstats.go#L527: stats.LastGC = memstats.last_gc_unix
mstats.go#L528: stats.PauseTotalNs = memstats.pause_total_ns
mstats.go#L529: stats.PauseNs = memstats.pause_ns
mstats.go#L530: stats.PauseEnd = memstats.pause_end
mstats.go#L531: stats.NumGC = memstats.numgc
mstats.go#L532: stats.NumForcedGC = memstats.numforcedgc
mstats.go#L533: stats.GCCPUFraction = memstats.gc_cpu_fraction
mstats.go#L545: if l := len(memstats.by_size); l < bySizeLen {
mstats.go#L549: stats.BySize[i].Size = memstats.by_size[i].size
mstats.go#L550: stats.BySize[i].Mallocs = memstats.by_size[i].nmalloc
mstats.go#L551: stats.BySize[i].Frees = memstats.by_size[i].nfree
mstats.go#L568: if cap(p) < len(memstats.pause_ns)+3 {
mstats.go#L575: n := memstats.numgc
mstats.go#L576: if n > uint32(len(memstats.pause_ns)) {
mstats.go#L577: n = uint32(len(memstats.pause_ns))
mstats.go#L586: j := (memstats.numgc - 1 - i) % uint32(len(memstats.pause_ns))
mstats.go#L587: p[i] = memstats.pause_ns[j]
mstats.go#L588: p[n+i] = memstats.pause_end[j]
mstats.go#L591: p[n+n] = memstats.last_gc_unix
mstats.go#L592: p[n+n+1] = uint64(memstats.numgc)
mstats.go#L593: p[n+n+2] = memstats.pause_total_ns
mstats.go#L612: memstats.mcache_inuse = uint64(mheap_.cachealloc.inuse)
mstats.go#L613: memstats.mspan_inuse = uint64(mheap_.spanalloc.inuse)
mstats.go#L614: memstats.sys = memstats.heap_sys.load() + memstats.stacks_sys.load() + memstats.mspan_sys.load() +
mstats.go#L615: memstats.mcache_sys.load() + memstats.buckhash_sys.load() + memstats.gcMiscSys.load() +
mstats.go#L616: memstats.other_sys.load()
mstats.go#L625: memstats.alloc = 0
mstats.go#L626: memstats.total_alloc = 0
mstats.go#L627: memstats.nmalloc = 0
mstats.go#L628: memstats.nfree = 0
mstats.go#L629: for i := 0; i < len(memstats.by_size); i++ {
mstats.go#L630: memstats.by_size[i].nmalloc = 0
mstats.go#L631: memstats.by_size[i].nfree = 0
mstats.go#L635: memstats.heapStats.unsafeRead(&consStats)
mstats.go#L639: memstats.nmalloc += uint64(consStats.largeAllocCount)
mstats.go#L641: memstats.nfree += uint64(consStats.largeFreeCount)
mstats.go#L648: memstats.nmalloc += a
mstats.go#L649: memstats.by_size[i].nmalloc = a
mstats.go#L654: memstats.nfree += f
mstats.go#L655: memstats.by_size[i].nfree = f
mstats.go#L659: memstats.nfree += memstats.tinyallocs
mstats.go#L660: memstats.nmalloc += memstats.tinyallocs
mstats.go#L663: memstats.total_alloc = totalAlloc
mstats.go#L664: memstats.alloc = totalAlloc - totalFree
mstats.go#L665: memstats.heap_objects = memstats.nmalloc - memstats.nfree
mstats.go#L667: memstats.stacks_inuse = uint64(consStats.inStacks)
mstats.go#L668: memstats.gcWorkBufInUse = uint64(consStats.inWorkBufs)
mstats.go#L669: memstats.gcProgPtrScalarBitsInUse = uint64(consStats.inPtrScalarBits)
mstats.go#L672: memstats.sys += memstats.stacks_inuse + memstats.gcWorkBufInUse + memstats.gcProgPtrScalarBitsInUse
mstats.go#L686: if memstats.heap_inuse != uint64(consStats.inHeap) {
mstats.go#L687: print("runtime: heap_inuse=", memstats.heap_inuse, "\n")
mstats.go#L691: if memstats.heap_released != uint64(consStats.released) {
mstats.go#L692: print("runtime: heap_released=", memstats.heap_released, "\n")
mstats.go#L696: globalRetained := memstats.heap_sys.load() - memstats.heap_released
netpoll.go#L539: mem := persistentalloc(n*pdSize, 0, &memstats.other_sys)
os_darwin.go#L261: memstats.stacks_sys.add(int64(stacksize))
proc.go#L619: lockInit(&memstats.heapStats.noPLock, lockRankLeafRank)
stack.go#L344: v := sysAlloc(uintptr(n), &memstats.stacks_sys)
stack.go#L445: sysFree(v, n, &memstats.stacks_sys)
trace.go#L364: sysFree(unsafe.Pointer(buf), unsafe.Sizeof(*buf.ptr()), &memstats.other_sys)
trace.go#L643: buf = traceBufPtr(sysAlloc(unsafe.Sizeof(traceBuf{}), &memstats.other_sys))
trace.go#L947: block := (*traceAllocBlock)(sysAlloc(unsafe.Sizeof(traceAllocBlock{}), &memstats.other_sys))
trace.go#L965: sysFree(unsafe.Pointer(block), unsafe.Sizeof(traceAllocBlock{}), &memstats.other_sys)
trace.go#L1146: traceEvent(traceEvHeapAlloc, -1, memstats.heap_live)
trace.go#L1150: if nextGC := atomic.Load64(&memstats.next_gc); nextGC == ^uint64(0) {
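The metrics.go references above show the same fields being exported through the runtime/metrics package introduced in Go 1.16: for example, metrics.go#L367 loads memstats.next_gc as the heap goal, and metrics.go#L368 exposes memstats.numgc as the GC cycle count. A minimal sketch of reading those values from user code, assuming a Go 1.16+ toolchain (metric names are taken from the runtime/metrics documentation):

	package main

	import (
		"fmt"
		"runtime/metrics"
	)

	func main() {
		// "/gc/heap/goal:bytes" surfaces memstats.next_gc (metrics.go#L367);
		// "/gc/cycles/total:gc-cycles" surfaces memstats.numgc (metrics.go#L368).
		samples := []metrics.Sample{
			{Name: "/gc/heap/goal:bytes"},
			{Name: "/gc/cycles/total:gc-cycles"},
		}
		metrics.Read(samples)
		for _, s := range samples {
			if s.Value.Kind() == metrics.KindUint64 {
				fmt.Println(s.Name, s.Value.Uint64())
			}
		}
	}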
The pages are generated with Golds v0.3.2-preview (GOOS=darwin GOARCH=amd64).
Golds is a Go 101 project developed by Tapir Liu. PRs and bug reports can be submitted to the project's issue list.