runtime.mspan.base (method)

55 uses

	runtime (current package)
		heapdump.go#L459: 				p := unsafe.Pointer(s.base() + uintptr(spf.special.offset))
		heapdump.go#L481: 		p := s.base()
		heapdump.go#L658: 			p := s.base() + uintptr(spp.special.offset)
		malloc.go#L857: 			return gclinkptr(result*s.elemsize + s.base())
		malloc.go#L893: 	v = gclinkptr(freeIndex*s.elemsize + s.base())
		malloc.go#L1081: 		x = unsafe.Pointer(span.base())
		mbitmap.go#L230: 	byteOffset := p - s.base()
		mbitmap.go#L345: 	print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state, "\n")
		mbitmap.go#L379: 	if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
		mbitmap.go#L395: 		base = s.base()
		mbitmap.go#L397: 		objIndex = (base - s.base()) >> s.divShift
		mbitmap.go#L402: 		base = s.base()
		mbitmap.go#L575: 	} else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
		mcache.go#L246: 	s.limit = s.base() + size
		mcache.go#L247: 	heapBitsForAddr(s.base()).initSpan(s)
		mcentral.go#L240: 	s.limit = s.base() + size*n
		mcentral.go#L241: 	heapBitsForAddr(s.base()).initSpan(s)
		mgcmark.go#L368: 				p := s.base() + uintptr(spf.special.offset)/s.elemsize*s.elemsize
		mgcmark.go#L1221: 		if b == s.base() {
		mgcmark.go#L1238: 			for oblet := b + maxObletBytes; oblet < s.base()+s.elemsize; oblet += maxObletBytes {
		mgcmark.go#L1248: 		n = s.base() + s.elemsize - b
		mgcmark.go#L1385: 		obj := span.base() + idx*span.elemsize
		mgcmark.go#L1436: 		arena, pageIdx, pageMask := pageIndexOf(span.base())
		mgcmark.go#L1469: 	print(" s.base()=", hex(s.base()), " s.limit=", hex(s.limit), " s.spanclass=", s.spanclass, " s.elemsize=", s.elemsize, " s.state=")
		mgcmark.go#L1524: 	arena, pageIdx, pageMask := pageIndexOf(span.base())
		mgcsweep.go#L364: 		p := s.base() + objIndex*size
		mgcsweep.go#L370: 			endOffset := p - s.base() + size
		mgcsweep.go#L383: 				p := s.base() + uintptr(special.offset)
		mgcsweep.go#L414: 				x := s.base() + i*s.elemsize
		mgcsweep.go#L549: 				sysFault(unsafe.Pointer(s.base()), size)
		mgcsweep.go#L586: 		addr := s.base() + i*s.elemsize
		mgcwork.go#L387: 			newb := (*workbuf)(unsafe.Pointer(s.base() + i))
		mheap.go#L468: func (s *mspan) base() uintptr {
		mheap.go#L609: 	if s == nil || b < s.base() {
		mheap.go#L684: 	if s == nil || s.state.get() != mSpanInUse || p < s.base() || p >= s.limit {
		mheap.go#L915: 			memclrNoHeapPointers(unsafe.Pointer(s.base()), s.npages<<_PageShift)
		mheap.go#L1218: 		s.limit = s.base() + s.npages*pageSize
		mheap.go#L1305: 	h.setSpans(s.base(), npages, s)
		mheap.go#L1313: 		arena, pageIdx, pageMask := pageIndexOf(s.base())
		mheap.go#L1413: 			base := unsafe.Pointer(s.base())
		mheap.go#L1450: 			print("mheap.freeSpanLocked - span ", s, " ptr ", hex(s.base()), " allocCount ", s.allocCount, " sweepgen ", s.sweepgen, "/", h.sweepgen, "\n")
		mheap.go#L1456: 		arena, pageIdx, pageMask := pageIndexOf(s.base())
		mheap.go#L1488: 	h.pages.free(s.base(), s.npages)
		mheap.go#L1658: 	arenaPage := (s.base() / pageSize) % pagesPerArena
		mheap.go#L1659: 	ai := arenaIndex(s.base())
		mheap.go#L1666: 	arenaPage := (s.base() / pageSize) % pagesPerArena
		mheap.go#L1667: 	ai := arenaIndex(s.base())
		mheap.go#L1690: 	offset := uintptr(p) - span.base()
		mheap.go#L1739: 	offset := uintptr(p) - span.base()
		mwbbuf.go#L273: 		arena, pageIdx, pageMask := pageIndexOf(span.base())
		signal_unix.go#L394: 			if s != nil && s.state.get() == mSpanManual && s.base() < sp && sp < s.limit {
		signal_unix.go#L395: 				gp := *(**g)(unsafe.Pointer(s.base()))
		stack.go#L203: 			x := gclinkptr(s.base() + i)
		stack.go#L406: 		v = unsafe.Pointer(s.base())
		stack.go#L476: 			println(hex(s.base()), v)
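
Every call site above treats base() as the span's starting address and derives object addresses and limits from it by simple offset arithmetic (e.g. s.base() + i*s.elemsize, s.base() + s.npages*pageSize). Below is a minimal, self-contained sketch using a stand-in type (the real mspan is internal to the runtime and this is not its full definition); it mirrors the one-line accessor declared at mheap.go#L468, which in current runtime sources returns the span's start address field, and the address arithmetic the listed callers perform:

	// Sketch only: a stand-in for runtime.mspan with just the fields the
	// listed call sites rely on. Field names follow the runtime's, but this
	// is an illustration, not the runtime's actual type.
	package main

	import "fmt"

	const pageSize = 8192 // runtime page size (8 KiB)

	type mspanSketch struct {
		startAddr uintptr // first byte of the span's memory
		npages    uintptr // number of pages spanned
		elemsize  uintptr // size of each object slot in the span
	}

	// base mirrors the accessor at mheap.go#L468: the span's starting address.
	func (s *mspanSketch) base() uintptr {
		return s.startAddr
	}

	func main() {
		s := &mspanSketch{startAddr: 0xc000000000, npages: 1, elemsize: 64}

		// Patterns seen in the call sites above:
		limit := s.base() + s.npages*pageSize // cf. mheap.go#L1218 (setting s.limit)
		obj := s.base() + 3*s.elemsize        // cf. mgcmark.go#L1385, mgcsweep.go#L364
		idx := (obj - s.base()) / s.elemsize  // cf. mbitmap.go#L397 (address back to object index)

		fmt.Printf("span [%#x, %#x), object 3 at %#x (index %d)\n",
			s.base(), limit, obj, idx)
	}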