func runtime.add
242 uses
runtime (current package)
alg.go#L176: h = typehash(a.elem, add(p, i*a.elem.size), h)
alg.go#L186: h = memhash(add(p, memStart), h, memEnd-memStart)
alg.go#L193: h = typehash(f.typ, add(p, f.offset()), h)
alg.go#L202: h = memhash(add(p, memStart), h, memEnd-memStart)
cgocall.go#L454: p = add(p, at.elem.size)
cgocall.go#L480: p = *(*unsafe.Pointer)(add(p, sys.PtrSize))
cgocall.go#L503: p = add(p, st.elem.size)
cgocall.go#L526: cgoCheckArg(f.typ, add(p, f.offset()), true, top, msg)
cgocheck.go#L99: p = add(p, typ.size)
cgocheck.go#L126: cgoCheckBits(add(src, -doff), datap.gcdatamask.bytedata, off+doff, size)
cgocheck.go#L131: cgoCheckBits(add(src, -boff), datap.gcbssmask.bytedata, off+boff, size)
cgocheck.go#L157: v := *(*unsafe.Pointer)(add(src, i))
cgocheck.go#L175: src = add(src, skipBytes)
cgocheck.go#L190: v := *(*unsafe.Pointer)(add(src, i))
cgocheck.go#L232: src = add(src, at.elem.size)
cgocheck.go#L250: src = add(src, f.typ.size)
chan.go#L102: c.buf = add(unsafe.Pointer(c), hchanSize)
chan.go#L122: return add(c.buf, uintptr(i)*uintptr(c.elemsize))
checkptr.go#L19: if size := n * elem.size; size > 1 && checkptrBase(p) != checkptrBase(add(p, size-1)) {
hash64.go#L30: h ^= uint64(*(*byte)(add(p, s>>1))) << 8
hash64.go#L31: h ^= uint64(*(*byte)(add(p, s-1))) << 16
hash64.go#L35: h ^= uint64(readUnaligned32(add(p, s-4))) << 32
hash64.go#L40: h ^= readUnaligned64(add(p, s-8))
hash64.go#L45: h ^= readUnaligned64(add(p, 8))
hash64.go#L47: h ^= readUnaligned64(add(p, s-16))
hash64.go#L49: h ^= readUnaligned64(add(p, s-8))
hash64.go#L59: p = add(p, 8)
hash64.go#L62: p = add(p, 8)
hash64.go#L65: p = add(p, 8)
hash64.go#L68: p = add(p, 8)
hash64.go#L95: h ^= uint64(readUnaligned32(p)) | uint64(readUnaligned32(add(p, 4)))<<32
iface.go#L103: p := (**itab)(add(unsafe.Pointer(&t.entries), h*sys.PtrSize))
iface.go#L164: p := (**itab)(add(unsafe.Pointer(&t.entries), h*sys.PtrSize))
iface.go#L202: xmhdr := (*[1 << 16]method)(add(unsafe.Pointer(x), uintptr(x.moff)))[:nt:nt]
iface.go#L338: x = add(x, 6)
iface.go#L351: x = add(x, 4)
iface.go#L524: m := *(**itab)(add(unsafe.Pointer(&t.entries), i*sys.PtrSize))
map.go#L208: return *(**bmap)(add(unsafe.Pointer(b), uintptr(t.bucketsize)-sys.PtrSize))
map.go#L212: *(**bmap)(add(unsafe.Pointer(b), uintptr(t.bucketsize)-sys.PtrSize)) = ovf
map.go#L216: return add(unsafe.Pointer(b), dataOffset)
map.go#L253: h.extra.nextOverflow = (*bmap)(add(unsafe.Pointer(ovf), uintptr(t.bucketsize)))
map.go#L382: nextOverflow = (*bmap)(add(buckets, base*uintptr(t.bucketsize)))
map.go#L383: last := (*bmap)(add(buckets, (nbuckets-1)*uintptr(t.bucketsize)))
map.go#L415: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.bucketsize)))
map.go#L421: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.bucketsize)))
map.go#L436: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.keysize))
map.go#L441: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.keysize)+i*uintptr(t.elemsize))
map.go#L494: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.keysize))
map.go#L499: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.keysize)+i*uintptr(t.elemsize))
map.go#L538: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.keysize))
map.go#L543: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.keysize)+i*uintptr(t.elemsize))
map.go#L602: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map.go#L614: insertk = add(unsafe.Pointer(b), dataOffset+i*uintptr(t.keysize))
map.go#L615: elem = add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.keysize)+i*uintptr(t.elemsize))
map.go#L622: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.keysize))
map.go#L633: elem = add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.keysize)+i*uintptr(t.elemsize))
map.go#L656: insertk = add(unsafe.Pointer(newb), dataOffset)
map.go#L657: elem = add(insertk, bucketCnt*uintptr(t.keysize))
map.go#L715: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map.go#L727: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.keysize))
map.go#L741: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.keysize)+i*uintptr(t.elemsize))
map.go#L880: b = (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.bucketsize)))
map.go#L884: b = (*bmap)(add(it.buckets, bucket*uintptr(t.bucketsize)))
map.go#L888: b = (*bmap)(add(it.buckets, bucket*uintptr(t.bucketsize)))
map.go#L905: k := add(unsafe.Pointer(b), dataOffset+uintptr(offi)*uintptr(t.keysize))
map.go#L909: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.keysize)+uintptr(offi)*uintptr(t.elemsize))
map.go#L1125: b := (*bmap)(add(h.oldbuckets, bucket*uintptr(t.bucketsize)))
map.go#L1138: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.bucketsize)))
map.go#L1147: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.bucketsize)))
map.go#L1148: x.k = add(unsafe.Pointer(x.b), dataOffset)
map.go#L1149: x.e = add(x.k, bucketCnt*uintptr(t.keysize))
map.go#L1155: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.bucketsize)))
map.go#L1156: y.k = add(unsafe.Pointer(y.b), dataOffset)
map.go#L1157: y.e = add(y.k, bucketCnt*uintptr(t.keysize))
map.go#L1161: k := add(unsafe.Pointer(b), dataOffset)
map.go#L1162: e := add(k, bucketCnt*uintptr(t.keysize))
map.go#L1163: for i := 0; i < bucketCnt; i, k, e = i+1, add(k, uintptr(t.keysize)), add(e, uintptr(t.elemsize)) {
map.go#L1212: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map.go#L1213: dst.e = add(dst.k, bucketCnt*uintptr(t.keysize))
map.go#L1231: dst.k = add(dst.k, uintptr(t.keysize))
map.go#L1232: dst.e = add(dst.e, uintptr(t.elemsize))
map.go#L1237: b := add(h.oldbuckets, oldbucket*uintptr(t.bucketsize))
map.go#L1240: ptr := add(b, dataOffset)
map_fast32.go#L30: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.bucketsize)))
map_fast32.go#L36: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.bucketsize)))
map_fast32.go#L43: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 4) {
map_fast32.go#L45: return add(unsafe.Pointer(b), dataOffset+bucketCnt*4+i*uintptr(t.elemsize))
map_fast32.go#L70: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.bucketsize)))
map_fast32.go#L76: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.bucketsize)))
map_fast32.go#L83: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 4) {
map_fast32.go#L85: return add(unsafe.Pointer(b), dataOffset+bucketCnt*4+i*uintptr(t.elemsize)), true
map_fast32.go#L117: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map_fast32.go#L136: k := *((*uint32)(add(unsafe.Pointer(b), dataOffset+i*4)))
map_fast32.go#L167: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*4)
map_fast32.go#L174: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*4+inserti*uintptr(t.elemsize))
map_fast32.go#L207: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map_fast32.go#L226: k := *((*unsafe.Pointer)(add(unsafe.Pointer(b), dataOffset+i*4)))
map_fast32.go#L257: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*4)
map_fast32.go#L264: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*4+inserti*uintptr(t.elemsize))
map_fast32.go#L293: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map_fast32.go#L297: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 4) {
map_fast32.go#L309: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*4+i*uintptr(t.elemsize))
map_fast32.go#L374: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.bucketsize)))
map_fast32.go#L383: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.bucketsize)))
map_fast32.go#L384: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_fast32.go#L385: x.e = add(x.k, bucketCnt*4)
map_fast32.go#L391: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.bucketsize)))
map_fast32.go#L392: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_fast32.go#L393: y.e = add(y.k, bucketCnt*4)
map_fast32.go#L397: k := add(unsafe.Pointer(b), dataOffset)
map_fast32.go#L398: e := add(k, bucketCnt*4)
map_fast32.go#L399: for i := 0; i < bucketCnt; i, k, e = i+1, add(k, 4), add(e, uintptr(t.elemsize)) {
map_fast32.go#L424: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_fast32.go#L425: dst.e = add(dst.k, bucketCnt*4)
map_fast32.go#L443: dst.k = add(dst.k, 4)
map_fast32.go#L444: dst.e = add(dst.e, uintptr(t.elemsize))
map_fast32.go#L449: b := add(h.oldbuckets, oldbucket*uintptr(t.bucketsize))
map_fast32.go#L452: ptr := add(b, dataOffset)
map_fast64.go#L30: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.bucketsize)))
map_fast64.go#L36: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.bucketsize)))
map_fast64.go#L43: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 8) {
map_fast64.go#L45: return add(unsafe.Pointer(b), dataOffset+bucketCnt*8+i*uintptr(t.elemsize))
map_fast64.go#L70: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.bucketsize)))
map_fast64.go#L76: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.bucketsize)))
map_fast64.go#L83: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 8) {
map_fast64.go#L85: return add(unsafe.Pointer(b), dataOffset+bucketCnt*8+i*uintptr(t.elemsize)), true
map_fast64.go#L117: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map_fast64.go#L136: k := *((*uint64)(add(unsafe.Pointer(b), dataOffset+i*8)))
map_fast64.go#L167: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*8)
map_fast64.go#L174: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*8+inserti*uintptr(t.elemsize))
map_fast64.go#L207: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map_fast64.go#L226: k := *((*unsafe.Pointer)(add(unsafe.Pointer(b), dataOffset+i*8)))
map_fast64.go#L257: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*8)
map_fast64.go#L264: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*8+inserti*uintptr(t.elemsize))
map_fast64.go#L293: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map_fast64.go#L297: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 8) {
map_fast64.go#L311: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*8+i*uintptr(t.elemsize))
map_fast64.go#L376: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.bucketsize)))
map_fast64.go#L385: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.bucketsize)))
map_fast64.go#L386: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_fast64.go#L387: x.e = add(x.k, bucketCnt*8)
map_fast64.go#L393: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.bucketsize)))
map_fast64.go#L394: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_fast64.go#L395: y.e = add(y.k, bucketCnt*8)
map_fast64.go#L399: k := add(unsafe.Pointer(b), dataOffset)
map_fast64.go#L400: e := add(k, bucketCnt*8)
map_fast64.go#L401: for i := 0; i < bucketCnt; i, k, e = i+1, add(k, 8), add(e, uintptr(t.elemsize)) {
map_fast64.go#L426: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_fast64.go#L427: dst.e = add(dst.k, bucketCnt*8)
map_fast64.go#L451: dst.k = add(dst.k, 8)
map_fast64.go#L452: dst.e = add(dst.e, uintptr(t.elemsize))
map_fast64.go#L457: b := add(h.oldbuckets, oldbucket*uintptr(t.bucketsize))
map_fast64.go#L460: ptr := add(b, dataOffset)
map_faststr.go#L29: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*sys.PtrSize) {
map_faststr.go#L38: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+i*uintptr(t.elemsize))
map_faststr.go#L45: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*sys.PtrSize) {
map_faststr.go#L54: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+i*uintptr(t.elemsize))
map_faststr.go#L61: if *((*[4]byte)(add(key.str, uintptr(key.len)-4))) != *((*[4]byte)(add(k.str, uintptr(key.len)-4))) {
map_faststr.go#L71: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+keymaybe*2*sys.PtrSize))
map_faststr.go#L73: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+keymaybe*uintptr(t.elemsize))
map_faststr.go#L81: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.bucketsize)))
map_faststr.go#L87: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.bucketsize)))
map_faststr.go#L94: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*sys.PtrSize) {
map_faststr.go#L100: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+i*uintptr(t.elemsize))
map_faststr.go#L124: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*sys.PtrSize) {
map_faststr.go#L133: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+i*uintptr(t.elemsize)), true
map_faststr.go#L140: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*sys.PtrSize) {
map_faststr.go#L149: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+i*uintptr(t.elemsize)), true
map_faststr.go#L156: if *((*[4]byte)(add(key.str, uintptr(key.len)-4))) != *((*[4]byte)(add(k.str, uintptr(key.len)-4))) {
map_faststr.go#L166: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+keymaybe*2*sys.PtrSize))
map_faststr.go#L168: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+keymaybe*uintptr(t.elemsize)), true
map_faststr.go#L176: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.bucketsize)))
map_faststr.go#L182: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.bucketsize)))
map_faststr.go#L189: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*sys.PtrSize) {
map_faststr.go#L195: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+i*uintptr(t.elemsize)), true
map_faststr.go#L228: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map_faststr.go#L248: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+i*2*sys.PtrSize))
map_faststr.go#L283: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*2*sys.PtrSize)
map_faststr.go#L289: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*2*sys.PtrSize+inserti*uintptr(t.elemsize))
map_faststr.go#L319: b := (*bmap)(add(h.buckets, bucket*uintptr(t.bucketsize)))
map_faststr.go#L324: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*sys.PtrSize) {
map_faststr.go#L334: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*2*sys.PtrSize+i*uintptr(t.elemsize))
map_faststr.go#L399: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.bucketsize)))
map_faststr.go#L408: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.bucketsize)))
map_faststr.go#L409: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_faststr.go#L410: x.e = add(x.k, bucketCnt*2*sys.PtrSize)
map_faststr.go#L416: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.bucketsize)))
map_faststr.go#L417: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_faststr.go#L418: y.e = add(y.k, bucketCnt*2*sys.PtrSize)
map_faststr.go#L422: k := add(unsafe.Pointer(b), dataOffset)
map_faststr.go#L423: e := add(k, bucketCnt*2*sys.PtrSize)
map_faststr.go#L424: for i := 0; i < bucketCnt; i, k, e = i+1, add(k, 2*sys.PtrSize), add(e, uintptr(t.elemsize)) {
map_faststr.go#L449: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_faststr.go#L450: dst.e = add(dst.k, bucketCnt*2*sys.PtrSize)
map_faststr.go#L463: dst.k = add(dst.k, 2*sys.PtrSize)
map_faststr.go#L464: dst.e = add(dst.e, uintptr(t.elemsize))
map_faststr.go#L469: b := add(h.oldbuckets, oldbucket*uintptr(t.bucketsize))
map_faststr.go#L472: ptr := add(b, dataOffset)
mgcmark.go#L262: ptrmask := (*uint8)(add(unsafe.Pointer(ptrmask0), uintptr(shard)*(rootBlockBytes/(8*sys.PtrSize))))
mgcsweep.go#L671: *(*uint32)(add(x, i)) = 0xdeadbeef
mpagealloc_64bit.go#L121: offAddr{uintptr(add(base, baseOffset))},
mpagealloc_64bit.go#L122: offAddr{uintptr(add(base, limitOffset))},
mprof.go#L182: stk := (*[maxStack]uintptr)(add(unsafe.Pointer(b), unsafe.Sizeof(*b)))
mprof.go#L191: data := add(unsafe.Pointer(b), unsafe.Sizeof(*b)+b.nstk*unsafe.Sizeof(uintptr(0)))
mprof.go#L200: data := add(unsafe.Pointer(b), unsafe.Sizeof(*b)+b.nstk*unsafe.Sizeof(uintptr(0)))
mspanset.go#L85: blockp := add(spine, sys.PtrSize*top)
mspanset.go#L127: blockp := add(b.spine, sys.PtrSize*top)
mspanset.go#L184: blockp := add(spine, sys.PtrSize*uintptr(top))
mspanset.go#L244: blockp := (**spanSetBlock)(add(b.spine, sys.PtrSize*uintptr(top)))
netpoll.go#L541: pd := (*pollDesc)(add(mem, i*pdSize))
panic.go#L373: return add(unsafe.Pointer(d), unsafe.Sizeof(*d))
panic.go#L794: fd = add(fd, unsafe.Sizeof(b))
proc.go#L541: return *(**g)(add(unsafe.Pointer(ptr), i*sys.PtrSize))
proc.go#L3975: argp := add(unsafe.Pointer(&fn), sys.PtrSize)
proc.go#L6240: p := add(unsafe.Pointer(t), (3+i)*sys.PtrSize)
proc.go#L6261: firstFunc := add(unsafe.Pointer(t), (3+t.ndeps)*sys.PtrSize)
proc.go#L6263: p := add(firstFunc, i*sys.PtrSize)
runtime1.go#L58: return *(**byte)(add(unsafe.Pointer(argv), uintptr(i)*sys.PtrSize))
slice.go#L57: memclrNoHeapPointers(add(to, copymem), tomem-copymem)
slice.go#L227: memclrNoHeapPointers(add(p, newlenmem), capmem-newlenmem)
stack.go#L592: print(" ", add(scanp, (i+j)*sys.PtrSize), ":", ptrnames[bv.ptrbit(i+j)], ":", hex(*(*uintptr)(add(scanp, (i+j)*sys.PtrSize))), " # ", i, " ", *addb(bv.bytedata, i/8), "\n")
stack.go#L599: pp := (*uintptr)(add(scanp, (i+j)*sys.PtrSize))
stack.go#L1307: p = add(p, sys.PtrSize)
string.go#L99: p = add(p, 7)
string.go#L278: memclrNoHeapPointers(add(p, uintptr(size)), cap-uintptr(size))
string.go#L293: memclrNoHeapPointers(add(p, uintptr(size)*4), mem-uintptr(size)*4)
stubs.go#L11: func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
symtab.go#L682: ffb := (*findfuncbucket)(add(unsafe.Pointer(datap.findfunctab), b*unsafe.Sizeof(findfuncbucket{})))
symtab.go#L931: return *(*uint32)(add(unsafe.Pointer(&f.nfuncdata), unsafe.Sizeof(f.nfuncdata)+uintptr(table)*4))
symtab.go#L963: p := add(unsafe.Pointer(&f.nfuncdata), unsafe.Sizeof(f.nfuncdata)+uintptr(f.npcdata)*4)
symtab.go#L968: p = add(p, 4)
symtab.go#L970: return *(*unsafe.Pointer)(add(p, uintptr(i)*sys.PtrSize))
type.go#L325: return (*[1 << 20]*_type)(add(unsafe.Pointer(t), uadd))[:t.inCount]
type.go#L335: return (*[1 << 20]*_type)(add(unsafe.Pointer(t), uadd))[t.inCount : t.inCount+outCount]
type.go#L455: return (*byte)(add(unsafe.Pointer(n.bytes), uintptr(off)))
The pages are generated with Golds v0.3.2-preview. (GOOS=darwin GOARCH=amd64)
Golds is a Go 101 project developed by Tapir Liu.
PRs and bug reports are welcome and can be submitted to the issue list.
Please follow @Go100and1 (reachable from the left QR code) to get the latest news of Golds.