internal/abi.MapType.Key (field)

58 uses

	internal/abi (current package)
		type.go#L442: 	Key    *Type
		type.go#L473: 		return (*MapType)(unsafe.Pointer(t)).Key

	internal/reflectlite
		type.go#L340: 	return toType(tt.Key)

	reflect
		type.go#L749: 	return toType(tt.Key)
		type.go#L1667: 		if mt.Key == ktyp && mt.Elem == etyp {
		type.go#L1681: 	mt.Key = ktyp
		value.go#L1752: 	if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= maxValSize {
		value.go#L1756: 		key = key.assignTo("reflect.Value.MapIndex", tt.Key, nil)
		value.go#L1781: 	keyType := tt.Key
		value.go#L1852: 	ktype := t.Key
		value.go#L1876: 	ktype := t.Key
		value.go#L2388: 	if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= maxValSize {
		value.go#L2406: 	key = key.assignTo("reflect.Value.SetMapIndex", tt.Key, nil)

	runtime
		map.go#L401: 		raceReadObjectPC(t.Key, key, callerpc, pc)
		map.go#L404: 		msanread(key, t.Key.Size_)
		map.go#L407: 		asanread(key, t.Key.Size_)
		map.go#L445: 			if t.Key.Equal(key, k) {
		map.go#L462: 		raceReadObjectPC(t.Key, key, callerpc, pc)
		map.go#L465: 		msanread(key, t.Key.Size_)
		map.go#L468: 		asanread(key, t.Key.Size_)
		map.go#L506: 			if t.Key.Equal(key, k) {
		map.go#L550: 			if t.Key.Equal(key, k) {
		map.go#L587: 		raceReadObjectPC(t.Key, key, callerpc, pc)
		map.go#L590: 		msanread(key, t.Key.Size_)
		map.go#L593: 		asanread(key, t.Key.Size_)
		map.go#L637: 			if !t.Key.Equal(key, k) {
		map.go#L642: 				typedmemmove(t.Key, k, key)
		map.go#L673: 		kmem := newobject(t.Key)
		map.go#L681: 	typedmemmove(t.Key, insertk, key)
		map.go#L701: 		raceReadObjectPC(t.Key, key, callerpc, pc)
		map.go#L704: 		msanread(key, t.Key.Size_)
		map.go#L707: 		asanread(key, t.Key.Size_)
		map.go#L746: 			if !t.Key.Equal(key, k2) {
		map.go#L752: 			} else if t.Key.PtrBytes != 0 {
		map.go#L753: 				memclrHasPointers(k, t.Key.Size_)
		map.go#L934: 			if t.ReflexiveKey() || t.Key.Equal(k, k) {
		map.go#L955: 			!(t.ReflexiveKey() || t.Key.Equal(k, k)) {
		map.go#L1213: 					if h.flags&iterator != 0 && !t.ReflexiveKey() && !t.Key.Equal(k2, k2) {
		map.go#L1251: 					typedmemmove(t.Key, dst.k, k) // copy key
		map.go#L1312: 	if t.Key.Equal == nil {
		map.go#L1315: 	if t.Key.Size_ > maxKeySize && (!t.IndirectKey() || t.KeySize != uint8(goarch.PtrSize)) ||
		map.go#L1316: 		t.Key.Size_ <= maxKeySize && (t.IndirectKey() || t.KeySize != uint8(t.Key.Size_)) {
		map.go#L1323: 	if t.Key.Align_ > bucketCnt {
		map.go#L1329: 	if t.Key.Size_%uintptr(t.Key.Align_) != 0 {
		map.go#L1338: 	if dataOffset%uintptr(t.Key.Align_) != 0 {
		map.go#L1486: 			typedmemmove(t.Key, dstK, srcK)
		map.go#L1654: 			typedmemmove(t.Key, add(s.array, uintptr(s.len)*uintptr(t.KeySize)), k)
		map_fast32.go#L305: 			if goarch.PtrSize == 4 && t.Key.PtrBytes != 0 {
		map_fast32.go#L431: 				if goarch.PtrSize == 4 && t.Key.PtrBytes != 0 && writeBarrier.enabled {
		map_fast64.go#L303: 			if t.Key.PtrBytes != 0 {
		map_fast64.go#L433: 				if t.Key.PtrBytes != 0 && writeBarrier.enabled {
		map_fast64.go#L440: 						typedmemmove(t.Key, dst.k, k)
		type.go#L426: 		return typesEqual(mt.Key, mv.Key, seen) && typesEqual(mt.Elem, mv.Elem, seen)