...

Source file src/cmd/compile/internal/walk/builtin.go


     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"go/token"
    11  	"internal/abi"
    12  	"strings"
    13  
    14  	"cmd/compile/internal/base"
    15  	"cmd/compile/internal/escape"
    16  	"cmd/compile/internal/ir"
    17  	"cmd/compile/internal/reflectdata"
    18  	"cmd/compile/internal/typecheck"
    19  	"cmd/compile/internal/types"
    20  )
    21  
    22  // Rewrite append(src, x, y, z) so that any side effects in
    23  // x, y, z (including runtime panics) are evaluated in
    24  // initialization statements before the append.
    25  // For normal code generation, stop there and leave the
    26  // rest to ssagen.
    27  //
    28  // For race detector, expand append(src, a [, b]* ) to
    29  //
    30  //	init {
    31  //	  s := src
    32  //	  const argc = len(args) - 1
    33  //	  newLen := s.len + argc
    34  //	  if uint(newLen) <= uint(s.cap) {
    35  //	    s = s[:newLen]
    36  //	  } else {
    37  //	    s = growslice(s.ptr, newLen, s.cap, argc, elemType)
    38  //	  }
    39  //	  s[s.len - argc] = a
    40  //	  s[s.len - argc + 1] = b
    41  //	  ...
    42  //	}
    43  //	s
    44  func walkAppend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
    45  	if !ir.SameSafeExpr(dst, n.Args[0]) {
    46  		n.Args[0] = safeExpr(n.Args[0], init)
    47  		n.Args[0] = walkExpr(n.Args[0], init)
    48  	}
    49  	walkExprListSafe(n.Args[1:], init)
    50  
    51  	nsrc := n.Args[0]
    52  
    53  	// walkExprListSafe will leave OINDEX (s[n]) alone if both s
    54  	// and n are name or literal, but those may index the slice we're
    55  	// modifying here. Fix explicitly.
    56  	// Using cheapExpr also makes sure that the evaluation
    57  	// of all arguments (and especially any panics) happen
    58  	// before we begin to modify the slice in a visible way.
    59  	ls := n.Args[1:]
    60  	for i, n := range ls {
    61  		n = cheapExpr(n, init)
    62  		if !types.Identical(n.Type(), nsrc.Type().Elem()) {
    63  			n = typecheck.AssignConv(n, nsrc.Type().Elem(), "append")
    64  			n = walkExpr(n, init)
    65  		}
    66  		ls[i] = n
    67  	}
    68  
    69  	argc := len(n.Args) - 1
    70  	if argc < 1 {
    71  		return nsrc
    72  	}
    73  
    74  	// General case, with no function calls left as arguments.
    75  	// Leave for ssagen, except that instrumentation requires the old form.
    76  	if !base.Flag.Cfg.Instrumenting || base.Flag.CompilingRuntime {
    77  		return n
    78  	}
    79  
    80  	var l []ir.Node
    81  
    82  	// s = slice to append to
    83  	s := typecheck.TempAt(base.Pos, ir.CurFunc, nsrc.Type())
    84  	l = append(l, ir.NewAssignStmt(base.Pos, s, nsrc))
    85  
    86  	// num = number of things to append
    87  	num := ir.NewInt(base.Pos, int64(argc))
    88  
    89  	// newLen := s.len + num
    90  	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
    91  	l = append(l, ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), num)))
    92  
    93  	// if uint(newLen) <= uint(s.cap)
    94  	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
    95  	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, typecheck.Conv(newLen, types.Types[types.TUINT]), typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT]))
    96  	nif.Likely = true
    97  
    98  	// then { s = s[:n] }
    99  	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
   100  	slice.SetBounded(true)
   101  	nif.Body = []ir.Node{
   102  		ir.NewAssignStmt(base.Pos, s, slice),
   103  	}
   104  
   105  	// else { s = growslice(s.ptr, n, s.cap, a, T) }
   106  	nif.Else = []ir.Node{
   107  		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
   108  			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
   109  			newLen,
   110  			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
   111  			num)),
   112  	}
   113  
   114  	l = append(l, nif)
   115  
   116  	ls = n.Args[1:]
   117  	for i, n := range ls {
   118  		// s[s.len-argc+i] = arg
   119  		ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewInt(base.Pos, int64(argc-i))))
   120  		ix.SetBounded(true)
   121  		l = append(l, ir.NewAssignStmt(base.Pos, ix, n))
   122  	}
   123  
   124  	typecheck.Stmts(l)
   125  	walkStmtList(l)
   126  	init.Append(l...)
   127  	return s
   128  }
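
The rewrite above exists to preserve a user-visible ordering guarantee: the appended arguments (and any panic raised while evaluating them) are handled before the destination slice is modified. A minimal, hypothetical sketch of that behavior in ordinary user code, not part of this file:

// Hypothetical sketch: if evaluating an append argument panics,
// the destination slice must still be unmodified.
package main

import "fmt"

func mustPositive(x int) int {
	if x <= 0 {
		panic("not positive")
	}
	return x
}

func main() {
	s := []int{1, 2, 3}
	func() {
		defer func() { _ = recover() }()
		s = append(s, mustPositive(4), mustPositive(-1)) // panics while evaluating the second argument
	}()
	fmt.Println(s) // [1 2 3]: s was not touched before the panic
}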
   129  
   130  // growslice(ptr *T, newLen, oldCap, num int, <type>) (ret []T)
   131  func walkGrowslice(slice *ir.Name, init *ir.Nodes, oldPtr, newLen, oldCap, num ir.Node) *ir.CallExpr {
   132  	elemtype := slice.Type().Elem()
   133  	fn := typecheck.LookupRuntime("growslice", elemtype, elemtype)
   134  	elemtypeptr := reflectdata.TypePtrAt(base.Pos, elemtype)
   135  	return mkcall1(fn, slice.Type(), init, oldPtr, newLen, oldCap, num, elemtypeptr)
   136  }
   137  
   138  // walkClear walks an OCLEAR node.
   139  func walkClear(n *ir.UnaryExpr) ir.Node {
   140  	typ := n.X.Type()
   141  	switch {
   142  	case typ.IsSlice():
   143  		if n := arrayClear(n.X.Pos(), n.X, nil); n != nil {
   144  			return n
   145  		}
   146  		// If n == nil, we are clearing an array that takes zero memory; do nothing.
   147  		return ir.NewBlockStmt(n.Pos(), nil)
   148  	case typ.IsMap():
   149  		return mapClear(n.X, reflectdata.TypePtrAt(n.X.Pos(), n.X.Type()))
   150  	}
   151  	panic("unreachable")
   152  }
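
For reference, a small sketch (ordinary user code, not part of this file) of the two cases walkClear distinguishes:

package main

import "fmt"

func main() {
	s := []int{1, 2, 3}
	clear(s)               // slice case: elements are zeroed, length is unchanged
	fmt.Println(s, len(s)) // [0 0 0] 3

	m := map[string]int{"a": 1, "b": 2}
	clear(m)            // map case: every entry is deleted
	fmt.Println(len(m)) // 0
}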
   153  
   154  // walkClose walks an OCLOSE node.
   155  func walkClose(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
   156  	return mkcall1(chanfn("closechan", 1, n.X.Type()), nil, init, n.X)
   157  }
   158  
   159  // Lower copy(a, b) to a memmove call or a runtime call.
   160  //
   161  //	init {
   162  //	  n := len(a)
   163  //	  if n > len(b) { n = len(b) }
   164  //	  if a.ptr != b.ptr { memmove(a.ptr, b.ptr, n*sizeof(elem(a))) }
   165  //	}
   166  //	n;
   167  //
   168  // Also works if b is a string.
   169  func walkCopy(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
   170  	if n.X.Type().Elem().HasPointers() {
   171  		ir.CurFunc.SetWBPos(n.Pos())
   172  		fn := writebarrierfn("typedslicecopy", n.X.Type().Elem(), n.Y.Type().Elem())
   173  		n.X = cheapExpr(n.X, init)
   174  		ptrL, lenL := backingArrayPtrLen(n.X)
   175  		n.Y = cheapExpr(n.Y, init)
   176  		ptrR, lenR := backingArrayPtrLen(n.Y)
   177  		return mkcall1(fn, n.Type(), init, reflectdata.CopyElemRType(base.Pos, n), ptrL, lenL, ptrR, lenR)
   178  	}
   179  
   180  	if runtimecall {
   181  		// rely on runtime to instrument:
   182  		//  copy(n.Left, n.Right)
   183  		// n.Right can be a slice or string.
   184  
   185  		n.X = cheapExpr(n.X, init)
   186  		ptrL, lenL := backingArrayPtrLen(n.X)
   187  		n.Y = cheapExpr(n.Y, init)
   188  		ptrR, lenR := backingArrayPtrLen(n.Y)
   189  
   190  		fn := typecheck.LookupRuntime("slicecopy", ptrL.Type().Elem(), ptrR.Type().Elem())
   191  
   192  		return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(base.Pos, n.X.Type().Elem().Size()))
   193  	}
   194  
   195  	n.X = walkExpr(n.X, init)
   196  	n.Y = walkExpr(n.Y, init)
   197  	nl := typecheck.TempAt(base.Pos, ir.CurFunc, n.X.Type())
   198  	nr := typecheck.TempAt(base.Pos, ir.CurFunc, n.Y.Type())
   199  	var l []ir.Node
   200  	l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X))
   201  	l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y))
   202  
   203  	nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr)
   204  	nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl)
   205  
   206  	nlen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
   207  
   208  	// n = len(to)
   209  	l = append(l, ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nl)))
   210  
   211  	// if n > len(frm) { n = len(frm) }
   212  	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
   213  
   214  	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr))
   215  	nif.Body.Append(ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nr)))
   216  	l = append(l, nif)
   217  
   218  	// if to.ptr != frm.ptr { memmove( ... ) }
   219  	ne := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, nto, nfrm), nil, nil)
   220  	ne.Likely = true
   221  	l = append(l, ne)
   222  
   223  	fn := typecheck.LookupRuntime("memmove", nl.Type().Elem(), nl.Type().Elem())
   224  	nwid := ir.Node(typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR]))
   225  	setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR]))
   226  	ne.Body.Append(setwid)
   227  	nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, nl.Type().Elem().Size()))
   228  	call := mkcall1(fn, nil, init, nto, nfrm, nwid)
   229  	ne.Body.Append(call)
   230  
   231  	typecheck.Stmts(l)
   232  	walkStmtList(l)
   233  	init.Append(l...)
   234  	return nlen
   235  }
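
A brief sketch of the contract the expansion above implements (hypothetical user code, not part of this file): the result is min(len(dst), len(src)), and a string source is allowed.

package main

import "fmt"

func main() {
	dst := make([]byte, 4)
	n := copy(dst, "hello")     // string source; copies min(len(dst), len(src)) bytes
	fmt.Println(n, string(dst)) // 4 hell

	big := make([]int, 10)
	fmt.Println(copy(big, []int{1, 2, 3})) // 3
}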
   236  
   237  // walkDelete walks an ODELETE node.
   238  func walkDelete(init *ir.Nodes, n *ir.CallExpr) ir.Node {
   239  	init.Append(ir.TakeInit(n)...)
   240  	map_ := n.Args[0]
   241  	key := n.Args[1]
   242  	map_ = walkExpr(map_, init)
   243  	key = walkExpr(key, init)
   244  
   245  	t := map_.Type()
   246  	fast := mapfast(t)
   247  	key = mapKeyArg(fast, n, key, false)
   248  	return mkcall1(mapfndel(mapdelete[fast], t), nil, init, reflectdata.DeleteMapRType(base.Pos, n), map_, key)
   249  }
   250  
   251  // walkLenCap walks an OLEN or OCAP node.
   252  func walkLenCap(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
   253  	if isRuneCount(n) {
   254  		// Replace len([]rune(string)) with runtime.countrunes(string).
   255  		return mkcall("countrunes", n.Type(), init, typecheck.Conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
   256  	}
   257  	if isByteCount(n) {
   258  		conv := n.X.(*ir.ConvExpr)
   259  		walkStmtList(conv.Init())
   260  		init.Append(ir.TakeInit(conv)...)
   261  		_, len := backingArrayPtrLen(cheapExpr(conv.X, init))
   262  		return len
   263  	}
   264  	if isChanLenCap(n) {
   265  		name := "chanlen"
   266  		if n.Op() == ir.OCAP {
   267  			name = "chancap"
   268  		}
   269  		// cannot use chanfn - chanlen and chancap take any, not chan any,
   270  		// because they accept both send-only and recv-only channels.
   271  		fn := typecheck.LookupRuntime(name, n.X.Type())
   272  		return mkcall1(fn, n.Type(), init, n.X)
   273  	}
   274  
   275  	n.X = walkExpr(n.X, init)
   276  
   277  	// replace len(*[10]int) with 10.
   278  	// delayed until now to preserve side effects.
   279  	t := n.X.Type()
   280  
   281  	if t.IsPtr() {
   282  		t = t.Elem()
   283  	}
   284  	if t.IsArray() {
   285  		safeExpr(n.X, init)
   286  		con := ir.NewConstExpr(constant.MakeInt64(t.NumElem()), n)
   287  		con.SetTypecheck(1)
   288  		return con
   289  	}
   290  	return n
   291  }
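
Two of the special cases above are easy to see from user code. A hypothetical sketch, not part of this file; the countrunes rewrite only fires with optimizations on, but the observable result is the same either way:

package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	s := "héllo, 世界"
	// len([]rune(s)) is the form isRuneCount recognizes; its value always
	// equals utf8.RuneCountInString(s), with or without the rewrite.
	fmt.Println(len([]rune(s)) == utf8.RuneCountInString(s)) // true

	// len/cap of a pointer to an array is the constant array length; the
	// operand is not dereferenced, so a nil pointer is fine here.
	var p *[10]int
	fmt.Println(len(p), cap(p)) // 10 10
}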
   292  
   293  // walkMakeChan walks an OMAKECHAN node.
   294  func walkMakeChan(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
   295  	// When size fits into an int, use makechan instead of
   296  	// makechan64; makechan is faster and shorter on 32 bit platforms.
   297  	size := n.Len
   298  	fnname := "makechan64"
   299  	argtype := types.Types[types.TINT64]
   300  
   301  	// Type checking guarantees that TIDEAL size is positive and fits in an int.
   302  	// The case of size overflow when converting TUINT or TUINTPTR to TINT
   303  	// will be handled by the negative range checks in makechan during runtime.
   304  	if size.Type().IsKind(types.TIDEAL) || size.Type().Size() <= types.Types[types.TUINT].Size() {
   305  		fnname = "makechan"
   306  		argtype = types.Types[types.TINT]
   307  	}
   308  
   309  	return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, reflectdata.MakeChanRType(base.Pos, n), typecheck.Conv(size, argtype))
   310  }
   311  
   312  // walkMakeMap walks an OMAKEMAP node.
   313  func walkMakeMap(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
   314  	t := n.Type()
   315  	hmapType := reflectdata.MapType()
   316  	hint := n.Len
   317  
   318  	// var h *hmap
   319  	var h ir.Node
   320  	if n.Esc() == ir.EscNone {
   321  		// Allocate hmap on stack.
   322  
   323  		// var hv hmap
   324  		// h = &hv
   325  		h = stackTempAddr(init, hmapType)
   326  
   327  		// Allocate one bucket pointed to by hmap.buckets on stack if hint
   328  		// is not larger than BUCKETSIZE. In case hint is larger than
   329  		// BUCKETSIZE runtime.makemap will allocate the buckets on the heap.
   330  		// Maximum key and elem size is 128 bytes; larger objects
   331  		// are stored with an indirection. So max bucket size is 2048+eps.
   332  		if !ir.IsConst(hint, constant.Int) ||
   333  			constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(abi.MapBucketCount)) {
   334  
   335  			// In case hint is larger than BUCKETSIZE runtime.makemap
   336  			// will allocate the buckets on the heap, see #20184
   337  			//
   338  			// if hint <= BUCKETSIZE {
   339  			//     var bv bmap
   340  			//     b = &bv
   341  			//     h.buckets = b
   342  			// }
   343  
   344  			nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, hint, ir.NewInt(base.Pos, abi.MapBucketCount)), nil, nil)
   345  			nif.Likely = true
   346  
   347  			// var bv bmap
   348  			// b = &bv
   349  			b := stackTempAddr(&nif.Body, reflectdata.MapBucketType(t))
   350  
   351  			// h.buckets = b
   352  			bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
   353  			na := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, bsym), typecheck.ConvNop(b, types.Types[types.TUNSAFEPTR]))
   354  			nif.Body.Append(na)
   355  			appendWalkStmt(init, nif)
   356  		}
   357  	}
   358  
   359  	if ir.IsConst(hint, constant.Int) && constant.Compare(hint.Val(), token.LEQ, constant.MakeInt64(abi.MapBucketCount)) {
   360  		// Handling make(map[any]any) and
   361  		// make(map[any]any, hint) where hint <= BUCKETSIZE
   362  		// as a special case allows for faster map initialization and
   363  		// improves binary size by using calls with fewer arguments.
   364  		// For hint <= BUCKETSIZE overLoadFactor(hint, 0) is false
   365  		// and no buckets will be allocated by makemap. Therefore,
   366  		// no buckets need to be allocated in this code path.
   367  		if n.Esc() == ir.EscNone {
   368  			// Only need to initialize h.hash0 since
   369  			// hmap h has been allocated on the stack already.
   370  			// h.hash0 = rand32()
   371  			rand := mkcall("rand32", types.Types[types.TUINT32], init)
   372  			hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap
   373  			appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, hashsym), rand))
   374  			return typecheck.ConvNop(h, t)
   375  		}
   376  		// Call runtime.makemap_small to allocate an
   377  		// hmap on the heap and initialize hmap's hash0 field.
   378  		fn := typecheck.LookupRuntime("makemap_small", t.Key(), t.Elem())
   379  		return mkcall1(fn, n.Type(), init)
   380  	}
   381  
   382  	if n.Esc() != ir.EscNone {
   383  		h = typecheck.NodNil()
   384  	}
   385  	// Map initialization with a variable or large hint is
   386  	// more complicated. We therefore generate a call to
   387  	// runtime.makemap to initialize hmap and allocate the
   388  	// map buckets.
   389  
   390  	// When hint fits into an int, use makemap instead of
   391  	// makemap64; makemap is faster and shorter on 32 bit platforms.
   392  	fnname := "makemap64"
   393  	argtype := types.Types[types.TINT64]
   394  
   395  	// Type checking guarantees that TIDEAL hint is positive and fits in an int.
   396  	// See checkmake call in TMAP case of OMAKE case in OpSwitch in typecheck1 function.
   397  	// The case of hint overflow when converting TUINT or TUINTPTR to TINT
   398  	// will be handled by the negative range checks in makemap during runtime.
   399  	if hint.Type().IsKind(types.TIDEAL) || hint.Type().Size() <= types.Types[types.TUINT].Size() {
   400  		fnname = "makemap"
   401  		argtype = types.Types[types.TINT]
   402  	}
   403  
   404  	fn := typecheck.LookupRuntime(fnname, hmapType, t.Key(), t.Elem())
   405  	return mkcall1(fn, n.Type(), init, reflectdata.MakeMapRType(base.Pos, n), typecheck.Conv(hint, argtype), h)
   406  }
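
A rough mapping from the branches above onto ordinary user code (hypothetical example, not part of this file; which branch is taken also depends on escape analysis):

package main

import "fmt"

func main() {
	// Constant hint no larger than one bucket (abi.MapBucketCount): either the
	// hmap is initialized on the stack or makemap_small is called; no buckets
	// are allocated up front.
	small := make(map[string]int, 8)

	// Variable or large hint: lowered to makemap (or makemap64 when the hint's
	// type may not fit in an int), which sizes the buckets itself.
	n := 10000
	big := make(map[int]int, n)

	small["x"] = 1
	big[42] = 1
	fmt.Println(len(small), len(big))
}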
   407  
   408  // walkMakeSlice walks an OMAKESLICE node.
   409  func walkMakeSlice(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
   410  	l := n.Len
   411  	r := n.Cap
   412  	if r == nil {
   413  		r = safeExpr(l, init)
   414  		l = r
   415  	}
   416  	t := n.Type()
   417  	if t.Elem().NotInHeap() {
   418  		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
   419  	}
   420  	if n.Esc() == ir.EscNone {
   421  		if why := escape.HeapAllocReason(n); why != "" {
   422  			base.Fatalf("%v has EscNone, but %v", n, why)
   423  		}
   424  		// var arr [r]T
   425  		// n = arr[:l]
   426  		i := typecheck.IndexConst(r)
   427  		if i < 0 {
   428  			base.Fatalf("walkExpr: invalid index %v", r)
   429  		}
   430  
   431  		// cap is constrained to [0,2^31) or [0,2^63) depending on whether
   432  		// we're on a 32-bit or a 64-bit system. So it's safe to do:
   433  		//
   434  		// if uint64(len) > cap {
   435  		//     if len < 0 { panicmakeslicelen() }
   436  		//     panicmakeslicecap()
   437  		// }
   438  		nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(l, types.Types[types.TUINT64]), ir.NewInt(base.Pos, i)), nil, nil)
   439  		niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, ir.NewInt(base.Pos, 0)), nil, nil)
   440  		niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
   441  		nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
   442  		init.Append(typecheck.Stmt(nif))
   443  
   444  		t = types.NewArray(t.Elem(), i) // [r]T
   445  		var_ := typecheck.TempAt(base.Pos, ir.CurFunc, t)
   446  		appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil))  // zero temp
   447  		r := ir.NewSliceExpr(base.Pos, ir.OSLICE, var_, nil, l, nil) // arr[:l]
   448  		// The conv is necessary in case n.Type is named.
   449  		return walkExpr(typecheck.Expr(typecheck.Conv(r, n.Type())), init)
   450  	}
   451  
   452  	// n escapes; set up a call to makeslice.
   453  	// When len and cap fit into an int, use makeslice instead of
   454  	// makeslice64; makeslice is faster and shorter on 32 bit platforms.
   455  
   456  	len, cap := l, r
   457  
   458  	fnname := "makeslice64"
   459  	argtype := types.Types[types.TINT64]
   460  
   461  	// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
   462  	// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
   463  	// will be handled by the negative range checks in makeslice during runtime.
   464  	if (len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size()) &&
   465  		(cap.Type().IsKind(types.TIDEAL) || cap.Type().Size() <= types.Types[types.TUINT].Size()) {
   466  		fnname = "makeslice"
   467  		argtype = types.Types[types.TINT]
   468  	}
   469  	fn := typecheck.LookupRuntime(fnname)
   470  	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype))
   471  	ptr.MarkNonNil()
   472  	len = typecheck.Conv(len, types.Types[types.TINT])
   473  	cap = typecheck.Conv(cap, types.Types[types.TINT])
   474  	sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, len, cap)
   475  	return walkExpr(typecheck.Expr(sh), init)
   476  }
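
A small sketch of the runtime checks referenced above, from the user's side (hypothetical example, not part of this file). Constant out-of-range sizes are rejected at compile time; variable sizes reach the makeslice/panicmakeslice* checks:

package main

import "fmt"

func main() {
	length, capacity := 3, 8
	s := make([]byte, length, capacity)
	fmt.Println(len(s), cap(s)) // 3 8

	n := -1
	func() {
		defer func() { fmt.Println("recovered:", recover() != nil) }()
		_ = make([]int, n) // negative length: panics via the runtime check
	}()
}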
   477  
   478  // walkMakeSliceCopy walks an OMAKESLICECOPY node.
   479  func walkMakeSliceCopy(n *ir.MakeExpr, init *ir.Nodes) ir.Node {
   480  	if n.Esc() == ir.EscNone {
   481  		base.Fatalf("OMAKESLICECOPY with EscNone: %v", n)
   482  	}
   483  
   484  	t := n.Type()
   485  	if t.Elem().NotInHeap() {
   486  		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
   487  	}
   488  
   489  	length := typecheck.Conv(n.Len, types.Types[types.TINT])
   490  	copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap)
   491  	copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap)
   492  
   493  	if !t.Elem().HasPointers() && n.Bounded() {
   494  		// When len(to)==len(from) and elements have no pointers:
   495  		// replace make+copy with runtime.mallocgc+runtime.memmove.
   496  
   497  		// We do not check for overflow of len(to)*elem.Width here
   498  		// since len(from) is an existing checked slice capacity
   499  		// with same elem.Width for the from slice.
   500  		size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(base.Pos, t.Elem().Size()), types.Types[types.TUINTPTR]))
   501  
   502  		// instantiate mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
   503  		fn := typecheck.LookupRuntime("mallocgc")
   504  		ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(base.Pos, false))
   505  		ptr.MarkNonNil()
   506  		sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
   507  
   508  		s := typecheck.TempAt(base.Pos, ir.CurFunc, t)
   509  		r := typecheck.Stmt(ir.NewAssignStmt(base.Pos, s, sh))
   510  		r = walkExpr(r, init)
   511  		init.Append(r)
   512  
   513  		// instantiate memmove(to *any, frm *any, size uintptr)
   514  		fn = typecheck.LookupRuntime("memmove", t.Elem(), t.Elem())
   515  		ncopy := mkcall1(fn, nil, init, ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), copyptr, size)
   516  		init.Append(walkExpr(typecheck.Stmt(ncopy), init))
   517  
   518  		return s
   519  	}
   520  	// Replace make+copy with runtime.makeslicecopy.
   521  	// instantiate makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
   522  	fn := typecheck.LookupRuntime("makeslicecopy")
   523  	ptr := mkcall1(fn, types.Types[types.TUNSAFEPTR], init, reflectdata.MakeSliceElemRType(base.Pos, n), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR]))
   524  	ptr.MarkNonNil()
   525  	sh := ir.NewSliceHeaderExpr(base.Pos, t, ptr, length, length)
   526  	return walkExpr(typecheck.Expr(sh), init)
   527  }
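
The OMAKESLICECOPY node handled above comes from a make-then-copy pair like the one below (hypothetical example, not part of this file); the fusion is an optimization the compiler may apply when its conditions hold, and the user-visible result is the same either way:

package main

import "fmt"

func main() {
	src := []byte("hello, world")

	// make+copy of the whole source: the shape that can be fused into a single
	// OMAKESLICECOPY and lowered to mallocgc+memmove or makeslicecopy.
	dst := make([]byte, len(src))
	copy(dst, src)

	fmt.Println(string(dst))
}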
   528  
   529  // walkNew walks an ONEW node.
   530  func walkNew(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
   531  	t := n.Type().Elem()
   532  	if t.NotInHeap() {
   533  		base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type().Elem())
   534  	}
   535  	if n.Esc() == ir.EscNone {
   536  		if t.Size() > ir.MaxImplicitStackVarSize {
   537  			base.Fatalf("large ONEW with EscNone: %v", n)
   538  		}
   539  		return stackTempAddr(init, t)
   540  	}
   541  	types.CalcSize(t)
   542  	n.MarkNonNil()
   543  	return n
   544  }
   545  
   546  func walkMinMax(n *ir.CallExpr, init *ir.Nodes) ir.Node {
   547  	init.Append(ir.TakeInit(n)...)
   548  	walkExprList(n.Args, init)
   549  	return n
   550  }
   551  
   552  // generate code for print.
   553  func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
   554  	// Hoist all the argument evaluation up before the lock.
   555  	walkExprListCheap(nn.Args, init)
   556  
   557  	// For println, add " " between elements and "\n" at the end.
   558  	if nn.Op() == ir.OPRINTLN {
   559  		s := nn.Args
   560  		t := make([]ir.Node, 0, len(s)*2)
   561  		for i, n := range s {
   562  			if i != 0 {
   563  				t = append(t, ir.NewString(base.Pos, " "))
   564  			}
   565  			t = append(t, n)
   566  		}
   567  		t = append(t, ir.NewString(base.Pos, "\n"))
   568  		nn.Args = t
   569  	}
   570  
   571  	// Collapse runs of constant strings.
   572  	s := nn.Args
   573  	t := make([]ir.Node, 0, len(s))
   574  	for i := 0; i < len(s); {
   575  		var strs []string
   576  		for i < len(s) && ir.IsConst(s[i], constant.String) {
   577  			strs = append(strs, ir.StringVal(s[i]))
   578  			i++
   579  		}
   580  		if len(strs) > 0 {
   581  			t = append(t, ir.NewString(base.Pos, strings.Join(strs, "")))
   582  		}
   583  		if i < len(s) {
   584  			t = append(t, s[i])
   585  			i++
   586  		}
   587  	}
   588  	nn.Args = t
   589  
   590  	calls := []ir.Node{mkcall("printlock", nil, init)}
   591  	for i, n := range nn.Args {
   592  		if n.Op() == ir.OLITERAL {
   593  			if n.Type() == types.UntypedRune {
   594  				n = typecheck.DefaultLit(n, types.RuneType)
   595  			}
   596  
   597  			switch n.Val().Kind() {
   598  			case constant.Int:
   599  				n = typecheck.DefaultLit(n, types.Types[types.TINT64])
   600  
   601  			case constant.Float:
   602  				n = typecheck.DefaultLit(n, types.Types[types.TFLOAT64])
   603  			}
   604  		}
   605  
   606  		if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
   607  			n = typecheck.DefaultLit(n, types.Types[types.TINT64])
   608  		}
   609  		n = typecheck.DefaultLit(n, nil)
   610  		nn.Args[i] = n
   611  		if n.Type() == nil || n.Type().Kind() == types.TFORW {
   612  			continue
   613  		}
   614  
   615  		var on *ir.Name
   616  		switch n.Type().Kind() {
   617  		case types.TINTER:
   618  			if n.Type().IsEmptyInterface() {
   619  				on = typecheck.LookupRuntime("printeface", n.Type())
   620  			} else {
   621  				on = typecheck.LookupRuntime("printiface", n.Type())
   622  			}
   623  		case types.TPTR:
   624  			if n.Type().Elem().NotInHeap() {
   625  				on = typecheck.LookupRuntime("printuintptr")
   626  				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
   627  				n.SetType(types.Types[types.TUNSAFEPTR])
   628  				n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
   629  				n.SetType(types.Types[types.TUINTPTR])
   630  				break
   631  			}
   632  			fallthrough
   633  		case types.TCHAN, types.TMAP, types.TFUNC, types.TUNSAFEPTR:
   634  			on = typecheck.LookupRuntime("printpointer", n.Type())
   635  		case types.TSLICE:
   636  			on = typecheck.LookupRuntime("printslice", n.Type())
   637  		case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
   638  			if types.RuntimeSymName(n.Type().Sym()) == "hex" {
   639  				on = typecheck.LookupRuntime("printhex")
   640  			} else {
   641  				on = typecheck.LookupRuntime("printuint")
   642  			}
   643  		case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64:
   644  			on = typecheck.LookupRuntime("printint")
   645  		case types.TFLOAT32, types.TFLOAT64:
   646  			on = typecheck.LookupRuntime("printfloat")
   647  		case types.TCOMPLEX64, types.TCOMPLEX128:
   648  			on = typecheck.LookupRuntime("printcomplex")
   649  		case types.TBOOL:
   650  			on = typecheck.LookupRuntime("printbool")
   651  		case types.TSTRING:
   652  			cs := ""
   653  			if ir.IsConst(n, constant.String) {
   654  				cs = ir.StringVal(n)
   655  			}
   656  			switch cs {
   657  			case " ":
   658  				on = typecheck.LookupRuntime("printsp")
   659  			case "\n":
   660  				on = typecheck.LookupRuntime("printnl")
   661  			default:
   662  				on = typecheck.LookupRuntime("printstring")
   663  			}
   664  		default:
   665  			badtype(ir.OPRINT, n.Type(), nil)
   666  			continue
   667  		}
   668  
   669  		r := ir.NewCallExpr(base.Pos, ir.OCALL, on, nil)
   670  		if params := on.Type().Params(); len(params) > 0 {
   671  			t := params[0].Type
   672  			n = typecheck.Conv(n, t)
   673  			r.Args.Append(n)
   674  		}
   675  		calls = append(calls, r)
   676  	}
   677  
   678  	calls = append(calls, mkcall("printunlock", nil, init))
   679  
   680  	typecheck.Stmts(calls)
   681  	walkExprList(calls, init)
   682  
   683  	r := ir.NewBlockStmt(base.Pos, nil)
   684  	r.List = calls
   685  	return walkStmt(typecheck.Stmt(r))
   686  }
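
What the OPRINTLN handling above means for user code, as a hypothetical sketch (not part of this file): println separates operands with spaces, appends a newline, and adjacent constant strings are merged before the print* runtime calls.

package main

func main() {
	// Writes "x = 42 ok? true\n" to standard error: " " separators between
	// operands and a trailing "\n", with adjacent constant strings merged.
	println("x =", 42, "ok?", true)
}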
   687  
   688  // walkRecoverFP walks an ORECOVERFP node.
   689  func walkRecoverFP(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
   690  	return mkcall("gorecover", nn.Type(), init, walkExpr(nn.Args[0], init))
   691  }
   692  
   693  // walkUnsafeData walks an OUNSAFESLICEDATA or OUNSAFESTRINGDATA expression.
   694  func walkUnsafeData(n *ir.UnaryExpr, init *ir.Nodes) ir.Node {
   695  	slice := walkExpr(n.X, init)
   696  	res := typecheck.Expr(ir.NewUnaryExpr(n.Pos(), ir.OSPTR, slice))
   697  	res.SetType(n.Type())
   698  	return walkExpr(res, init)
   699  }
   700  
   701  func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
   702  	ptr := safeExpr(n.X, init)
   703  	len := safeExpr(n.Y, init)
   704  	sliceType := n.Type()
   705  
   706  	lenType := types.Types[types.TINT64]
   707  	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])
   708  
   709  	// If checkptr is enabled, call runtime.unsafeslicecheckptr to check ptr and len.
   710  	// For simplicity, unsafeslicecheckptr always uses int64.
   711  	// Type checking guarantees that TIDEAL len/cap are positive and fit in an int.
   712  	// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
   713  	// will be handled by the negative range checks in unsafeslice during runtime.
   714  	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
   715  		fnname := "unsafeslicecheckptr"
   716  		fn := typecheck.LookupRuntime(fnname)
   717  		init.Append(mkcall1(fn, nil, init, reflectdata.UnsafeSliceElemRType(base.Pos, n), unsafePtr, typecheck.Conv(len, lenType)))
   718  	} else {
   719  		// Otherwise, open code unsafe.Slice to prevent runtime call overhead.
   720  		// Keep this code in sync with runtime.unsafeslice{,64}
   721  		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
   722  			lenType = types.Types[types.TINT]
   723  		} else {
   724  			// len64 := int64(len)
   725  			// if int64(int(len64)) != len64 {
   726  			//     panicunsafeslicelen()
   727  			// }
   728  			len64 := typecheck.Conv(len, lenType)
   729  			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
   730  			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
   731  			nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
   732  			appendWalkStmt(init, nif)
   733  		}
   734  
   735  		// if len < 0 { panicunsafeslicelen() }
   736  		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
   737  		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
   738  		nif.Body.Append(mkcall("panicunsafeslicelen", nil, &nif.Body))
   739  		appendWalkStmt(init, nif)
   740  
   741  		if sliceType.Elem().Size() == 0 {
   742  			// if ptr == nil && len > 0  {
   743  			//      panicunsafesliceptrnil()
   744  			// }
   745  			nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
   746  			isNil := ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
   747  			gtZero := ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
   748  			nifPtr.Cond =
   749  				ir.NewLogicalExpr(base.Pos, ir.OANDAND, isNil, gtZero)
   750  			nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
   751  			appendWalkStmt(init, nifPtr)
   752  
   753  			h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
   754  				typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
   755  				typecheck.Conv(len, types.Types[types.TINT]),
   756  				typecheck.Conv(len, types.Types[types.TINT]))
   757  			return walkExpr(typecheck.Expr(h), init)
   758  		}
   759  
   760  		// mem, overflow := math.mulUintptr(et.size, len)
   761  		mem := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TUINTPTR])
   762  		overflow := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TBOOL])
   763  
   764  		decl := types.NewSignature(nil,
   765  			[]*types.Field{
   766  				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
   767  				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
   768  			},
   769  			[]*types.Field{
   770  				types.NewField(base.Pos, nil, types.Types[types.TUINTPTR]),
   771  				types.NewField(base.Pos, nil, types.Types[types.TBOOL]),
   772  			})
   773  
   774  		fn := ir.NewFunc(n.Pos(), n.Pos(), math_MulUintptr, decl)
   775  
   776  		call := mkcall1(fn.Nname, fn.Type().ResultsTuple(), init, ir.NewInt(base.Pos, sliceType.Elem().Size()), typecheck.Conv(typecheck.Conv(len, lenType), types.Types[types.TUINTPTR]))
   777  		appendWalkStmt(init, ir.NewAssignListStmt(base.Pos, ir.OAS2, []ir.Node{mem, overflow}, []ir.Node{call}))
   778  
   779  		// if overflow || mem > -uintptr(ptr) {
   780  		//     if ptr == nil {
   781  		//         panicunsafesliceptrnil()
   782  		//     }
   783  		//     panicunsafeslicelen()
   784  		// }
   785  		nif = ir.NewIfStmt(base.Pos, nil, nil, nil)
   786  		memCond := ir.NewBinaryExpr(base.Pos, ir.OGT, mem, ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
   787  		nif.Cond = ir.NewLogicalExpr(base.Pos, ir.OOROR, overflow, memCond)
   788  		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
   789  		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
   790  		nifPtr.Body.Append(mkcall("panicunsafeslicenilptr", nil, &nifPtr.Body))
   791  		nif.Body.Append(nifPtr, mkcall("panicunsafeslicelen", nil, &nif.Body))
   792  		appendWalkStmt(init, nif)
   793  	}
   794  
   795  	h := ir.NewSliceHeaderExpr(n.Pos(), sliceType,
   796  		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
   797  		typecheck.Conv(len, types.Types[types.TINT]),
   798  		typecheck.Conv(len, types.Types[types.TINT]))
   799  	return walkExpr(typecheck.Expr(h), init)
   800  }
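
A usage sketch for the checks open-coded above (hypothetical example, not part of this file): unsafe.Slice panics for a negative length, for a nil pointer with a non-zero length, or when the byte size overflows; otherwise it just builds a slice header.

package main

import (
	"fmt"
	"unsafe"
)

func main() {
	arr := [4]int{1, 2, 3, 4}
	s := unsafe.Slice(&arr[0], len(arr)) // valid: pointer plus in-range length
	fmt.Println(s)                       // [1 2 3 4]

	func() {
		defer func() { fmt.Println("recovered:", recover() != nil) }()
		var p *int
		_ = unsafe.Slice(p, 1) // nil pointer with non-zero length: panics
	}()
}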
   801  
   802  var math_MulUintptr = &types.Sym{Pkg: types.NewPkg("runtime/internal/math", "math"), Name: "MulUintptr"}
   803  
   804  func walkUnsafeString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
   805  	ptr := safeExpr(n.X, init)
   806  	len := safeExpr(n.Y, init)
   807  
   808  	lenType := types.Types[types.TINT64]
   809  	unsafePtr := typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR])
   810  
   811  	// If checkptr is enabled, call runtime.unsafestringcheckptr to check ptr and len.
   812  	// For simplicity, unsafestringcheckptr always uses int64.
   813  	// Type checking guarantees that a TIDEAL len is positive and fits in an int.
   814  	if ir.ShouldCheckPtr(ir.CurFunc, 1) {
   815  		fnname := "unsafestringcheckptr"
   816  		fn := typecheck.LookupRuntime(fnname)
   817  		init.Append(mkcall1(fn, nil, init, unsafePtr, typecheck.Conv(len, lenType)))
   818  	} else {
   819  		// Otherwise, open code unsafe.String to prevent runtime call overhead.
   820  		// Keep this code in sync with runtime.unsafestring{,64}
   821  		if len.Type().IsKind(types.TIDEAL) || len.Type().Size() <= types.Types[types.TUINT].Size() {
   822  			lenType = types.Types[types.TINT]
   823  		} else {
   824  			// len64 := int64(len)
   825  			// if int64(int(len64)) != len64 {
   826  			//     panicunsafestringlen()
   827  			// }
   828  			len64 := typecheck.Conv(len, lenType)
   829  			nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
   830  			nif.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, typecheck.Conv(typecheck.Conv(len64, types.Types[types.TINT]), lenType), len64)
   831  			nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
   832  			appendWalkStmt(init, nif)
   833  		}
   834  
   835  		// if len < 0 { panicunsafestringlen() }
   836  		nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
   837  		nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, typecheck.Conv(len, lenType), ir.NewInt(base.Pos, 0))
   838  		nif.Body.Append(mkcall("panicunsafestringlen", nil, &nif.Body))
   839  		appendWalkStmt(init, nif)
   840  
   841  		// if uintptr(len) > -uintptr(ptr) {
   842  		//    if ptr == nil {
   843  		//       panicunsafestringnilptr()
   844  		//    }
   845  		//    panicunsafestringlen()
   846  		// }
   847  		nifLen := ir.NewIfStmt(base.Pos, nil, nil, nil)
   848  		nifLen.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(len, types.Types[types.TUINTPTR]), ir.NewUnaryExpr(base.Pos, ir.ONEG, typecheck.Conv(unsafePtr, types.Types[types.TUINTPTR])))
   849  		nifPtr := ir.NewIfStmt(base.Pos, nil, nil, nil)
   850  		nifPtr.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, unsafePtr, typecheck.NodNil())
   851  		nifPtr.Body.Append(mkcall("panicunsafestringnilptr", nil, &nifPtr.Body))
   852  		nifLen.Body.Append(nifPtr, mkcall("panicunsafestringlen", nil, &nifLen.Body))
   853  		appendWalkStmt(init, nifLen)
   854  	}
   855  	h := ir.NewStringHeaderExpr(n.Pos(),
   856  		typecheck.Conv(ptr, types.Types[types.TUNSAFEPTR]),
   857  		typecheck.Conv(len, types.Types[types.TINT]),
   858  	)
   859  	return walkExpr(typecheck.Expr(h), init)
   860  }
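
And the matching sketch for unsafe.String (hypothetical example, not part of this file), which follows the same pattern with the length measured directly in bytes:

package main

import (
	"fmt"
	"unsafe"
)

func main() {
	b := []byte("hello")
	s := unsafe.String(&b[0], len(b)) // builds a string header over b's bytes
	fmt.Println(s)                    // hello
}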
   861  
   862  func badtype(op ir.Op, tl, tr *types.Type) {
   863  	var s string
   864  	if tl != nil {
   865  		s += fmt.Sprintf("\n\t%v", tl)
   866  	}
   867  	if tr != nil {
   868  		s += fmt.Sprintf("\n\t%v", tr)
   869  	}
   870  
   871  	// common mistake: *struct and *interface.
   872  	if tl != nil && tr != nil && tl.IsPtr() && tr.IsPtr() {
   873  		if tl.Elem().IsStruct() && tr.Elem().IsInterface() {
   874  			s += "\n\t(*struct vs *interface)"
   875  		} else if tl.Elem().IsInterface() && tr.Elem().IsStruct() {
   876  			s += "\n\t(*interface vs *struct)"
   877  		}
   878  	}
   879  
   880  	base.Errorf("illegal types for operand: %v%s", op, s)
   881  }
   882  
   883  func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
   884  	return typecheck.LookupRuntime(name, l, r)
   885  }
   886  
   887  // isRuneCount reports whether n is of the form len([]rune(string)).
   888  // These are optimized into a call to runtime.countrunes.
   889  func isRuneCount(n ir.Node) bool {
   890  	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN && n.(*ir.UnaryExpr).X.Op() == ir.OSTR2RUNES
   891  }
   892  
   893  // isByteCount reports whether n is of the form len(string([]byte)).
   894  func isByteCount(n ir.Node) bool {
   895  	return base.Flag.N == 0 && !base.Flag.Cfg.Instrumenting && n.Op() == ir.OLEN &&
   896  		(n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STR || n.(*ir.UnaryExpr).X.Op() == ir.OBYTES2STRTMP)
   897  }
   898  
   899  // isChanLenCap reports whether n is of the form len(c) or cap(c) for a channel c.
   900  // Note that this does not check for -N or instrumenting because this
   901  // is a correctness rewrite, not an optimization.
   902  func isChanLenCap(n ir.Node) bool {
   903  	return (n.Op() == ir.OLEN || n.Op() == ir.OCAP) && n.(*ir.UnaryExpr).X.Type().IsChan()
   904  }
   905  
