Source file src/cmd/compile/internal/walk/assign.go

     1  // Copyright 2009 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"go/constant"
     9  
    10  	"cmd/compile/internal/base"
    11  	"cmd/compile/internal/ir"
    12  	"cmd/compile/internal/reflectdata"
    13  	"cmd/compile/internal/typecheck"
    14  	"cmd/compile/internal/types"
    15  	"cmd/internal/src"
    16  )
    17  
// walkAssign walks an OAS (AssignExpr) or OASOP (AssignOpExpr) node.
// It flushes n's init list into init, rewrites OASOP into a plain OAS,
// and lowers special right-hand sides (channel receive, append) into
// runtime calls or forms the back end expects. The returned node
// replaces n in the walked statement list.
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so we can reuse
	// the mapassign call.
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		// Keep the walked map-index expression shared between the
		// assignment destination and append's first argument.
		mapAppend.Args[0] = left
	}

	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

	if oaslit(as, init) {
		// Composite-literal assignment was fully expanded into init;
		// replace the original statement with an empty block.
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// TODO(austin): Check all "implicit zeroing"
		return as
	}

	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		// Zero-value assignment needs no further lowering, except when
		// instrumenting, which must observe the store.
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c; as.Left is x, as.Right.Left is c.
		// order.stmt made sure x is addressable.
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		// Lower to chanrecv1(c, &x); the call replaces the assignment.
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(y, make([]T, y)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			r = appendSlice(call, init) // also works for append(slice, string).
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			// Left in place for back end.
			// Do not add a new write barrier.
			// Set up address of type for back end.
			r.(*ir.CallExpr).X = reflectdata.TypePtr(r.Type().Elem())
			return as
		}
		// Otherwise, lowered for race detector.
		// Treat as ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}
   117  
   118  // walkAssignDotType walks an OAS2DOTTYPE node.
   119  func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
   120  	walkExprListSafe(n.Lhs, init)
   121  	n.Rhs[0] = walkExpr(n.Rhs[0], init)
   122  	return n
   123  }
   124  
// walkAssignFunc walks an OAS2FUNC node (x, y = f()).
// Non-intrinsic calls are appended to init and replaced by a block of
// assignments from the call's result slots to the left-hand sides.
func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0]
	walkExprListSafe(n.Lhs, init)
	r = walkExpr(r, init)

	if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
		// Intrinsic calls are expanded directly during SSA generation;
		// keep the multi-assignment form intact.
		n.Rhs = []ir.Node{r}
		return n
	}
	init.Append(r)

	// Copy each result slot of the call into its destination.
	ll := ascompatet(n.Lhs, r.Type())
	return ir.NewBlockStmt(src.NoXPos, ll)
}
   142  
   143  // walkAssignList walks an OAS2 node.
   144  func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
   145  	init.Append(ir.TakeInit(n)...)
   146  	return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
   147  }
   148  
// walkAssignMapRead walks an OAS2MAPR node (a, b = m[i]), lowering it
// to a call of one of the mapaccess2* runtime functions. The value
// result is returned from the runtime as a pointer, so the node is
// rewritten to store that pointer into a temporary and then
// dereference it into a.
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.IndexExpr)
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	r.Index = walkExpr(r.Index, init)
	t := r.X.Type() // the map type

	fast := mapfast(t)
	key := mapKeyArg(fast, r, r.Index)

	// from:
	//   a,b = m[i]
	// to:
	//   var,b = mapaccess2*(t, m, i)
	//   a = *var
	a := n.Lhs[0]

	var call *ir.CallExpr
	if w := t.Elem().Size(); w <= zeroValSize {
		// Small element: the plain mapaccess2 variant suffices.
		fn := mapfn(mapaccess2[fast], t, false)
		call = mkcall1(fn, fn.Type().Results(), init, reflectdata.TypePtr(t), r.X, key)
	} else {
		// Large element: use the _fat variant, which takes the address
		// of a zero value big enough for the element type.
		fn := mapfn("mapaccess2_fat", t, true)
		z := reflectdata.ZeroAddr(w)
		call = mkcall1(fn, fn.Type().Results(), init, reflectdata.TypePtr(t), r.X, key, z)
	}

	// mapaccess2* returns a typed bool, but due to spec changes,
	// the boolean result of i.(T) is now untyped so we make it the
	// same type as the variable on the lhs.
	if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
		call.Type().Field(1).Type = ok.Type()
	}
	n.Rhs = []ir.Node{call}
	n.SetOp(ir.OAS2FUNC)

	// don't generate a = *var if a is _
	if ir.IsBlank(a) {
		return walkExpr(typecheck.Stmt(n), init)
	}

	var_ := typecheck.Temp(types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	n.Lhs[0] = var_
	init.Append(walkExpr(n, init))

	// a = *var
	as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
	return walkExpr(typecheck.Stmt(as), init)
}
   203  
   204  // walkAssignRecv walks an OAS2RECV node.
   205  func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
   206  	init.Append(ir.TakeInit(n)...)
   207  
   208  	r := n.Rhs[0].(*ir.UnaryExpr) // recv
   209  	walkExprListSafe(n.Lhs, init)
   210  	r.X = walkExpr(r.X, init)
   211  	var n1 ir.Node
   212  	if ir.IsBlank(n.Lhs[0]) {
   213  		n1 = typecheck.NodNil()
   214  	} else {
   215  		n1 = typecheck.NodAddr(n.Lhs[0])
   216  	}
   217  	fn := chanfn("chanrecv2", 2, r.X.Type())
   218  	ok := n.Lhs[1]
   219  	call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
   220  	return typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call))
   221  }
   222  
   223  // walkReturn walks an ORETURN node.
   224  func walkReturn(n *ir.ReturnStmt) ir.Node {
   225  	fn := ir.CurFunc
   226  
   227  	fn.NumReturns++
   228  	if len(n.Results) == 0 {
   229  		return n
   230  	}
   231  
   232  	results := fn.Type().Results().FieldSlice()
   233  	dsts := make([]ir.Node, len(results))
   234  	for i, v := range results {
   235  		// TODO(mdempsky): typecheck should have already checked the result variables.
   236  		dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
   237  	}
   238  
   239  	n.Results = ascompatee(n.Op(), dsts, n.Results)
   240  	return n
   241  }
   242  
   243  // check assign type list to
   244  // an expression list. called in
   245  //	expr-list = func()
   246  func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
   247  	if len(nl) != nr.NumFields() {
   248  		base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields())
   249  	}
   250  
   251  	var nn ir.Nodes
   252  	for i, l := range nl {
   253  		if ir.IsBlank(l) {
   254  			continue
   255  		}
   256  		r := nr.Field(i)
   257  
   258  		// Order should have created autotemps of the appropriate type for
   259  		// us to store results into.
   260  		if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) {
   261  			base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l)
   262  		}
   263  
   264  		res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
   265  		res.Index = int64(i)
   266  		res.SetType(r.Type)
   267  		res.SetTypecheck(1)
   268  
   269  		nn.Append(ir.NewAssignStmt(base.Pos, l, res))
   270  	}
   271  	return nn
   272  }
   273  
// ascompatee checks the assignment of an expression list to an
// expression list, as in
//	expr-list = expr-list
// and returns the statement list implementing it. op is the operation
// being lowered (ir.OAS or ir.ORETURN). Because a parallel assignment
// is evaluated left to right once lowered, any operand that an earlier
// assignment might clobber is copied into a temporary first.
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
	// cannot happen: should have been rejected during type checking
	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	// assigned tracks local, non-addrtaken variables written so far;
	// memWrite records that some write may have gone through memory;
	// deferResultWrite records a write to a result parameter that can
	// become visible early if a later expression panics (#43835).
	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could be affected by
	// the assignments applied so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an
	// earlier assignment, make an early copy of that
	// expression and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}

	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return. See issue 4014.
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Save subexpressions needed on left side.
		// Drill through non-dereferences.
		for {
			// If an expression has init statements, they must be evaluated
			// before any of its saved sub-operands (#45706).
			// TODO(mdempsky): Disallow init statements on lvalues.
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save expression on right side.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Check for reasons why we may need to compute later expressions
		// before this assignment happens.

		if name == nil {
			// Not a direct assignment to a declared variable.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// Assignments to a result parameter in a function with defers
			// becomes visible early if evaluation of any later expression
			// panics (#43835).
			deferResultWrite = true
			continue
		}

		if sym := types.OrigSym(name.Sym()); sym == nil || sym.IsBlank() {
			// We can ignore assignments to blank or anonymous result parameters.
			// These can't appear in expressions anyway.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Global variable, heap escaped, or just addrtaken.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable.
		// Assignments can only alias with direct uses of this variable.
		assigned.Add(name)
	}

	// All early copies and lvalue inits run first, then the
	// assignments themselves in order.
	early.Append(late.Take()...)
	return early
}
   413  
   414  // readsMemory reports whether the evaluation n directly reads from
   415  // memory that might be written to indirectly.
   416  func readsMemory(n ir.Node) bool {
   417  	switch n.Op() {
   418  	case ir.ONAME:
   419  		n := n.(*ir.Name)
   420  		if n.Class == ir.PFUNC {
   421  			return false
   422  		}
   423  		return n.Addrtaken() || !n.OnStack()
   424  
   425  	case ir.OADD,
   426  		ir.OAND,
   427  		ir.OANDAND,
   428  		ir.OANDNOT,
   429  		ir.OBITNOT,
   430  		ir.OCONV,
   431  		ir.OCONVIFACE,
   432  		ir.OCONVIDATA,
   433  		ir.OCONVNOP,
   434  		ir.ODIV,
   435  		ir.ODOT,
   436  		ir.ODOTTYPE,
   437  		ir.OLITERAL,
   438  		ir.OLSH,
   439  		ir.OMOD,
   440  		ir.OMUL,
   441  		ir.ONEG,
   442  		ir.ONIL,
   443  		ir.OOR,
   444  		ir.OOROR,
   445  		ir.OPAREN,
   446  		ir.OPLUS,
   447  		ir.ORSH,
   448  		ir.OSUB,
   449  		ir.OXOR:
   450  		return false
   451  	}
   452  
   453  	// Be conservative.
   454  	return true
   455  }
   456  
// appendSlice expands append(l1, l2...) to
//	init {
//		s := l1
//		n := len(s) + len(l2)
//		// Compare as uint so growslice can panic on overflow.
//		if uint(n) > uint(cap(s)) {
//			s = growslice(s, n)
//		}
//		s = s[:n]
//		memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
//	}
//	s
//
// l2 is allowed to be a string.
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	s := typecheck.Temp(l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1)) // s = l1

	elemtype := s.Type().Elem()

	// n := len(s) + len(l2)
	nn := typecheck.Temp(types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))))

	// if uint(n) > uint(cap(s))
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	scapuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nuint, scapuint)

	// instantiate growslice(typ *type, []any, int) []any
	fn := typecheck.LookupRuntime("growslice")
	fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)

	// s = growslice(T, s, n)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), reflectdata.TypePtr(elemtype), s, nn))}
	nodes.Append(nif)

	// s = s[:n]
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nodes.Append(ir.NewAssignStmt(base.Pos, s, nt))

	// Choose how to copy l2 into place: a typed copy when the element
	// type has pointers (needs write barriers), an instrumented
	// slicecopy when race/msan instrumentation is on, or a raw memmove
	// otherwise.
	var ncopy ir.Node
	if elemtype.HasPointers() {
		// copy(s[len(l1):], l2)
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1), nil, nil)
		slice.SetType(s.Type())

		ir.CurFunc.SetWBPos(n.Pos())

		// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
		fn := typecheck.LookupRuntime("typedslicecopy")
		fn = typecheck.SubstArgTypes(fn, l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.TypePtr(elemtype), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// rely on runtime to instrument:
		//  copy(s[len(l1):], l2)
		// l2 can be a slice or string.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1), nil, nil)
		slice.SetType(s.Type())

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy")
		fn = typecheck.SubstArgTypes(fn, ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(elemtype.Size()))
	} else {
		// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
		ix := ir.NewIndexExpr(base.Pos, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1))
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(elemtype.Size()))

		// instantiate func memmove(to *any, frm *any, length uintptr)
		fn := typecheck.LookupRuntime("memmove")
		fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
	ln := append(nodes, ncopy)

	// Typecheck and walk the generated statements, then emit them.
	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}
   560  
   561  // isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
   562  // isAppendOfMake assumes n has already been typechecked.
   563  func isAppendOfMake(n ir.Node) bool {
   564  	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
   565  		return false
   566  	}
   567  
   568  	if n.Typecheck() == 0 {
   569  		base.Fatalf("missing typecheck: %+v", n)
   570  	}
   571  
   572  	if n.Op() != ir.OAPPEND {
   573  		return false
   574  	}
   575  	call := n.(*ir.CallExpr)
   576  	if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
   577  		return false
   578  	}
   579  
   580  	mk := call.Args[1].(*ir.MakeExpr)
   581  	if mk.Cap != nil {
   582  		return false
   583  	}
   584  
   585  	// y must be either an integer constant or the largest possible positive value
   586  	// of variable y needs to fit into an uint.
   587  
   588  	// typecheck made sure that constant arguments to make are not negative and fit into an int.
   589  
   590  	// The care of overflow of the len argument to make will be handled by an explicit check of int(len) < 0 during runtime.
   591  	y := mk.Len
   592  	if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
   593  		return false
   594  	}
   595  
   596  	return true
   597  }
   598  
// extendSlice rewrites append(l1, make([]T, l2)...) to
//	init {
//		if l2 >= 0 { // Empty if block here for more meaningful node.SetLikely(true)
//		} else {
//			panicmakeslicelen()
//		}
//		s := l1
//		n := len(s) + l2
//		// Compare n and s as uint so growslice can panic on overflow of len(s) + l2.
//		// cap is a positive int and n can become negative when len(s) + l2
//		// overflows int. Interpreting n when negative as uint makes it larger
//		// than cap(s). growslice will check the int n arg and panic if n is
//		// negative. This prevents the overflow from being undetected.
//		if uint(n) > uint(cap(s)) {
//			s = growslice(T, s, n)
//		}
//		s = s[:n]
//		lptr := &l1[0]
//		sptr := &s[0]
//		if lptr == sptr || !T.HasPointers() {
//			// growslice did not clear the whole underlying array (or did not get called)
//			hp := &s[len(l1)]
//			hn := l2 * sizeof(T)
//			memclr(hp, hn)
//		}
//	}
//	s
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit into an uint.
	// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
	// check of l2 < 0 at runtime which is generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2 // walkAppendArgs expects l2 in n.List.Second().

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read l2, as it may have been updated by walkAppendArgs

	var nodes []ir.Node

	// if l2 >= 0 (likely happens), do nothing
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(0)), nil, nil)
	nifneg.Likely = true

	// else panicmakeslicelen()
	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.Temp(l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	elemtype := s.Type().Elem()

	// n := len(s) + l2
	nn := typecheck.Temp(types.Types[types.TINT])
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(n) > uint(cap(s))
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, nuint, capuint), nil, nil)

	// instantiate growslice(typ *type, old []any, newcap int) []any
	fn := typecheck.LookupRuntime("growslice")
	fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)

	// s = growslice(T, s, n)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), reflectdata.TypePtr(elemtype), s, nn))}
	nodes = append(nodes, nif)

	// s = s[:n]
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, nt))

	// lptr := &l1[0]
	l1ptr := typecheck.Temp(l1.Type().Elem().PtrTo())
	tmp := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l1)
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, l1ptr, tmp))

	// sptr := &s[0]
	sptr := typecheck.Temp(elemtype.PtrTo())
	tmp = ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, sptr, tmp))

	// hp := &s[len(l1)]
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(elem(s))
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(elemtype.Size())), types.Types[types.TUINTPTR])

	// Pick the memclr flavor; clearing memory that holds pointers
	// requires the write-barrier-aware variant.
	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)

	if hasPointers {
		// Only clear when append reused l1's backing array
		// (l1ptr == sptr); a freshly grown array is already cleared.
		// if l1ptr == sptr
		nifclr := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OEQ, l1ptr, sptr), nil, nil)
		nifclr.Body = clr
		nodes = append(nodes, nifclr)
	} else {
		nodes = append(nodes, clr...)
	}

	// Typecheck and walk the generated statements, then emit them.
	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}
   720  

View as plain text