Source file src/cmd/compile/internal/walk/order.go

     1  // Copyright 2012 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package walk
     6  
     7  import (
     8  	"fmt"
     9  	"go/constant"
    10  	"internal/abi"
    11  
    12  	"cmd/compile/internal/base"
    13  	"cmd/compile/internal/ir"
    14  	"cmd/compile/internal/reflectdata"
    15  	"cmd/compile/internal/ssa"
    16  	"cmd/compile/internal/staticinit"
    17  	"cmd/compile/internal/typecheck"
    18  	"cmd/compile/internal/types"
    19  	"cmd/internal/objabi"
    20  	"cmd/internal/src"
    21  )
    22  
    23  // Rewrite tree to use separate statements to enforce
    24  // order of evaluation. Makes walk easier, because it
    25  // can (after this runs) reorder at will within an expression.
    26  //
    27  // Rewrite m[k] op= r into m[k] = m[k] op r if op is / or %.
    28  //
    29  // Introduce temporaries as needed by runtime routines.
    30  // For example, the map runtime routines take the map key
    31  // by reference, so make sure all map keys are addressable
    32  // by copying them to temporaries as needed.
    33  // The same is true for channel operations.
    34  //
    35  // Arrange that map index expressions only appear in direct
    36  // assignments x = m[k] or m[k] = x, never in larger expressions.
    37  //
    38  // Arrange that receive expressions only appear in direct assignments
    39  // x = <-c or as standalone statements <-c, never in larger expressions.
    40  
// orderState holds state during the ordering process.
// It accumulates the rewritten statements in out, tracks the stack of
// temporaries currently in use (temp), and recycles temporaries that
// have gone out of scope via the per-type free lists (free).
type orderState struct {
	out  []ir.Node             // list of generated statements
	temp []*ir.Name            // stack of temporary variables
	free map[string][]*ir.Name // free list of unused temporaries, by type.LinkString().
	edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS
}
    48  
    49  // order rewrites fn.Nbody to apply the ordering constraints
    50  // described in the comment at the top of the file.
    51  func order(fn *ir.Func) {
    52  	if base.Flag.W > 1 {
    53  		s := fmt.Sprintf("\nbefore order %v", fn.Sym())
    54  		ir.DumpList(s, fn.Body)
    55  	}
    56  	ir.SetPos(fn) // Set reasonable position for instrumenting code. See issue 53688.
    57  	orderBlock(&fn.Body, map[string][]*ir.Name{})
    58  }
    59  
    60  // append typechecks stmt and appends it to out.
    61  func (o *orderState) append(stmt ir.Node) {
    62  	o.out = append(o.out, typecheck.Stmt(stmt))
    63  }
    64  
    65  // newTemp allocates a new temporary with the given type,
    66  // pushes it onto the temp stack, and returns it.
    67  // If clear is true, newTemp emits code to zero the temporary.
    68  func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
    69  	var v *ir.Name
    70  	key := t.LinkString()
    71  	if a := o.free[key]; len(a) > 0 {
    72  		v = a[len(a)-1]
    73  		if !types.Identical(t, v.Type()) {
    74  			base.Fatalf("expected %L to have type %v", v, t)
    75  		}
    76  		o.free[key] = a[:len(a)-1]
    77  	} else {
    78  		v = typecheck.TempAt(base.Pos, ir.CurFunc, t)
    79  	}
    80  	if clear {
    81  		o.append(ir.NewAssignStmt(base.Pos, v, nil))
    82  	}
    83  
    84  	o.temp = append(o.temp, v)
    85  	return v
    86  }
    87  
// copyExpr behaves like newTemp but also emits
// code to initialize the temporary to the value n.
// The returned temporary is intended to replace n in the caller.
func (o *orderState) copyExpr(n ir.Node) *ir.Name {
	return o.copyExpr1(n, false)
}
    93  
// copyExprClear is like copyExpr but clears the temp before assignment.
// It is provided for use when the evaluation of tmp = n turns into
// a function call that is passed a pointer to the temporary as the output space.
// If the call blocks before tmp has been written,
// the garbage collector will still treat the temporary as live,
// so we must zero it before entering that call.
// Today, this only happens for channel receive operations.
// (The other candidate would be map access, but map access
// returns a pointer to the result data instead of taking a pointer
// to be filled in.)
// See copyExpr1 for the shared implementation.
func (o *orderState) copyExprClear(n ir.Node) *ir.Name {
	return o.copyExpr1(n, true)
}
   107  
   108  func (o *orderState) copyExpr1(n ir.Node, clear bool) *ir.Name {
   109  	t := n.Type()
   110  	v := o.newTemp(t, clear)
   111  	o.append(ir.NewAssignStmt(base.Pos, v, n))
   112  	return v
   113  }
   114  
   115  // cheapExpr returns a cheap version of n.
   116  // The definition of cheap is that n is a variable or constant.
   117  // If not, cheapExpr allocates a new tmp, emits tmp = n,
   118  // and then returns tmp.
   119  func (o *orderState) cheapExpr(n ir.Node) ir.Node {
   120  	if n == nil {
   121  		return nil
   122  	}
   123  
   124  	switch n.Op() {
   125  	case ir.ONAME, ir.OLITERAL, ir.ONIL:
   126  		return n
   127  	case ir.OLEN, ir.OCAP:
   128  		n := n.(*ir.UnaryExpr)
   129  		l := o.cheapExpr(n.X)
   130  		if l == n.X {
   131  			return n
   132  		}
   133  		a := ir.Copy(n).(*ir.UnaryExpr)
   134  		a.X = l
   135  		return typecheck.Expr(a)
   136  	}
   137  
   138  	return o.copyExpr(n)
   139  }
   140  
// safeExpr returns a safe version of n.
// The definition of safe is that n can appear multiple times
// without violating the semantics of the original program,
// and that assigning to the safe version has the same effect
// as assigning to the original n.
//
// The intended use is to apply to x when rewriting x += y into x = x + y.
func (o *orderState) safeExpr(n ir.Node) ir.Node {
	switch n.Op() {
	case ir.ONAME, ir.OLITERAL, ir.ONIL:
		// Names and constants are trivially safe.
		return n

	case ir.OLEN, ir.OCAP:
		// len/cap: the operand itself must be safe, since the result
		// depends on the operand's current value each time.
		n := n.(*ir.UnaryExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.UnaryExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOT:
		// Field of a value: assigning to the field writes through the
		// base value, so the base must be safe, not merely cheap.
		n := n.(*ir.SelectorExpr)
		l := o.safeExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODOTPTR:
		// Field through a pointer: only the pointer value needs to be
		// evaluated once (cheap); the pointed-to memory is shared.
		n := n.(*ir.SelectorExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.SelectorExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.ODEREF:
		// Dereference: as with ODOTPTR, a cheap copy of the pointer
		// suffices.
		n := n.(*ir.StarExpr)
		l := o.cheapExpr(n.X)
		if l == n.X {
			return n
		}
		a := ir.Copy(n).(*ir.StarExpr)
		a.X = l
		return typecheck.Expr(a)

	case ir.OINDEX, ir.OINDEXMAP:
		// Indexing: an array value must be safe (assignment writes into
		// the array itself); slices and maps share their backing store,
		// so a cheap copy of the base is enough. The index is evaluated
		// once in either case.
		n := n.(*ir.IndexExpr)
		var l ir.Node
		if n.X.Type().IsArray() {
			l = o.safeExpr(n.X)
		} else {
			l = o.cheapExpr(n.X)
		}
		r := o.cheapExpr(n.Index)
		if l == n.X && r == n.Index {
			return n
		}
		a := ir.Copy(n).(*ir.IndexExpr)
		a.X = l
		a.Index = r
		return typecheck.Expr(a)

	default:
		base.Fatalf("order.safeExpr %v", n.Op())
		return nil // not reached
	}
}
   215  
// addrTemp ensures that n is okay to pass by address to runtime routines.
// If the original argument n is not okay, addrTemp creates a tmp, emits
// tmp = n, and then returns tmp.
// The result of addrTemp MUST be assigned back to n, e.g.
//
//	n.Left = o.addrTemp(n.Left)
func (o *orderState) addrTemp(n ir.Node) ir.Node {
	// Note: Avoid addrTemp with static assignment for literal strings
	// when compiling FIPS packages.
	// The problem is that panic("foo") ends up creating a static RODATA temp
	// for the implicit conversion of "foo" to any, and we can't handle
	// the relocations in that temp.
	if n.Op() == ir.ONIL || (n.Op() == ir.OLITERAL && !base.Ctxt.IsFIPS()) {
		// This is a basic literal or nil that we can store
		// directly in the read-only data section.
		n = typecheck.DefaultLit(n, nil)
		types.CalcSize(n.Type())
		vstat := readonlystaticname(n.Type())
		var s staticinit.Schedule
		s.StaticAssign(vstat, 0, n, n.Type())
		if s.Out != nil {
			// A constant should never require runtime initialization code.
			base.Fatalf("staticassign of const generated code: %+v", n)
		}
		vstat = typecheck.Expr(vstat).(*ir.Name)
		return vstat
	}

	// Check now for a composite literal to possibly store in the read-only data section.
	// staticValue may see through to an underlying literal; fall back to n itself.
	v := staticValue(n)
	if v == nil {
		v = n
	}
	optEnabled := func(n ir.Node) bool {
		// Do this optimization only when enabled for this node.
		return base.LiteralAllocHash.MatchPos(n.Pos(), nil)
	}
	if (v.Op() == ir.OSTRUCTLIT || v.Op() == ir.OARRAYLIT) && !base.Ctxt.IsFIPS() {
		if ir.IsZero(v) && 0 < v.Type().Size() && v.Type().Size() <= abi.ZeroValSize && optEnabled(n) {
			// This zero value can be represented by the read-only zeroVal.
			zeroVal := ir.NewLinksymExpr(v.Pos(), ir.Syms.ZeroVal, n.Type())
			vstat := typecheck.Expr(zeroVal).(*ir.LinksymOffsetExpr)
			return vstat
		}
		if isStaticCompositeLiteral(v) && optEnabled(n) {
			// v can be directly represented in the read-only data section.
			lit := v.(*ir.CompLitExpr)
			vstat := readonlystaticname(n.Type())
			fixedlit(inInitFunction, initKindStatic, lit, vstat, nil) // nil init
			vstat = typecheck.Expr(vstat).(*ir.Name)
			return vstat
		}
	}

	// Prevent taking the address of an SSA-able local variable (#63332).
	//
	// TODO(mdempsky): Note that OuterValue unwraps OCONVNOPs, but
	// IsAddressable does not. It should be possible to skip copying for
	// at least some of these OCONVNOPs (e.g., reinsert them after the
	// OADDR operation), but at least walkCompare needs to be fixed to
	// support that (see trybot failures on go.dev/cl/541715, PS1).
	if ir.IsAddressable(n) {
		if name, ok := ir.OuterValue(n).(*ir.Name); ok && name.Op() == ir.ONAME {
			if name.Class == ir.PAUTO && !name.Addrtaken() && ssa.CanSSA(name.Type()) {
				// Copy to a temporary instead of exposing the
				// variable's address, so it stays SSA-able.
				goto Copy
			}
		}

		// Addressable and not an SSA candidate: pass n as is.
		return n
	}

Copy:
	return o.copyExpr(n)
}
   289  
// mapKeyTemp prepares n to be a key in a map runtime call and returns n.
// The first parameter is the position of n's containing node, for use in case
// that n's position is not unique (e.g., if n is an ONAME).
func (o *orderState) mapKeyTemp(outerPos src.XPos, t *types.Type, n ir.Node) ir.Node {
	pos := outerPos
	if ir.HasUniquePos(n) {
		pos = n.Pos()
	}
	// Most map calls need to take the address of the key.
	// Exception: map*_fast* calls. See golang.org/issue/19015.
	alg := mapfast(t)
	if alg == mapslow {
		return o.addrTemp(n)
	}
	// kt is the key type the selected fast routine expects.
	var kt *types.Type
	switch alg {
	case mapfast32:
		kt = types.Types[types.TUINT32]
	case mapfast64:
		kt = types.Types[types.TUINT64]
	case mapfast32ptr, mapfast64ptr:
		kt = types.Types[types.TUNSAFEPTR]
	case mapfaststr:
		kt = types.Types[types.TSTRING]
	}
	nt := n.Type()
	switch {
	case nt == kt:
		// Key already has exactly the expected type.
		return n
	case nt.Kind() == kt.Kind(), nt.IsPtrShaped() && kt.IsPtrShaped():
		// can directly convert (e.g. named type to underlying type, or one pointer to another)
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONVNOP, kt, n))
	case nt.IsInteger() && kt.IsInteger():
		// can directly convert (e.g. int32 to uint32)
		if n.Op() == ir.OLITERAL && nt.IsSigned() {
			// avoid constant overflow error
			n = ir.NewConstExpr(constant.MakeUint64(uint64(ir.Int64Val(n))), n)
			n.SetType(kt)
			return n
		}
		return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONV, kt, n))
	default:
		// Unsafe cast through memory.
		// We'll need to do a load with type kt. Create a temporary of type kt to
		// ensure sufficient alignment. nt may be under-aligned.
		if uint8(kt.Alignment()) < uint8(nt.Alignment()) {
			base.Fatalf("mapKeyTemp: key type is not sufficiently aligned, kt=%v nt=%v", kt, nt)
		}
		tmp := o.newTemp(kt, true)
		// *(*nt)(&tmp) = n
		var e ir.Node = typecheck.NodAddr(tmp)
		e = ir.NewConvExpr(pos, ir.OCONVNOP, nt.PtrTo(), e)
		e = ir.NewStarExpr(pos, e)
		o.append(ir.NewAssignStmt(pos, e, n))
		return tmp
	}
}
   347  
   348  // mapKeyReplaceStrConv replaces OBYTES2STR by OBYTES2STRTMP
   349  // in n to avoid string allocations for keys in map lookups.
   350  // Returns a bool that signals if a modification was made.
   351  //
   352  // For:
   353  //
   354  //	x = m[string(k)]
   355  //	x = m[T1{... Tn{..., string(k), ...}}]
   356  //
   357  // where k is []byte, T1 to Tn is a nesting of struct and array literals,
   358  // the allocation of backing bytes for the string can be avoided
   359  // by reusing the []byte backing array. These are special cases
   360  // for avoiding allocations when converting byte slices to strings.
   361  // It would be nice to handle these generally, but because
   362  // []byte keys are not allowed in maps, the use of string(k)
   363  // comes up in important cases in practice. See issue 3512.
   364  //
   365  // Note that this code does not handle the case:
   366  //
   367  //	s := string(k)
   368  //	x = m[s]
   369  //
   370  // Cases like this are handled during SSA, search for slicebytetostring
   371  // in ../ssa/_gen/generic.rules.
   372  func mapKeyReplaceStrConv(n ir.Node) bool {
   373  	var replaced bool
   374  	switch n.Op() {
   375  	case ir.OBYTES2STR:
   376  		n := n.(*ir.ConvExpr)
   377  		n.SetOp(ir.OBYTES2STRTMP)
   378  		replaced = true
   379  	case ir.OSTRUCTLIT:
   380  		n := n.(*ir.CompLitExpr)
   381  		for _, elem := range n.List {
   382  			elem := elem.(*ir.StructKeyExpr)
   383  			if mapKeyReplaceStrConv(elem.Value) {
   384  				replaced = true
   385  			}
   386  		}
   387  	case ir.OARRAYLIT:
   388  		n := n.(*ir.CompLitExpr)
   389  		for _, elem := range n.List {
   390  			if elem.Op() == ir.OKEY {
   391  				elem = elem.(*ir.KeyExpr).Value
   392  			}
   393  			if mapKeyReplaceStrConv(elem) {
   394  				replaced = true
   395  			}
   396  		}
   397  	}
   398  	return replaced
   399  }
   400  
// ordermarker marks a position in the stack of temporary variables
// (see markTemp and popTemp).
type ordermarker int
   402  
// markTemp returns the top of the temporary variable stack.
// Pass the result to popTemp to release all temporaries
// allocated after this point back to the free list.
func (o *orderState) markTemp() ordermarker {
	return ordermarker(len(o.temp))
}
   407  
   408  // popTemp pops temporaries off the stack until reaching the mark,
   409  // which must have been returned by markTemp.
   410  func (o *orderState) popTemp(mark ordermarker) {
   411  	for _, n := range o.temp[mark:] {
   412  		key := n.Type().LinkString()
   413  		o.free[key] = append(o.free[key], n)
   414  	}
   415  	o.temp = o.temp[:mark]
   416  }
   417  
   418  // stmtList orders each of the statements in the list.
   419  func (o *orderState) stmtList(l ir.Nodes) {
   420  	s := l
   421  	for i := range s {
   422  		orderMakeSliceCopy(s[i:])
   423  		o.stmt(s[i])
   424  	}
   425  }
   426  
// orderMakeSliceCopy matches the pattern:
//
//	m = OMAKESLICE([]T, x); OCOPY(m, s)
//
// and rewrites it to:
//
//	m = OMAKESLICECOPY([]T, x, s); nil
func orderMakeSliceCopy(s []ir.Node) {
	// Skip the fusion when optimizations are disabled or when
	// instrumentation is active (it needs to see both operations).
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return
	}
	// Need an assignment immediately followed by a copy.
	if len(s) < 2 || s[0] == nil || s[0].Op() != ir.OAS || s[1] == nil || s[1].Op() != ir.OCOPY {
		return
	}

	as := s[0].(*ir.AssignStmt)
	cp := s[1].(*ir.BinaryExpr)
	if as.Y == nil || as.Y.Op() != ir.OMAKESLICE || ir.IsBlank(as.X) ||
		as.X.Op() != ir.ONAME || cp.X.Op() != ir.ONAME || cp.Y.Op() != ir.ONAME ||
		as.X.Name() != cp.X.Name() || cp.X.Name() == cp.Y.Name() {
		// The line above this one is correct with the differing equality operators:
		// we want as.X and cp.X to be the same name,
		// but we want the initial data to be coming from a different name.
		return
	}

	mk := as.Y.(*ir.MakeExpr)
	// Only fuse a make that escapes (not EscNone), has an explicit
	// length, and no explicit capacity.
	if mk.Esc() == ir.EscNone || mk.Len == nil || mk.Cap != nil {
		return
	}
	mk.SetOp(ir.OMAKESLICECOPY)
	mk.Cap = cp.Y
	// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
	mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
	as.Y = typecheck.Expr(mk)
	s[1] = nil // remove separate copy call
}
   464  
   465  // edge inserts coverage instrumentation for libfuzzer.
   466  func (o *orderState) edge() {
   467  	if base.Debug.Libfuzzer == 0 {
   468  		return
   469  	}
   470  
   471  	// Create a new uint8 counter to be allocated in section __sancov_cntrs
   472  	counter := staticinit.StaticName(types.Types[types.TUINT8])
   473  	counter.SetLibfuzzer8BitCounter(true)
   474  	// As well as setting SetLibfuzzer8BitCounter, we preemptively set the
   475  	// symbol type to SLIBFUZZER_8BIT_COUNTER so that the race detector
   476  	// instrumentation pass (which does not have access to the flags set by
   477  	// SetLibfuzzer8BitCounter) knows to ignore them. This information is
   478  	// lost by the time it reaches the compile step, so SetLibfuzzer8BitCounter
   479  	// is still necessary.
   480  	counter.Linksym().Type = objabi.SLIBFUZZER_8BIT_COUNTER
   481  
   482  	// We guarantee that the counter never becomes zero again once it has been
   483  	// incremented once. This implementation follows the NeverZero optimization
   484  	// presented by the paper:
   485  	// "AFL++: Combining Incremental Steps of Fuzzing Research"
   486  	// The NeverZero policy avoids the overflow to 0 by setting the counter to one
   487  	// after it reaches 255 and so, if an edge is executed at least one time, the entry is
   488  	// never 0.
   489  	// Another policy presented in the paper is the Saturated Counters policy which
   490  	// freezes the counter when it reaches the value of 255. However, a range
   491  	// of experiments showed that doing so decreases overall performance.
   492  	o.append(ir.NewIfStmt(base.Pos,
   493  		ir.NewBinaryExpr(base.Pos, ir.OEQ, counter, ir.NewInt(base.Pos, 0xff)),
   494  		[]ir.Node{ir.NewAssignStmt(base.Pos, counter, ir.NewInt(base.Pos, 1))},
   495  		[]ir.Node{ir.NewAssignOpStmt(base.Pos, ir.OADD, counter, ir.NewInt(base.Pos, 1))}))
   496  }
   497  
   498  // orderBlock orders the block of statements in n into a new slice,
   499  // and then replaces the old slice in n with the new slice.
   500  // free is a map that can be used to obtain temporary variables by type.
   501  func orderBlock(n *ir.Nodes, free map[string][]*ir.Name) {
   502  	if len(*n) != 0 {
   503  		// Set reasonable position for instrumenting code. See issue 53688.
   504  		// It would be nice if ir.Nodes had a position (the opening {, probably),
   505  		// but it doesn't. So we use the first statement's position instead.
   506  		ir.SetPos((*n)[0])
   507  	}
   508  	var order orderState
   509  	order.free = free
   510  	mark := order.markTemp()
   511  	order.edge()
   512  	order.stmtList(*n)
   513  	order.popTemp(mark)
   514  	*n = order.out
   515  }
   516  
   517  // exprInPlace orders the side effects in *np and
   518  // leaves them as the init list of the final *np.
   519  // The result of exprInPlace MUST be assigned back to n, e.g.
   520  //
   521  //	n.Left = o.exprInPlace(n.Left)
   522  func (o *orderState) exprInPlace(n ir.Node) ir.Node {
   523  	var order orderState
   524  	order.free = o.free
   525  	n = order.expr(n, nil)
   526  	n = ir.InitExpr(order.out, n)
   527  
   528  	// insert new temporaries from order
   529  	// at head of outer list.
   530  	o.temp = append(o.temp, order.temp...)
   531  	return n
   532  }
   533  
   534  // orderStmtInPlace orders the side effects of the single statement *np
   535  // and replaces it with the resulting statement list.
   536  // The result of orderStmtInPlace MUST be assigned back to n, e.g.
   537  //
   538  //	n.Left = orderStmtInPlace(n.Left)
   539  //
   540  // free is a map that can be used to obtain temporary variables by type.
   541  func orderStmtInPlace(n ir.Node, free map[string][]*ir.Name) ir.Node {
   542  	var order orderState
   543  	order.free = free
   544  	mark := order.markTemp()
   545  	order.stmt(n)
   546  	order.popTemp(mark)
   547  	return ir.NewBlockStmt(src.NoXPos, order.out)
   548  }
   549  
   550  // init moves n's init list to o.out.
   551  func (o *orderState) init(n ir.Node) {
   552  	if ir.MayBeShared(n) {
   553  		// For concurrency safety, don't mutate potentially shared nodes.
   554  		// First, ensure that no work is required here.
   555  		if len(n.Init()) > 0 {
   556  			base.Fatalf("order.init shared node with ninit")
   557  		}
   558  		return
   559  	}
   560  	o.stmtList(ir.TakeInit(n))
   561  }
   562  
// call orders the call expression n.
// n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY.
func (o *orderState) call(nn ir.Node) {
	if len(nn.Init()) > 0 {
		// Caller should have already called o.init(nn).
		base.Fatalf("%v with unexpected ninit", nn.Op())
	}
	if nn.Op() == ir.OCALLMETH {
		base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
	}

	// Builtin functions.
	if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
		// Order each operand according to the builtin's node shape.
		switch n := nn.(type) {
		default:
			base.Fatalf("unexpected call: %+v", n)
		case *ir.UnaryExpr:
			n.X = o.expr(n.X, nil)
		case *ir.ConvExpr:
			n.X = o.expr(n.X, nil)
		case *ir.BinaryExpr:
			n.X = o.expr(n.X, nil)
			n.Y = o.expr(n.Y, nil)
		case *ir.MakeExpr:
			n.Len = o.expr(n.Len, nil)
			n.Cap = o.expr(n.Cap, nil)
		case *ir.CallExpr:
			o.exprList(n.Args)
		}
		return
	}

	n := nn.(*ir.CallExpr)
	typecheck.AssertFixedCall(n)

	if ir.IsFuncPCIntrinsic(n) && ir.IsIfaceOfFunc(n.Args[0]) != nil {
		// For internal/abi.FuncPCABIxxx(fn), if fn is a defined function,
		// do not introduce temporaries here, so it is easier to rewrite it
		// to symbol address reference later in walk.
		return
	}

	// Order the function expression itself and then the arguments.
	n.Fun = o.expr(n.Fun, nil)
	o.exprList(n.Args)
}
   608  
   609  // mapAssign appends n to o.out.
   610  func (o *orderState) mapAssign(n ir.Node) {
   611  	switch n.Op() {
   612  	default:
   613  		base.Fatalf("order.mapAssign %v", n.Op())
   614  
   615  	case ir.OAS:
   616  		n := n.(*ir.AssignStmt)
   617  		if n.X.Op() == ir.OINDEXMAP {
   618  			n.Y = o.safeMapRHS(n.Y)
   619  		}
   620  		o.out = append(o.out, n)
   621  	case ir.OASOP:
   622  		n := n.(*ir.AssignOpStmt)
   623  		if n.X.Op() == ir.OINDEXMAP {
   624  			n.Y = o.safeMapRHS(n.Y)
   625  		}
   626  		o.out = append(o.out, n)
   627  	}
   628  }
   629  
   630  func (o *orderState) safeMapRHS(r ir.Node) ir.Node {
   631  	// Make sure we evaluate the RHS before starting the map insert.
   632  	// We need to make sure the RHS won't panic.  See issue 22881.
   633  	if r.Op() == ir.OAPPEND {
   634  		r := r.(*ir.CallExpr)
   635  		s := r.Args[1:]
   636  		for i, n := range s {
   637  			s[i] = o.cheapExpr(n)
   638  		}
   639  		return r
   640  	}
   641  	return o.cheapExpr(r)
   642  }
   643  
   644  // stmt orders the statement n, appending to o.out.
   645  func (o *orderState) stmt(n ir.Node) {
   646  	if n == nil {
   647  		return
   648  	}
   649  
   650  	lno := ir.SetPos(n)
   651  	o.init(n)
   652  
   653  	switch n.Op() {
   654  	default:
   655  		base.Fatalf("order.stmt %v", n.Op())
   656  
   657  	case ir.OINLMARK:
   658  		o.out = append(o.out, n)
   659  
   660  	case ir.OAS:
   661  		n := n.(*ir.AssignStmt)
   662  		t := o.markTemp()
   663  
   664  		// There's a delicate interaction here between two OINDEXMAP
   665  		// optimizations.
   666  		//
   667  		// First, we want to handle m[k] = append(m[k], ...) with a single
   668  		// runtime call to mapassign. This requires the m[k] expressions to
   669  		// satisfy ir.SameSafeExpr in walkAssign.
   670  		//
   671  		// But if k is a slow map key type that's passed by reference (e.g.,
   672  		// byte), then we want to avoid marking user variables as addrtaken,
   673  		// if that might prevent the compiler from keeping k in a register.
   674  		//
   675  		// TODO(mdempsky): It would be better if walk was responsible for
   676  		// inserting temporaries as needed.
   677  		mapAppend := n.X.Op() == ir.OINDEXMAP && n.Y.Op() == ir.OAPPEND &&
   678  			ir.SameSafeExpr(n.X, n.Y.(*ir.CallExpr).Args[0])
   679  
   680  		n.X = o.expr(n.X, nil)
   681  		if mapAppend {
   682  			indexLHS := n.X.(*ir.IndexExpr)
   683  			indexLHS.X = o.cheapExpr(indexLHS.X)
   684  			indexLHS.Index = o.cheapExpr(indexLHS.Index)
   685  
   686  			call := n.Y.(*ir.CallExpr)
   687  			arg0 := call.Args[0]
   688  			// ir.SameSafeExpr skips OCONVNOPs, so we must do the same here (#66096).
   689  			for arg0.Op() == ir.OCONVNOP {
   690  				arg0 = arg0.(*ir.ConvExpr).X
   691  			}
   692  			indexRHS := arg0.(*ir.IndexExpr)
   693  			indexRHS.X = indexLHS.X
   694  			indexRHS.Index = indexLHS.Index
   695  
   696  			o.exprList(call.Args[1:])
   697  		} else {
   698  			n.Y = o.expr(n.Y, n.X)
   699  		}
   700  		o.mapAssign(n)
   701  		o.popTemp(t)
   702  
   703  	case ir.OASOP:
   704  		n := n.(*ir.AssignOpStmt)
   705  		t := o.markTemp()
   706  		n.X = o.expr(n.X, nil)
   707  		n.Y = o.expr(n.Y, nil)
   708  
   709  		if base.Flag.Cfg.Instrumenting || n.X.Op() == ir.OINDEXMAP && (n.AsOp == ir.ODIV || n.AsOp == ir.OMOD) {
   710  			// Rewrite m[k] op= r into m[k] = m[k] op r so
   711  			// that we can ensure that if op panics
   712  			// because r is zero, the panic happens before
   713  			// the map assignment.
   714  			// DeepCopy is a big hammer here, but safeExpr
   715  			// makes sure there is nothing too deep being copied.
   716  			l1 := o.safeExpr(n.X)
   717  			l2 := ir.DeepCopy(src.NoXPos, l1)
   718  			if l2.Op() == ir.OINDEXMAP {
   719  				l2 := l2.(*ir.IndexExpr)
   720  				l2.Assigned = false
   721  			}
   722  			l2 = o.copyExpr(l2)
   723  			r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
   724  			as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
   725  			o.mapAssign(as)
   726  			o.popTemp(t)
   727  			return
   728  		}
   729  
   730  		o.mapAssign(n)
   731  		o.popTemp(t)
   732  
   733  	case ir.OAS2:
   734  		n := n.(*ir.AssignListStmt)
   735  		t := o.markTemp()
   736  		o.exprList(n.Lhs)
   737  		o.exprList(n.Rhs)
   738  		o.out = append(o.out, n)
   739  		o.popTemp(t)
   740  
   741  	// Special: avoid copy of func call n.Right
   742  	case ir.OAS2FUNC:
   743  		n := n.(*ir.AssignListStmt)
   744  		t := o.markTemp()
   745  		o.exprList(n.Lhs)
   746  		call := n.Rhs[0]
   747  		o.init(call)
   748  		if ic, ok := call.(*ir.InlinedCallExpr); ok {
   749  			o.stmtList(ic.Body)
   750  
   751  			n.SetOp(ir.OAS2)
   752  			n.Rhs = ic.ReturnVars
   753  
   754  			o.exprList(n.Rhs)
   755  			o.out = append(o.out, n)
   756  		} else {
   757  			o.call(call)
   758  			o.as2func(n)
   759  		}
   760  		o.popTemp(t)
   761  
   762  	// Special: use temporary variables to hold result,
   763  	// so that runtime can take address of temporary.
   764  	// No temporary for blank assignment.
   765  	//
   766  	// OAS2MAPR: make sure key is addressable if needed,
   767  	//           and make sure OINDEXMAP is not copied out.
   768  	case ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OAS2MAPR:
   769  		n := n.(*ir.AssignListStmt)
   770  		t := o.markTemp()
   771  		o.exprList(n.Lhs)
   772  
   773  		switch r := n.Rhs[0]; r.Op() {
   774  		case ir.ODOTTYPE2:
   775  			r := r.(*ir.TypeAssertExpr)
   776  			r.X = o.expr(r.X, nil)
   777  		case ir.ODYNAMICDOTTYPE2:
   778  			r := r.(*ir.DynamicTypeAssertExpr)
   779  			r.X = o.expr(r.X, nil)
   780  			r.RType = o.expr(r.RType, nil)
   781  			r.ITab = o.expr(r.ITab, nil)
   782  		case ir.ORECV:
   783  			r := r.(*ir.UnaryExpr)
   784  			r.X = o.expr(r.X, nil)
   785  		case ir.OINDEXMAP:
   786  			r := r.(*ir.IndexExpr)
   787  			r.X = o.expr(r.X, nil)
   788  			r.Index = o.expr(r.Index, nil)
   789  			// See similar conversion for OINDEXMAP below.
   790  			_ = mapKeyReplaceStrConv(r.Index)
   791  			r.Index = o.mapKeyTemp(r.Pos(), r.X.Type(), r.Index)
   792  		default:
   793  			base.Fatalf("order.stmt: %v", r.Op())
   794  		}
   795  
   796  		o.as2ok(n)
   797  		o.popTemp(t)
   798  
   799  	// Special: does not save n onto out.
   800  	case ir.OBLOCK:
   801  		n := n.(*ir.BlockStmt)
   802  		o.stmtList(n.List)
   803  
   804  	// Special: n->left is not an expression; save as is.
   805  	case ir.OBREAK,
   806  		ir.OCONTINUE,
   807  		ir.ODCL,
   808  		ir.OFALL,
   809  		ir.OGOTO,
   810  		ir.OLABEL,
   811  		ir.OTAILCALL:
   812  		o.out = append(o.out, n)
   813  
   814  	// Special: handle call arguments.
   815  	case ir.OCALLFUNC, ir.OCALLINTER:
   816  		n := n.(*ir.CallExpr)
   817  		t := o.markTemp()
   818  		o.call(n)
   819  		o.out = append(o.out, n)
   820  		o.popTemp(t)
   821  
   822  	case ir.OINLCALL:
   823  		n := n.(*ir.InlinedCallExpr)
   824  		o.stmtList(n.Body)
   825  
   826  		// discard results; double-check for no side effects
   827  		for _, result := range n.ReturnVars {
   828  			if staticinit.AnySideEffects(result) {
   829  				base.FatalfAt(result.Pos(), "inlined call result has side effects: %v", result)
   830  			}
   831  		}
   832  
   833  	case ir.OCHECKNIL, ir.OCLEAR, ir.OCLOSE, ir.OPANIC, ir.ORECV:
   834  		n := n.(*ir.UnaryExpr)
   835  		t := o.markTemp()
   836  		n.X = o.expr(n.X, nil)
   837  		o.out = append(o.out, n)
   838  		o.popTemp(t)
   839  
   840  	case ir.OCOPY:
   841  		n := n.(*ir.BinaryExpr)
   842  		t := o.markTemp()
   843  		n.X = o.expr(n.X, nil)
   844  		n.Y = o.expr(n.Y, nil)
   845  		o.out = append(o.out, n)
   846  		o.popTemp(t)
   847  
   848  	case ir.OPRINT, ir.OPRINTLN, ir.ORECOVER:
   849  		n := n.(*ir.CallExpr)
   850  		t := o.markTemp()
   851  		o.call(n)
   852  		o.out = append(o.out, n)
   853  		o.popTemp(t)
   854  
   855  	// Special: order arguments to inner call but not call itself.
   856  	case ir.ODEFER, ir.OGO:
   857  		n := n.(*ir.GoDeferStmt)
   858  		t := o.markTemp()
   859  		o.init(n.Call)
   860  		o.call(n.Call)
   861  		o.out = append(o.out, n)
   862  		o.popTemp(t)
   863  
   864  	case ir.ODELETE:
   865  		n := n.(*ir.CallExpr)
   866  		t := o.markTemp()
   867  		n.Args[0] = o.expr(n.Args[0], nil)
   868  		n.Args[1] = o.expr(n.Args[1], nil)
   869  		n.Args[1] = o.mapKeyTemp(n.Pos(), n.Args[0].Type(), n.Args[1])
   870  		o.out = append(o.out, n)
   871  		o.popTemp(t)
   872  
   873  	// Clean temporaries from condition evaluation at
   874  	// beginning of loop body and after for statement.
   875  	case ir.OFOR:
   876  		n := n.(*ir.ForStmt)
   877  		t := o.markTemp()
   878  		n.Cond = o.exprInPlace(n.Cond)
   879  		orderBlock(&n.Body, o.free)
   880  		n.Post = orderStmtInPlace(n.Post, o.free)
   881  		o.out = append(o.out, n)
   882  		o.popTemp(t)
   883  
   884  	// Clean temporaries from condition at
   885  	// beginning of both branches.
   886  	case ir.OIF:
   887  		n := n.(*ir.IfStmt)
   888  		t := o.markTemp()
   889  		n.Cond = o.exprInPlace(n.Cond)
   890  		o.popTemp(t)
   891  		orderBlock(&n.Body, o.free)
   892  		orderBlock(&n.Else, o.free)
   893  		o.out = append(o.out, n)
   894  
   895  	case ir.ORANGE:
   896  		// n.Right is the expression being ranged over.
   897  		// order it, and then make a copy if we need one.
   898  		// We almost always do, to ensure that we don't
   899  		// see any value changes made during the loop.
   900  		// Usually the copy is cheap (e.g., array pointer,
   901  		// chan, slice, string are all tiny).
   902  		// The exception is ranging over an array value
   903  		// (not a slice, not a pointer to array),
   904  		// which must make a copy to avoid seeing updates made during
   905  		// the range body. Ranging over an array value is uncommon though.
   906  
   907  		// Mark []byte(str) range expression to reuse string backing storage.
   908  		// It is safe because the storage cannot be mutated.
   909  		n := n.(*ir.RangeStmt)
   910  		if x, ok := n.X.(*ir.ConvExpr); ok {
   911  			switch x.Op() {
   912  			case ir.OSTR2BYTES:
   913  				x.SetOp(ir.OSTR2BYTESTMP)
   914  				fallthrough
   915  			case ir.OSTR2BYTESTMP:
   916  				x.MarkNonNil() // "range []byte(nil)" is fine
   917  			}
   918  		}
   919  
   920  		t := o.markTemp()
   921  		n.X = o.expr(n.X, nil)
   922  
   923  		orderBody := true
   924  		xt := typecheck.RangeExprType(n.X.Type())
   925  		switch k := xt.Kind(); {
   926  		default:
   927  			base.Fatalf("order.stmt range %v", n.Type())
   928  
   929  		case types.IsInt[k]:
   930  			// Used only once, no need to copy.
   931  
   932  		case k == types.TARRAY, k == types.TSLICE:
   933  			if n.Value == nil || ir.IsBlank(n.Value) {
   934  				// for i := range x will only use x once, to compute len(x).
   935  				// No need to copy it.
   936  				break
   937  			}
   938  			fallthrough
   939  
   940  		case k == types.TCHAN, k == types.TSTRING:
   941  			// chan, string, slice, array ranges use value multiple times.
   942  			// make copy.
   943  			r := n.X
   944  
   945  			if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
   946  				r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
   947  				r.SetType(types.Types[types.TSTRING])
   948  				r = typecheck.Expr(r)
   949  			}
   950  
   951  			n.X = o.copyExpr(r)
   952  
   953  		case k == types.TMAP:
   954  			if isMapClear(n) {
   955  				// Preserve the body of the map clear pattern so it can
   956  				// be detected during walk. The loop body will not be used
   957  				// when optimizing away the range loop to a runtime call.
   958  				orderBody = false
   959  				break
   960  			}
   961  
   962  			// copy the map value in case it is a map literal.
   963  			// TODO(rsc): Make tmp = literal expressions reuse tmp.
   964  			// For maps tmp is just one word so it hardly matters.
   965  			r := n.X
   966  			n.X = o.copyExpr(r)
   967  
   968  			// n.Prealloc is the temp for the iterator.
   969  			// MapIterType contains pointers and needs to be zeroed.
   970  			n.Prealloc = o.newTemp(reflectdata.MapIterType(), true)
   971  		}
   972  		n.Key = o.exprInPlace(n.Key)
   973  		n.Value = o.exprInPlace(n.Value)
   974  		if orderBody {
   975  			orderBlock(&n.Body, o.free)
   976  		}
   977  		o.out = append(o.out, n)
   978  		o.popTemp(t)
   979  
   980  	case ir.ORETURN:
   981  		n := n.(*ir.ReturnStmt)
   982  		o.exprList(n.Results)
   983  		o.out = append(o.out, n)
   984  
   985  	// Special: clean case temporaries in each block entry.
   986  	// Select must enter one of its blocks, so there is no
   987  	// need for a cleaning at the end.
   988  	// Doubly special: evaluation order for select is stricter
   989  	// than ordinary expressions. Even something like p.c
   990  	// has to be hoisted into a temporary, so that it cannot be
   991  	// reordered after the channel evaluation for a different
   992  	// case (if p were nil, then the timing of the fault would
   993  	// give this away).
   994  	case ir.OSELECT:
   995  		n := n.(*ir.SelectStmt)
   996  		t := o.markTemp()
   997  		for _, ncas := range n.Cases {
   998  			r := ncas.Comm
   999  			ir.SetPos(ncas)
  1000  
  1001  			// Append any new body prologue to ninit.
  1002  			// The next loop will insert ninit into nbody.
  1003  			if len(ncas.Init()) != 0 {
  1004  				base.Fatalf("order select ninit")
  1005  			}
  1006  			if r == nil {
  1007  				continue
  1008  			}
  1009  			switch r.Op() {
  1010  			default:
  1011  				ir.Dump("select case", r)
  1012  				base.Fatalf("unknown op in select %v", r.Op())
  1013  
  1014  			case ir.OSELRECV2:
  1015  				// case x, ok = <-c
  1016  				r := r.(*ir.AssignListStmt)
  1017  				recv := r.Rhs[0].(*ir.UnaryExpr)
  1018  				recv.X = o.expr(recv.X, nil)
  1019  				if !ir.IsAutoTmp(recv.X) {
  1020  					recv.X = o.copyExpr(recv.X)
  1021  				}
  1022  				init := ir.TakeInit(r)
  1023  
  1024  				colas := r.Def
  1025  				do := func(i int, t *types.Type) {
  1026  					n := r.Lhs[i]
  1027  					if ir.IsBlank(n) {
  1028  						return
  1029  					}
  1030  					// If this is case x := <-ch or case x, y := <-ch, the case has
  1031  					// the ODCL nodes to declare x and y. We want to delay that
  1032  					// declaration (and possible allocation) until inside the case body.
  1033  					// Delete the ODCL nodes here and recreate them inside the body below.
  1034  					if colas {
  1035  						if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
  1036  							init = init[1:]
  1037  
  1038  							// iimport may have added a default initialization assignment,
  1039  							// due to how it handles ODCL statements.
  1040  							if len(init) > 0 && init[0].Op() == ir.OAS && init[0].(*ir.AssignStmt).X == n {
  1041  								init = init[1:]
  1042  							}
  1043  						}
  1044  						dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
  1045  						ncas.PtrInit().Append(dcl)
  1046  					}
  1047  					tmp := o.newTemp(t, t.HasPointers())
  1048  					as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
  1049  					ncas.PtrInit().Append(as)
  1050  					r.Lhs[i] = tmp
  1051  				}
  1052  				do(0, recv.X.Type().Elem())
  1053  				do(1, types.Types[types.TBOOL])
  1054  				if len(init) != 0 {
  1055  					ir.DumpList("ninit", init)
  1056  					base.Fatalf("ninit on select recv")
  1057  				}
  1058  				orderBlock(ncas.PtrInit(), o.free)
  1059  
  1060  			case ir.OSEND:
  1061  				r := r.(*ir.SendStmt)
  1062  				if len(r.Init()) != 0 {
  1063  					ir.DumpList("ninit", r.Init())
  1064  					base.Fatalf("ninit on select send")
  1065  				}
  1066  
  1067  				// case c <- x
  1068  				// r->left is c, r->right is x, both are always evaluated.
  1069  				r.Chan = o.expr(r.Chan, nil)
  1070  
  1071  				if !ir.IsAutoTmp(r.Chan) {
  1072  					r.Chan = o.copyExpr(r.Chan)
  1073  				}
  1074  				r.Value = o.expr(r.Value, nil)
  1075  				if !ir.IsAutoTmp(r.Value) {
  1076  					r.Value = o.copyExpr(r.Value)
  1077  				}
  1078  			}
  1079  		}
  1080  		// Now that we have accumulated all the temporaries, clean them.
  1081  		// Also insert any ninit queued during the previous loop.
  1082  		// (The temporary cleaning must follow that ninit work.)
  1083  		for _, cas := range n.Cases {
  1084  			orderBlock(&cas.Body, o.free)
  1085  
  1086  			// TODO(mdempsky): Is this actually necessary?
  1087  			// walkSelect appears to walk Ninit.
  1088  			cas.Body.Prepend(ir.TakeInit(cas)...)
  1089  		}
  1090  
  1091  		o.out = append(o.out, n)
  1092  		o.popTemp(t)
  1093  
  1094  	// Special: value being sent is passed as a pointer; make it addressable.
  1095  	case ir.OSEND:
  1096  		n := n.(*ir.SendStmt)
  1097  		t := o.markTemp()
  1098  		n.Chan = o.expr(n.Chan, nil)
  1099  		n.Value = o.expr(n.Value, nil)
  1100  		if base.Flag.Cfg.Instrumenting {
  1101  			// Force copying to the stack so that (chan T)(nil) <- x
  1102  			// is still instrumented as a read of x.
  1103  			n.Value = o.copyExpr(n.Value)
  1104  		} else {
  1105  			n.Value = o.addrTemp(n.Value)
  1106  		}
  1107  		o.out = append(o.out, n)
  1108  		o.popTemp(t)
  1109  
  1110  	// TODO(rsc): Clean temporaries more aggressively.
  1111  	// Note that because walkSwitch will rewrite some of the
  1112  	// switch into a binary search, this is not as easy as it looks.
  1113  	// (If we ran that code here we could invoke order.stmt on
  1114  	// the if-else chain instead.)
  1115  	// For now just clean all the temporaries at the end.
  1116  	// In practice that's fine.
  1117  	case ir.OSWITCH:
  1118  		n := n.(*ir.SwitchStmt)
  1119  		if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) {
  1120  			// Add empty "default:" case for instrumentation.
  1121  			n.Cases = append(n.Cases, ir.NewCaseStmt(base.Pos, nil, nil))
  1122  		}
  1123  
  1124  		t := o.markTemp()
  1125  		n.Tag = o.expr(n.Tag, nil)
  1126  		for _, ncas := range n.Cases {
  1127  			o.exprListInPlace(ncas.List)
  1128  			orderBlock(&ncas.Body, o.free)
  1129  		}
  1130  
  1131  		o.out = append(o.out, n)
  1132  		o.popTemp(t)
  1133  	}
  1134  
  1135  	base.Pos = lno
  1136  }
  1137  
  1138  func hasDefaultCase(n *ir.SwitchStmt) bool {
  1139  	for _, ncas := range n.Cases {
  1140  		if len(ncas.List) == 0 {
  1141  			return true
  1142  		}
  1143  	}
  1144  	return false
  1145  }
  1146  
  1147  // exprList orders the expression list l into o.
  1148  func (o *orderState) exprList(l ir.Nodes) {
  1149  	s := l
  1150  	for i := range s {
  1151  		s[i] = o.expr(s[i], nil)
  1152  	}
  1153  }
  1154  
  1155  // exprListInPlace orders the expression list l but saves
  1156  // the side effects on the individual expression ninit lists.
  1157  func (o *orderState) exprListInPlace(l ir.Nodes) {
  1158  	s := l
  1159  	for i := range s {
  1160  		s[i] = o.exprInPlace(s[i])
  1161  	}
  1162  }
  1163  
// exprNoLHS orders n as an expression with no assignment destination.
// It exists so a single method value can be cached in o.edit and passed
// to ir.EditChildren (see the default case of expr1).
func (o *orderState) exprNoLHS(n ir.Node) ir.Node {
	return o.expr(n, nil)
}
  1167  
  1168  // expr orders a single expression, appending side
  1169  // effects to o.out as needed.
  1170  // If this is part of an assignment lhs = *np, lhs is given.
  1171  // Otherwise lhs == nil. (When lhs != nil it may be possible
  1172  // to avoid copying the result of the expression to a temporary.)
  1173  // The result of expr MUST be assigned back to n, e.g.
  1174  //
  1175  //	n.Left = o.expr(n.Left, lhs)
  1176  func (o *orderState) expr(n, lhs ir.Node) ir.Node {
  1177  	if n == nil {
  1178  		return n
  1179  	}
  1180  	lno := ir.SetPos(n)
  1181  	n = o.expr1(n, lhs)
  1182  	base.Pos = lno
  1183  	return n
  1184  }
  1185  
// expr1 implements expr: it orders n, appending any needed side-effect
// statements to o.out, and returns the (possibly replaced) expression.
// The caller must assign the result back into the slot n came from.
// lhs, when non-nil, is the destination of an enclosing assignment and
// may allow skipping a copy of the result to a temporary.
func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
	o.init(n)

	switch n.Op() {
	default:
		if o.edit == nil {
			o.edit = o.exprNoLHS // create closure once
		}
		ir.EditChildren(n, o.edit)
		return n

	// Addition of strings turns into a function call.
	// Allocate a temporary to hold the strings.
	// Fewer than 5 strings use direct runtime helpers.
	case ir.OADDSTR:
		n := n.(*ir.AddStringExpr)
		o.exprList(n.List)

		if len(n.List) > 5 {
			t := types.NewArray(types.Types[types.TSTRING], int64(len(n.List)))
			n.Prealloc = o.newTemp(t, false)
		}

		// Mark string(byteSlice) arguments to reuse byteSlice backing
		// buffer during conversion. String concatenation does not
		// memorize the strings for later use, so it is safe.
		// However, we can do it only if there is at least one non-empty string literal.
		// Otherwise if all other arguments are empty strings,
		// concatstrings will return the reference to the temp string
		// to the caller.
		hasbyte := false

		haslit := false
		for _, n1 := range n.List {
			hasbyte = hasbyte || n1.Op() == ir.OBYTES2STR
			haslit = haslit || n1.Op() == ir.OLITERAL && len(ir.StringVal(n1)) != 0
		}

		if haslit && hasbyte {
			for _, n2 := range n.List {
				if n2.Op() == ir.OBYTES2STR {
					n2 := n2.(*ir.ConvExpr)
					n2.SetOp(ir.OBYTES2STRTMP)
				}
			}
		}
		return n

	case ir.OINDEXMAP:
		n := n.(*ir.IndexExpr)
		n.X = o.expr(n.X, nil)
		n.Index = o.expr(n.Index, nil)
		needCopy := false

		if !n.Assigned {
			// Enforce that any []byte slices we are not copying
			// can not be changed before the map index by forcing
			// the map index to happen immediately following the
			// conversions. See copyExpr a few lines below.
			needCopy = mapKeyReplaceStrConv(n.Index)

			if base.Flag.Cfg.Instrumenting {
				// Race detector needs the copy.
				needCopy = true
			}
		}

		// key may need to be addressable
		n.Index = o.mapKeyTemp(n.Pos(), n.X.Type(), n.Index)
		if needCopy {
			return o.copyExpr(n)
		}
		return n

	// concrete type (not interface) argument might need an addressable
	// temporary to pass to the runtime conversion routine.
	case ir.OCONVIFACE:
		n := n.(*ir.ConvExpr)
		n.X = o.expr(n.X, nil)
		if n.X.Type().IsInterface() {
			return n
		}
		if _, _, needsaddr := dataWordFuncName(n.X.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
			// Need a temp if we need to pass the address to the conversion function.
			// We also process static composite literal node here, making a named static global
			// whose address we can put directly in an interface (see OCONVIFACE case in walk).
			n.X = o.addrTemp(n.X)
		}
		return n

	case ir.OCONVNOP:
		n := n.(*ir.ConvExpr)
		if n.X.Op() == ir.OCALLMETH {
			base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
		}
		if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
			call := n.X.(*ir.CallExpr)
			// When reordering unsafe.Pointer(f()) into a separate
			// statement, the conversion and function call must stay
			// together. See golang.org/issue/15329.
			o.init(call)
			o.call(call)
			if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
				return o.copyExpr(n)
			}
		} else {
			n.X = o.expr(n.X, nil)
		}
		return n

	case ir.OANDAND, ir.OOROR:
		// ... = LHS && RHS
		//
		// var r bool
		// r = LHS
		// if r {       // or !r, for OROR
		//     r = RHS
		// }
		// ... = r

		n := n.(*ir.LogicalExpr)
		r := o.newTemp(n.Type(), false)

		// Evaluate left-hand side.
		lhs := o.expr(n.X, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))

		// Evaluate right-hand side, save generated code.
		saveout := o.out
		o.out = nil
		t := o.markTemp()
		o.edge()
		rhs := o.expr(n.Y, nil)
		o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
		o.popTemp(t)
		gen := o.out
		o.out = saveout

		// If left-hand side doesn't cause a short-circuit, issue right-hand side.
		nif := ir.NewIfStmt(base.Pos, r, nil, nil)
		if n.Op() == ir.OANDAND {
			nif.Body = gen
		} else {
			nif.Else = gen
		}
		o.out = append(o.out, nif)
		return r

	case ir.OCALLMETH:
		base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
		panic("unreachable")

	case ir.OCALLFUNC,
		ir.OCALLINTER,
		ir.OCAP,
		ir.OCOMPLEX,
		ir.OCOPY,
		ir.OIMAG,
		ir.OLEN,
		ir.OMAKECHAN,
		ir.OMAKEMAP,
		ir.OMAKESLICE,
		ir.OMAKESLICECOPY,
		ir.OMAX,
		ir.OMIN,
		ir.ONEW,
		ir.OREAL,
		ir.ORECOVER,
		ir.OSTR2BYTES,
		ir.OSTR2BYTESTMP,
		ir.OSTR2RUNES:

		if isRuneCount(n) {
			// len([]rune(s)) is rewritten to runtime.countrunes(s) later.
			conv := n.(*ir.UnaryExpr).X.(*ir.ConvExpr)
			conv.X = o.expr(conv.X, nil)
		} else {
			o.call(n)
		}

		// Spill the result to a temporary unless it is being assigned
		// directly to a named variable (and instrumentation is off).
		if lhs == nil || lhs.Op() != ir.ONAME || base.Flag.Cfg.Instrumenting {
			return o.copyExpr(n)
		}
		return n

	case ir.OINLCALL:
		n := n.(*ir.InlinedCallExpr)
		o.stmtList(n.Body)
		return n.SingleResult()

	case ir.OAPPEND:
		// Check for append(x, make([]T, y)...) .
		n := n.(*ir.CallExpr)
		if isAppendOfMake(n) {
			n.Args[0] = o.expr(n.Args[0], nil) // order x
			mk := n.Args[1].(*ir.MakeExpr)
			mk.Len = o.expr(mk.Len, nil) // order y
		} else {
			o.exprList(n.Args)
		}

		// Copy the result unless it is assigned back to the same safe
		// expression being appended to (e.g. x = append(x, ...)).
		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.Args[0]) {
			return o.copyExpr(n)
		}
		return n

	case ir.OSLICE, ir.OSLICEARR, ir.OSLICESTR, ir.OSLICE3, ir.OSLICE3ARR:
		n := n.(*ir.SliceExpr)
		n.X = o.expr(n.X, nil)
		n.Low = o.cheapExpr(o.expr(n.Low, nil))
		n.High = o.cheapExpr(o.expr(n.High, nil))
		n.Max = o.cheapExpr(o.expr(n.Max, nil))
		// As with OAPPEND above: avoid the copy when assigning back
		// to the sliced expression itself (e.g. x = x[i:j]).
		if lhs == nil || lhs.Op() != ir.ONAME && !ir.SameSafeExpr(lhs, n.X) {
			return o.copyExpr(n)
		}
		return n

	case ir.OCLOSURE:
		// Preallocate storage for transient closures that capture variables.
		n := n.(*ir.ClosureExpr)
		if n.Transient() && len(n.Func.ClosureVars) > 0 {
			n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
		}
		return n

	case ir.OMETHVALUE:
		n := n.(*ir.SelectorExpr)
		n.X = o.expr(n.X, nil)
		if n.Transient() {
			t := typecheck.MethodValueType(n)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.OSLICELIT:
		n := n.(*ir.CompLitExpr)
		o.exprList(n.List)
		if n.Transient() {
			t := types.NewArray(n.Type().Elem(), n.Len)
			n.Prealloc = o.newTemp(t, false)
		}
		return n

	case ir.ODOTTYPE, ir.ODOTTYPE2:
		n := n.(*ir.TypeAssertExpr)
		n.X = o.expr(n.X, nil)
		if !types.IsDirectIface(n.Type()) || base.Flag.Cfg.Instrumenting {
			return o.copyExprClear(n)
		}
		return n

	case ir.ORECV:
		n := n.(*ir.UnaryExpr)
		n.X = o.expr(n.X, nil)
		return o.copyExprClear(n)

	case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
		n := n.(*ir.BinaryExpr)
		n.X = o.expr(n.X, nil)
		n.Y = o.expr(n.Y, nil)

		t := n.X.Type()
		switch {
		case t.IsString():
			// Mark string(byteSlice) arguments to reuse byteSlice backing
			// buffer during conversion. String comparison does not
			// memorize the strings for later use, so it is safe.
			if n.X.Op() == ir.OBYTES2STR {
				n.X.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}
			if n.Y.Op() == ir.OBYTES2STR {
				n.Y.(*ir.ConvExpr).SetOp(ir.OBYTES2STRTMP)
			}

		case t.IsStruct() || t.IsArray():
			// for complex comparisons, we need both args to be
			// addressable so we can pass them to the runtime.
			n.X = o.addrTemp(n.X)
			n.Y = o.addrTemp(n.Y)
		}
		return n

	case ir.OMAPLIT:
		// Order map by converting:
		//   map[int]int{
		//     a(): b(),
		//     c(): d(),
		//     e(): f(),
		//   }
		// to
		//   m := map[int]int{}
		//   m[a()] = b()
		//   m[c()] = d()
		//   m[e()] = f()
		// Then order the result.
		// Without this special case, order would otherwise compute all
		// the keys and values before storing any of them to the map.
		// See issue 26552.
		n := n.(*ir.CompLitExpr)
		entries := n.List
		statics := entries[:0]
		var dynamics []*ir.KeyExpr
		for _, r := range entries {
			r := r.(*ir.KeyExpr)

			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			// Recursively ordering some static entries can change them to dynamic;
			// e.g., OCONVIFACE nodes. See #31777.
			r = o.expr(r, nil).(*ir.KeyExpr)
			if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
				dynamics = append(dynamics, r)
				continue
			}

			statics = append(statics, r)
		}
		n.List = statics

		if len(dynamics) == 0 {
			return n
		}

		// Emit the creation of the map (with all its static entries).
		m := o.newTemp(n.Type(), false)
		as := ir.NewAssignStmt(base.Pos, m, n)
		typecheck.Stmt(as)
		o.stmt(as)

		// Emit eval+insert of dynamic entries, one at a time.
		for _, r := range dynamics {
			lhs := typecheck.AssignExpr(ir.NewIndexExpr(base.Pos, m, r.Key)).(*ir.IndexExpr)
			base.AssertfAt(lhs.Op() == ir.OINDEXMAP, lhs.Pos(), "want OINDEXMAP, have %+v", lhs)
			lhs.RType = n.RType

			as := ir.NewAssignStmt(base.Pos, lhs, r.Value)
			typecheck.Stmt(as)
			o.stmt(as)
		}

		// Remember that we issued these assignments so we can include that count
		// in the map alloc hint.
		// We're assuming here that all the keys in the map literal are distinct.
		// If any are equal, this will be an overcount. Probably not worth accounting
		// for that, as equal keys in map literals are rare, and at worst we waste
		// a bit of space.
		n.Len += int64(len(dynamics))

		return m
	}

	// No return - type-assertions above. Each case must return for itself.
}
  1541  
  1542  // as2func orders OAS2FUNC nodes. It creates temporaries to ensure left-to-right assignment.
  1543  // The caller should order the right-hand side of the assignment before calling order.as2func.
  1544  // It rewrites,
  1545  //
  1546  //	a, b, a = ...
  1547  //
  1548  // as
  1549  //
  1550  //	tmp1, tmp2, tmp3 = ...
  1551  //	a, b, a = tmp1, tmp2, tmp3
  1552  //
  1553  // This is necessary to ensure left to right assignment order.
  1554  func (o *orderState) as2func(n *ir.AssignListStmt) {
  1555  	results := n.Rhs[0].Type()
  1556  	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
  1557  	for i, nl := range n.Lhs {
  1558  		if !ir.IsBlank(nl) {
  1559  			typ := results.Field(i).Type
  1560  			tmp := o.newTemp(typ, typ.HasPointers())
  1561  			n.Lhs[i] = tmp
  1562  			as.Lhs = append(as.Lhs, nl)
  1563  			as.Rhs = append(as.Rhs, tmp)
  1564  		}
  1565  	}
  1566  
  1567  	o.out = append(o.out, n)
  1568  	o.stmt(typecheck.Stmt(as))
  1569  }
  1570  
  1571  // as2ok orders OAS2XXX with ok.
  1572  // Just like as2func, this also adds temporaries to ensure left-to-right assignment.
  1573  func (o *orderState) as2ok(n *ir.AssignListStmt) {
  1574  	as := ir.NewAssignListStmt(n.Pos(), ir.OAS2, nil, nil)
  1575  
  1576  	do := func(i int, typ *types.Type) {
  1577  		if nl := n.Lhs[i]; !ir.IsBlank(nl) {
  1578  			var tmp ir.Node = o.newTemp(typ, typ.HasPointers())
  1579  			n.Lhs[i] = tmp
  1580  			as.Lhs = append(as.Lhs, nl)
  1581  			if i == 1 {
  1582  				// The "ok" result is an untyped boolean according to the Go
  1583  				// spec. We need to explicitly convert it to the LHS type in
  1584  				// case the latter is a defined boolean type (#8475).
  1585  				tmp = typecheck.Conv(tmp, nl.Type())
  1586  			}
  1587  			as.Rhs = append(as.Rhs, tmp)
  1588  		}
  1589  	}
  1590  
  1591  	do(0, n.Rhs[0].Type())
  1592  	do(1, types.Types[types.TBOOL])
  1593  
  1594  	o.out = append(o.out, n)
  1595  	o.stmt(typecheck.Stmt(as))
  1596  }
  1597  

View as plain text