Source file src/cmd/compile/internal/riscv64/ssa.go

     1  // Copyright 2016 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  package riscv64
     6  
     7  import (
     8  	"cmd/compile/internal/base"
     9  	"cmd/compile/internal/ir"
    10  	"cmd/compile/internal/logopt"
    11  	"cmd/compile/internal/objw"
    12  	"cmd/compile/internal/ssa"
    13  	"cmd/compile/internal/ssagen"
    14  	"cmd/compile/internal/types"
    15  	"cmd/internal/obj"
    16  	"cmd/internal/obj/riscv"
    17  	"internal/abi"
    18  )
    19  
    20  // ssaRegToReg maps ssa register numbers to obj register numbers.
    21  var ssaRegToReg = []int16{
    22  	riscv.REG_X0,
    23  	// X1 (LR): unused
    24  	riscv.REG_X2,
    25  	riscv.REG_X3,
    26  	riscv.REG_X4,
    27  	riscv.REG_X5,
    28  	riscv.REG_X6,
    29  	riscv.REG_X7,
    30  	riscv.REG_X8,
    31  	riscv.REG_X9,
    32  	riscv.REG_X10,
    33  	riscv.REG_X11,
    34  	riscv.REG_X12,
    35  	riscv.REG_X13,
    36  	riscv.REG_X14,
    37  	riscv.REG_X15,
    38  	riscv.REG_X16,
    39  	riscv.REG_X17,
    40  	riscv.REG_X18,
    41  	riscv.REG_X19,
    42  	riscv.REG_X20,
    43  	riscv.REG_X21,
    44  	riscv.REG_X22,
    45  	riscv.REG_X23,
    46  	riscv.REG_X24,
    47  	riscv.REG_X25,
    48  	riscv.REG_X26,
    49  	riscv.REG_X27,
    50  	riscv.REG_X28,
    51  	riscv.REG_X29,
    52  	riscv.REG_X30,
    53  	riscv.REG_X31,
    54  	riscv.REG_F0,
    55  	riscv.REG_F1,
    56  	riscv.REG_F2,
    57  	riscv.REG_F3,
    58  	riscv.REG_F4,
    59  	riscv.REG_F5,
    60  	riscv.REG_F6,
    61  	riscv.REG_F7,
    62  	riscv.REG_F8,
    63  	riscv.REG_F9,
    64  	riscv.REG_F10,
    65  	riscv.REG_F11,
    66  	riscv.REG_F12,
    67  	riscv.REG_F13,
    68  	riscv.REG_F14,
    69  	riscv.REG_F15,
    70  	riscv.REG_F16,
    71  	riscv.REG_F17,
    72  	riscv.REG_F18,
    73  	riscv.REG_F19,
    74  	riscv.REG_F20,
    75  	riscv.REG_F21,
    76  	riscv.REG_F22,
    77  	riscv.REG_F23,
    78  	riscv.REG_F24,
    79  	riscv.REG_F25,
    80  	riscv.REG_F26,
    81  	riscv.REG_F27,
    82  	riscv.REG_F28,
    83  	riscv.REG_F29,
    84  	riscv.REG_F30,
    85  	riscv.REG_F31,
    86  	0, // SB isn't a real register.  We fill an Addr.Reg field with 0 in this case.
    87  }
    88  
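         // loadByType returns the load instruction of the given type.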
    89  func loadByType(t *types.Type) obj.As {
    90  	width := t.Size()
    91  
    92  	if t.IsFloat() {
    93  		switch width {
    94  		case 4:
    95  			return riscv.AMOVF
    96  		case 8:
    97  			return riscv.AMOVD
    98  		default:
    99  			base.Fatalf("unknown float width for load %d in type %v", width, t)
   100  			return 0
   101  		}
   102  	}
   103  
   104  	switch width {
   105  	case 1:
   106  		if t.IsSigned() {
   107  			return riscv.AMOVB
   108  		} else {
   109  			return riscv.AMOVBU
   110  		}
   111  	case 2:
   112  		if t.IsSigned() {
   113  			return riscv.AMOVH
   114  		} else {
   115  			return riscv.AMOVHU
   116  		}
   117  	case 4:
   118  		if t.IsSigned() {
   119  			return riscv.AMOVW
   120  		} else {
   121  			return riscv.AMOVWU
   122  		}
   123  	case 8:
   124  		return riscv.AMOV
   125  	default:
   126  		base.Fatalf("unknown width for load %d in type %v", width, t)
   127  		return 0
   128  	}
   129  }
   130  
   131  // storeByType returns the store instruction of the given type.
   132  func storeByType(t *types.Type) obj.As {
   133  	width := t.Size()
   134  
   135  	if t.IsFloat() {
   136  		switch width {
   137  		case 4:
   138  			return riscv.AMOVF
   139  		case 8:
   140  			return riscv.AMOVD
   141  		default:
   142  			base.Fatalf("unknown float width for store %d in type %v", width, t)
   143  			return 0
   144  		}
   145  	}
   146  
   147  	switch width {
   148  	case 1:
   149  		return riscv.AMOVB
   150  	case 2:
   151  		return riscv.AMOVH
   152  	case 4:
   153  		return riscv.AMOVW
   154  	case 8:
   155  		return riscv.AMOV
   156  	default:
   157  		base.Fatalf("unknown width for store %d in type %v", width, t)
   158  		return 0
   159  	}
   160  }
   161  
   162  // largestMove returns the largest move instruction possible and its size,
   163  // given the alignment of the total size of the move.
   164  //
   165  // e.g., a 16-byte move may use MOV, but an 11-byte move must use MOVB.
   166  //
   167  // Note that the moves may not be on naturally aligned addresses depending on
   168  // the source and destination.
   169  //
   170  // This matches the calculation in ssa.moveSize.
   171  func largestMove(alignment int64) (obj.As, int64) {
   172  	switch {
   173  	case alignment%8 == 0:
   174  		return riscv.AMOV, 8
   175  	case alignment%4 == 0:
   176  		return riscv.AMOVW, 4
   177  	case alignment%2 == 0:
   178  		return riscv.AMOVH, 2
   179  	default:
   180  		return riscv.AMOVB, 1
   181  	}
   182  }
   183  
   184  // ssaMarkMoves marks any MOVXconst ops that need to avoid clobbering flags.
   185  // RISC-V has no flags, so this is a no-op.
   186  func ssaMarkMoves(s *ssagen.State, b *ssa.Block) {}
   187  
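         // ssaGenValue generates machine code for a single SSA value.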
   188  func ssaGenValue(s *ssagen.State, v *ssa.Value) {
   189  	s.SetPos(v.Pos)
   190  
   191  	switch v.Op {
   192  	case ssa.OpInitMem:
   193  		// memory arg needs no code
   194  	case ssa.OpArg:
   195  		// input args need no code
   196  	case ssa.OpPhi:
   197  		ssagen.CheckLoweredPhi(v)
   198  	case ssa.OpCopy, ssa.OpRISCV64MOVDreg:
   199  		if v.Type.IsMemory() {
   200  			return
   201  		}
   202  		rs := v.Args[0].Reg()
   203  		rd := v.Reg()
   204  		if rs == rd {
   205  			return
   206  		}
   207  		as := riscv.AMOV
   208  		if v.Type.IsFloat() {
   209  			as = riscv.AMOVD
   210  		}
   211  		p := s.Prog(as)
   212  		p.From.Type = obj.TYPE_REG
   213  		p.From.Reg = rs
   214  		p.To.Type = obj.TYPE_REG
   215  		p.To.Reg = rd
   216  	case ssa.OpRISCV64MOVDnop:
   217  		// nothing to do
   218  	case ssa.OpLoadReg:
   219  		if v.Type.IsFlags() {
   220  			v.Fatalf("load flags not implemented: %v", v.LongString())
   221  			return
   222  		}
   223  		p := s.Prog(loadByType(v.Type))
   224  		ssagen.AddrAuto(&p.From, v.Args[0])
   225  		p.To.Type = obj.TYPE_REG
   226  		p.To.Reg = v.Reg()
   227  	case ssa.OpStoreReg:
   228  		if v.Type.IsFlags() {
   229  			v.Fatalf("store flags not implemented: %v", v.LongString())
   230  			return
   231  		}
   232  		p := s.Prog(storeByType(v.Type))
   233  		p.From.Type = obj.TYPE_REG
   234  		p.From.Reg = v.Args[0].Reg()
   235  		ssagen.AddrAuto(&p.To, v)
   236  	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
    237  		// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill.
    238  		// The loop only runs once.
   239  		for _, a := range v.Block.Func.RegArgs {
   240  			// Pass the spill/unspill information along to the assembler, offset by size of
   241  			// the saved LR slot.
   242  			addr := ssagen.SpillSlotAddr(a, riscv.REG_SP, base.Ctxt.Arch.FixedFrameSize)
   243  			s.FuncInfo().AddSpill(
   244  				obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
   245  		}
   246  		v.Block.Func.RegArgs = nil
   247  
   248  		ssagen.CheckArgReg(v)
   249  	case ssa.OpSP, ssa.OpSB, ssa.OpGetG:
   250  		// nothing to do
   251  	case ssa.OpRISCV64MOVBreg, ssa.OpRISCV64MOVHreg, ssa.OpRISCV64MOVWreg,
   252  		ssa.OpRISCV64MOVBUreg, ssa.OpRISCV64MOVHUreg, ssa.OpRISCV64MOVWUreg:
   253  		a := v.Args[0]
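         		// Look through register copies (Copy/MOVDreg) to find the value being sign/zero-extended.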
   254  		for a.Op == ssa.OpCopy || a.Op == ssa.OpRISCV64MOVDreg {
   255  			a = a.Args[0]
   256  		}
   257  		as := v.Op.Asm()
   258  		rs := v.Args[0].Reg()
   259  		rd := v.Reg()
   260  		if a.Op == ssa.OpLoadReg {
   261  			t := a.Type
   262  			switch {
   263  			case v.Op == ssa.OpRISCV64MOVBreg && t.Size() == 1 && t.IsSigned(),
   264  				v.Op == ssa.OpRISCV64MOVHreg && t.Size() == 2 && t.IsSigned(),
   265  				v.Op == ssa.OpRISCV64MOVWreg && t.Size() == 4 && t.IsSigned(),
   266  				v.Op == ssa.OpRISCV64MOVBUreg && t.Size() == 1 && !t.IsSigned(),
   267  				v.Op == ssa.OpRISCV64MOVHUreg && t.Size() == 2 && !t.IsSigned(),
   268  				v.Op == ssa.OpRISCV64MOVWUreg && t.Size() == 4 && !t.IsSigned():
   269  				// arg is a proper-typed load and already sign/zero-extended
   270  				if rs == rd {
   271  					return
   272  				}
   273  				as = riscv.AMOV
   274  			default:
   275  			}
   276  		}
   277  		p := s.Prog(as)
   278  		p.From.Type = obj.TYPE_REG
   279  		p.From.Reg = rs
   280  		p.To.Type = obj.TYPE_REG
   281  		p.To.Reg = rd
   282  	case ssa.OpRISCV64ADD, ssa.OpRISCV64SUB, ssa.OpRISCV64SUBW, ssa.OpRISCV64XNOR, ssa.OpRISCV64XOR,
   283  		ssa.OpRISCV64OR, ssa.OpRISCV64ORN, ssa.OpRISCV64AND, ssa.OpRISCV64ANDN,
   284  		ssa.OpRISCV64SLL, ssa.OpRISCV64SLLW, ssa.OpRISCV64SRA, ssa.OpRISCV64SRAW, ssa.OpRISCV64SRL, ssa.OpRISCV64SRLW,
   285  		ssa.OpRISCV64SLT, ssa.OpRISCV64SLTU, ssa.OpRISCV64MUL, ssa.OpRISCV64MULW, ssa.OpRISCV64MULH,
   286  		ssa.OpRISCV64MULHU, ssa.OpRISCV64DIV, ssa.OpRISCV64DIVU, ssa.OpRISCV64DIVW,
   287  		ssa.OpRISCV64DIVUW, ssa.OpRISCV64REM, ssa.OpRISCV64REMU, ssa.OpRISCV64REMW,
   288  		ssa.OpRISCV64REMUW,
   289  		ssa.OpRISCV64ROL, ssa.OpRISCV64ROLW, ssa.OpRISCV64ROR, ssa.OpRISCV64RORW,
   290  		ssa.OpRISCV64FADDS, ssa.OpRISCV64FSUBS, ssa.OpRISCV64FMULS, ssa.OpRISCV64FDIVS,
   291  		ssa.OpRISCV64FEQS, ssa.OpRISCV64FNES, ssa.OpRISCV64FLTS, ssa.OpRISCV64FLES,
   292  		ssa.OpRISCV64FADDD, ssa.OpRISCV64FSUBD, ssa.OpRISCV64FMULD, ssa.OpRISCV64FDIVD,
   293  		ssa.OpRISCV64FEQD, ssa.OpRISCV64FNED, ssa.OpRISCV64FLTD, ssa.OpRISCV64FLED, ssa.OpRISCV64FSGNJD,
   294  		ssa.OpRISCV64MIN, ssa.OpRISCV64MAX, ssa.OpRISCV64MINU, ssa.OpRISCV64MAXU,
   295  		ssa.OpRISCV64SH1ADD, ssa.OpRISCV64SH2ADD, ssa.OpRISCV64SH3ADD:
   296  		r := v.Reg()
   297  		r1 := v.Args[0].Reg()
   298  		r2 := v.Args[1].Reg()
   299  		p := s.Prog(v.Op.Asm())
   300  		p.From.Type = obj.TYPE_REG
   301  		p.From.Reg = r2
   302  		p.Reg = r1
   303  		p.To.Type = obj.TYPE_REG
   304  		p.To.Reg = r
   305  
   306  	case ssa.OpRISCV64LoweredFMAXD, ssa.OpRISCV64LoweredFMIND, ssa.OpRISCV64LoweredFMAXS, ssa.OpRISCV64LoweredFMINS:
    307  		// The results of FMIN/FMAX match Go's required behaviour, unless one of the
    308  		// inputs is a NaN. As such, we need to explicitly test for NaN
    309  		// before using FMIN/FMAX.
   310  
   311  		// FADD Rarg0, Rarg1, Rout // FADD is used to propagate a NaN to the result in these cases.
   312  		// FEQ  Rarg0, Rarg0, Rtmp
   313  		// BEQZ Rtmp, end
   314  		// FEQ  Rarg1, Rarg1, Rtmp
   315  		// BEQZ Rtmp, end
   316  		// F(MIN | MAX)
   317  
   318  		r0 := v.Args[0].Reg()
   319  		r1 := v.Args[1].Reg()
   320  		out := v.Reg()
   321  		add, feq := riscv.AFADDD, riscv.AFEQD
   322  		if v.Op == ssa.OpRISCV64LoweredFMAXS || v.Op == ssa.OpRISCV64LoweredFMINS {
   323  			add = riscv.AFADDS
   324  			feq = riscv.AFEQS
   325  		}
   326  
   327  		p1 := s.Prog(add)
   328  		p1.From.Type = obj.TYPE_REG
   329  		p1.From.Reg = r0
   330  		p1.Reg = r1
   331  		p1.To.Type = obj.TYPE_REG
   332  		p1.To.Reg = out
   333  
   334  		p2 := s.Prog(feq)
   335  		p2.From.Type = obj.TYPE_REG
   336  		p2.From.Reg = r0
   337  		p2.Reg = r0
   338  		p2.To.Type = obj.TYPE_REG
   339  		p2.To.Reg = riscv.REG_TMP
   340  
   341  		p3 := s.Prog(riscv.ABEQ)
   342  		p3.From.Type = obj.TYPE_REG
   343  		p3.From.Reg = riscv.REG_ZERO
   344  		p3.Reg = riscv.REG_TMP
   345  		p3.To.Type = obj.TYPE_BRANCH
   346  
   347  		p4 := s.Prog(feq)
   348  		p4.From.Type = obj.TYPE_REG
   349  		p4.From.Reg = r1
   350  		p4.Reg = r1
   351  		p4.To.Type = obj.TYPE_REG
   352  		p4.To.Reg = riscv.REG_TMP
   353  
   354  		p5 := s.Prog(riscv.ABEQ)
   355  		p5.From.Type = obj.TYPE_REG
   356  		p5.From.Reg = riscv.REG_ZERO
   357  		p5.Reg = riscv.REG_TMP
   358  		p5.To.Type = obj.TYPE_BRANCH
   359  
   360  		p6 := s.Prog(v.Op.Asm())
   361  		p6.From.Type = obj.TYPE_REG
   362  		p6.From.Reg = r1
   363  		p6.Reg = r0
   364  		p6.To.Type = obj.TYPE_REG
   365  		p6.To.Reg = out
   366  
   367  		nop := s.Prog(obj.ANOP)
   368  		p3.To.SetTarget(nop)
   369  		p5.To.SetTarget(nop)
   370  
   371  	case ssa.OpRISCV64LoweredMuluhilo:
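         		// MULHU	Rarg1, Rarg0, Reg0	// high 64 bits
         		// MUL	Rarg1, Rarg0, Reg1	// low 64 bits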
   372  		r0 := v.Args[0].Reg()
   373  		r1 := v.Args[1].Reg()
   374  		p := s.Prog(riscv.AMULHU)
   375  		p.From.Type = obj.TYPE_REG
   376  		p.From.Reg = r1
   377  		p.Reg = r0
   378  		p.To.Type = obj.TYPE_REG
   379  		p.To.Reg = v.Reg0()
   380  		p1 := s.Prog(riscv.AMUL)
   381  		p1.From.Type = obj.TYPE_REG
   382  		p1.From.Reg = r1
   383  		p1.Reg = r0
   384  		p1.To.Type = obj.TYPE_REG
   385  		p1.To.Reg = v.Reg1()
   386  	case ssa.OpRISCV64LoweredMuluover:
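         		// MULHU	Rarg1, Rarg0, Reg1	// high 64 bits (non-zero on overflow)
         		// MUL	Rarg1, Rarg0, Reg0	// low 64 bits
         		// SNEZ	Reg1, Reg1		// overflow flag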
   387  		r0 := v.Args[0].Reg()
   388  		r1 := v.Args[1].Reg()
   389  		p := s.Prog(riscv.AMULHU)
   390  		p.From.Type = obj.TYPE_REG
   391  		p.From.Reg = r1
   392  		p.Reg = r0
   393  		p.To.Type = obj.TYPE_REG
   394  		p.To.Reg = v.Reg1()
   395  		p1 := s.Prog(riscv.AMUL)
   396  		p1.From.Type = obj.TYPE_REG
   397  		p1.From.Reg = r1
   398  		p1.Reg = r0
   399  		p1.To.Type = obj.TYPE_REG
   400  		p1.To.Reg = v.Reg0()
   401  		p2 := s.Prog(riscv.ASNEZ)
   402  		p2.From.Type = obj.TYPE_REG
   403  		p2.From.Reg = v.Reg1()
   404  		p2.To.Type = obj.TYPE_REG
   405  		p2.To.Reg = v.Reg1()
   406  	case ssa.OpRISCV64FMADDD, ssa.OpRISCV64FMSUBD, ssa.OpRISCV64FNMADDD, ssa.OpRISCV64FNMSUBD,
   407  		ssa.OpRISCV64FMADDS, ssa.OpRISCV64FMSUBS, ssa.OpRISCV64FNMADDS, ssa.OpRISCV64FNMSUBS:
   408  		r := v.Reg()
   409  		r1 := v.Args[0].Reg()
   410  		r2 := v.Args[1].Reg()
   411  		r3 := v.Args[2].Reg()
   412  		p := s.Prog(v.Op.Asm())
   413  		p.From.Type = obj.TYPE_REG
   414  		p.From.Reg = r2
   415  		p.Reg = r1
   416  		p.AddRestSource(obj.Addr{Type: obj.TYPE_REG, Reg: r3})
   417  		p.To.Type = obj.TYPE_REG
   418  		p.To.Reg = r
   419  	case ssa.OpRISCV64FSQRTS, ssa.OpRISCV64FNEGS, ssa.OpRISCV64FABSD, ssa.OpRISCV64FSQRTD, ssa.OpRISCV64FNEGD,
   420  		ssa.OpRISCV64FMVSX, ssa.OpRISCV64FMVXS, ssa.OpRISCV64FMVDX, ssa.OpRISCV64FMVXD,
   421  		ssa.OpRISCV64FCVTSW, ssa.OpRISCV64FCVTSL, ssa.OpRISCV64FCVTWS, ssa.OpRISCV64FCVTLS,
   422  		ssa.OpRISCV64FCVTDW, ssa.OpRISCV64FCVTDL, ssa.OpRISCV64FCVTWD, ssa.OpRISCV64FCVTLD, ssa.OpRISCV64FCVTDS, ssa.OpRISCV64FCVTSD,
   423  		ssa.OpRISCV64NOT, ssa.OpRISCV64NEG, ssa.OpRISCV64NEGW, ssa.OpRISCV64CLZ, ssa.OpRISCV64CLZW, ssa.OpRISCV64CTZ, ssa.OpRISCV64CTZW,
   424  		ssa.OpRISCV64REV8, ssa.OpRISCV64CPOP, ssa.OpRISCV64CPOPW:
   425  		p := s.Prog(v.Op.Asm())
   426  		p.From.Type = obj.TYPE_REG
   427  		p.From.Reg = v.Args[0].Reg()
   428  		p.To.Type = obj.TYPE_REG
   429  		p.To.Reg = v.Reg()
   430  	case ssa.OpRISCV64ADDI, ssa.OpRISCV64ADDIW, ssa.OpRISCV64XORI, ssa.OpRISCV64ORI, ssa.OpRISCV64ANDI,
   431  		ssa.OpRISCV64SLLI, ssa.OpRISCV64SLLIW, ssa.OpRISCV64SRAI, ssa.OpRISCV64SRAIW,
   432  		ssa.OpRISCV64SRLI, ssa.OpRISCV64SRLIW, ssa.OpRISCV64SLTI, ssa.OpRISCV64SLTIU,
   433  		ssa.OpRISCV64RORI, ssa.OpRISCV64RORIW:
   434  		p := s.Prog(v.Op.Asm())
   435  		p.From.Type = obj.TYPE_CONST
   436  		p.From.Offset = v.AuxInt
   437  		p.Reg = v.Args[0].Reg()
   438  		p.To.Type = obj.TYPE_REG
   439  		p.To.Reg = v.Reg()
   440  	case ssa.OpRISCV64MOVDconst:
   441  		p := s.Prog(v.Op.Asm())
   442  		p.From.Type = obj.TYPE_CONST
   443  		p.From.Offset = v.AuxInt
   444  		p.To.Type = obj.TYPE_REG
   445  		p.To.Reg = v.Reg()
   446  	case ssa.OpRISCV64MOVaddr:
   447  		p := s.Prog(v.Op.Asm())
   448  		p.From.Type = obj.TYPE_ADDR
   449  		p.To.Type = obj.TYPE_REG
   450  		p.To.Reg = v.Reg()
   451  
   452  		var wantreg string
    453  		// MOV $sym+off(base), R
   454  		switch v.Aux.(type) {
   455  		default:
   456  			v.Fatalf("aux is of unknown type %T", v.Aux)
   457  		case *obj.LSym:
   458  			wantreg = "SB"
   459  			ssagen.AddAux(&p.From, v)
   460  		case *ir.Name:
   461  			wantreg = "SP"
   462  			ssagen.AddAux(&p.From, v)
   463  		case nil:
    464  			// No sym, just MOV $off(SP), R
   465  			wantreg = "SP"
   466  			p.From.Reg = riscv.REG_SP
   467  			p.From.Offset = v.AuxInt
   468  		}
   469  		if reg := v.Args[0].RegName(); reg != wantreg {
   470  			v.Fatalf("bad reg %s for symbol type %T, want %s", reg, v.Aux, wantreg)
   471  		}
   472  	case ssa.OpRISCV64MOVBload, ssa.OpRISCV64MOVHload, ssa.OpRISCV64MOVWload, ssa.OpRISCV64MOVDload,
   473  		ssa.OpRISCV64MOVBUload, ssa.OpRISCV64MOVHUload, ssa.OpRISCV64MOVWUload,
   474  		ssa.OpRISCV64FMOVWload, ssa.OpRISCV64FMOVDload:
   475  		p := s.Prog(v.Op.Asm())
   476  		p.From.Type = obj.TYPE_MEM
   477  		p.From.Reg = v.Args[0].Reg()
   478  		ssagen.AddAux(&p.From, v)
   479  		p.To.Type = obj.TYPE_REG
   480  		p.To.Reg = v.Reg()
   481  	case ssa.OpRISCV64MOVBstore, ssa.OpRISCV64MOVHstore, ssa.OpRISCV64MOVWstore, ssa.OpRISCV64MOVDstore,
   482  		ssa.OpRISCV64FMOVWstore, ssa.OpRISCV64FMOVDstore:
   483  		p := s.Prog(v.Op.Asm())
   484  		p.From.Type = obj.TYPE_REG
   485  		p.From.Reg = v.Args[1].Reg()
   486  		p.To.Type = obj.TYPE_MEM
   487  		p.To.Reg = v.Args[0].Reg()
   488  		ssagen.AddAux(&p.To, v)
   489  	case ssa.OpRISCV64MOVBstorezero, ssa.OpRISCV64MOVHstorezero, ssa.OpRISCV64MOVWstorezero, ssa.OpRISCV64MOVDstorezero:
   490  		p := s.Prog(v.Op.Asm())
   491  		p.From.Type = obj.TYPE_REG
   492  		p.From.Reg = riscv.REG_ZERO
   493  		p.To.Type = obj.TYPE_MEM
   494  		p.To.Reg = v.Args[0].Reg()
   495  		ssagen.AddAux(&p.To, v)
   496  	case ssa.OpRISCV64SEQZ, ssa.OpRISCV64SNEZ:
   497  		p := s.Prog(v.Op.Asm())
   498  		p.From.Type = obj.TYPE_REG
   499  		p.From.Reg = v.Args[0].Reg()
   500  		p.To.Type = obj.TYPE_REG
   501  		p.To.Reg = v.Reg()
   502  	case ssa.OpRISCV64CALLstatic, ssa.OpRISCV64CALLclosure, ssa.OpRISCV64CALLinter:
   503  		s.Call(v)
   504  	case ssa.OpRISCV64CALLtail:
   505  		s.TailCall(v)
   506  	case ssa.OpRISCV64LoweredWB:
   507  		p := s.Prog(obj.ACALL)
   508  		p.To.Type = obj.TYPE_MEM
   509  		p.To.Name = obj.NAME_EXTERN
   510  		// AuxInt encodes how many buffer entries we need.
   511  		p.To.Sym = ir.Syms.GCWriteBarrier[v.AuxInt-1]
   512  
   513  	case ssa.OpRISCV64LoweredPanicBoundsRR, ssa.OpRISCV64LoweredPanicBoundsRC, ssa.OpRISCV64LoweredPanicBoundsCR, ssa.OpRISCV64LoweredPanicBoundsCC:
   514  		// Compute the constant we put in the PCData entry for this call.
   515  		code, signed := ssa.BoundsKind(v.AuxInt).Code()
   516  		xIsReg := false
   517  		yIsReg := false
   518  		xVal := 0
   519  		yVal := 0
   520  		switch v.Op {
   521  		case ssa.OpRISCV64LoweredPanicBoundsRR:
   522  			xIsReg = true
   523  			xVal = int(v.Args[0].Reg() - riscv.REG_X5)
   524  			yIsReg = true
   525  			yVal = int(v.Args[1].Reg() - riscv.REG_X5)
   526  		case ssa.OpRISCV64LoweredPanicBoundsRC:
   527  			xIsReg = true
   528  			xVal = int(v.Args[0].Reg() - riscv.REG_X5)
   529  			c := v.Aux.(ssa.PanicBoundsC).C
   530  			if c >= 0 && c <= abi.BoundsMaxConst {
   531  				yVal = int(c)
   532  			} else {
   533  				// Move constant to a register
   534  				yIsReg = true
   535  				if yVal == xVal {
   536  					yVal = 1
   537  				}
   538  				p := s.Prog(riscv.AMOV)
   539  				p.From.Type = obj.TYPE_CONST
   540  				p.From.Offset = c
   541  				p.To.Type = obj.TYPE_REG
   542  				p.To.Reg = riscv.REG_X5 + int16(yVal)
   543  			}
   544  		case ssa.OpRISCV64LoweredPanicBoundsCR:
   545  			yIsReg = true
    546  			yVal = int(v.Args[0].Reg() - riscv.REG_X5)
   547  			c := v.Aux.(ssa.PanicBoundsC).C
   548  			if c >= 0 && c <= abi.BoundsMaxConst {
   549  				xVal = int(c)
   550  			} else {
   551  				// Move constant to a register
   552  				if xVal == yVal {
   553  					xVal = 1
   554  				}
   555  				p := s.Prog(riscv.AMOV)
   556  				p.From.Type = obj.TYPE_CONST
   557  				p.From.Offset = c
   558  				p.To.Type = obj.TYPE_REG
   559  				p.To.Reg = riscv.REG_X5 + int16(xVal)
   560  			}
   561  		case ssa.OpRISCV64LoweredPanicBoundsCC:
   562  			c := v.Aux.(ssa.PanicBoundsCC).Cx
   563  			if c >= 0 && c <= abi.BoundsMaxConst {
   564  				xVal = int(c)
   565  			} else {
   566  				// Move constant to a register
   567  				xIsReg = true
   568  				p := s.Prog(riscv.AMOV)
   569  				p.From.Type = obj.TYPE_CONST
   570  				p.From.Offset = c
   571  				p.To.Type = obj.TYPE_REG
   572  				p.To.Reg = riscv.REG_X5 + int16(xVal)
   573  			}
   574  			c = v.Aux.(ssa.PanicBoundsCC).Cy
   575  			if c >= 0 && c <= abi.BoundsMaxConst {
   576  				yVal = int(c)
   577  			} else {
   578  				// Move constant to a register
   579  				yIsReg = true
   580  				yVal = 1
   581  				p := s.Prog(riscv.AMOV)
   582  				p.From.Type = obj.TYPE_CONST
   583  				p.From.Offset = c
   584  				p.To.Type = obj.TYPE_REG
   585  				p.To.Reg = riscv.REG_X5 + int16(yVal)
   586  			}
   587  		}
   588  		c := abi.BoundsEncode(code, signed, xIsReg, yIsReg, xVal, yVal)
   589  
   590  		p := s.Prog(obj.APCDATA)
   591  		p.From.SetConst(abi.PCDATA_PanicBounds)
   592  		p.To.SetConst(int64(c))
   593  		p = s.Prog(obj.ACALL)
   594  		p.To.Type = obj.TYPE_MEM
   595  		p.To.Name = obj.NAME_EXTERN
   596  		p.To.Sym = ir.Syms.PanicBounds
   597  
   598  	case ssa.OpRISCV64LoweredAtomicLoad8:
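         		// FENCE
         		// MOVBU	(Rarg0), Rout
         		// FENCE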
   599  		s.Prog(riscv.AFENCE)
   600  		p := s.Prog(riscv.AMOVBU)
   601  		p.From.Type = obj.TYPE_MEM
   602  		p.From.Reg = v.Args[0].Reg()
   603  		p.To.Type = obj.TYPE_REG
   604  		p.To.Reg = v.Reg0()
   605  		s.Prog(riscv.AFENCE)
   606  
   607  	case ssa.OpRISCV64LoweredAtomicLoad32, ssa.OpRISCV64LoweredAtomicLoad64:
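         		// LR	(Rarg0), Rout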
   608  		as := riscv.ALRW
   609  		if v.Op == ssa.OpRISCV64LoweredAtomicLoad64 {
   610  			as = riscv.ALRD
   611  		}
   612  		p := s.Prog(as)
   613  		p.From.Type = obj.TYPE_MEM
   614  		p.From.Reg = v.Args[0].Reg()
   615  		p.To.Type = obj.TYPE_REG
   616  		p.To.Reg = v.Reg0()
   617  
   618  	case ssa.OpRISCV64LoweredAtomicStore8:
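         		// FENCE
         		// MOVB	Rarg1, (Rarg0)
         		// FENCE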
   619  		s.Prog(riscv.AFENCE)
   620  		p := s.Prog(riscv.AMOVB)
   621  		p.From.Type = obj.TYPE_REG
   622  		p.From.Reg = v.Args[1].Reg()
   623  		p.To.Type = obj.TYPE_MEM
   624  		p.To.Reg = v.Args[0].Reg()
   625  		s.Prog(riscv.AFENCE)
   626  
   627  	case ssa.OpRISCV64LoweredAtomicStore32, ssa.OpRISCV64LoweredAtomicStore64:
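         		// AMOSWAP	Rarg1, (Rarg0), ZERO	// old value is discarded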
   628  		as := riscv.AAMOSWAPW
   629  		if v.Op == ssa.OpRISCV64LoweredAtomicStore64 {
   630  			as = riscv.AAMOSWAPD
   631  		}
   632  		p := s.Prog(as)
   633  		p.From.Type = obj.TYPE_REG
   634  		p.From.Reg = v.Args[1].Reg()
   635  		p.To.Type = obj.TYPE_MEM
   636  		p.To.Reg = v.Args[0].Reg()
   637  		p.RegTo2 = riscv.REG_ZERO
   638  
   639  	case ssa.OpRISCV64LoweredAtomicAdd32, ssa.OpRISCV64LoweredAtomicAdd64:
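         		// AMOADD	Rarg1, (Rarg0), Rtmp
         		// ADD	Rtmp, Rarg1, Rout	// Rout = new value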
   640  		as := riscv.AAMOADDW
   641  		if v.Op == ssa.OpRISCV64LoweredAtomicAdd64 {
   642  			as = riscv.AAMOADDD
   643  		}
   644  		p := s.Prog(as)
   645  		p.From.Type = obj.TYPE_REG
   646  		p.From.Reg = v.Args[1].Reg()
   647  		p.To.Type = obj.TYPE_MEM
   648  		p.To.Reg = v.Args[0].Reg()
   649  		p.RegTo2 = riscv.REG_TMP
   650  
   651  		p2 := s.Prog(riscv.AADD)
   652  		p2.From.Type = obj.TYPE_REG
   653  		p2.From.Reg = riscv.REG_TMP
   654  		p2.Reg = v.Args[1].Reg()
   655  		p2.To.Type = obj.TYPE_REG
   656  		p2.To.Reg = v.Reg0()
   657  
   658  	case ssa.OpRISCV64LoweredAtomicExchange32, ssa.OpRISCV64LoweredAtomicExchange64:
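         		// AMOSWAP	Rarg1, (Rarg0), Rout	// Rout = old value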
   659  		as := riscv.AAMOSWAPW
   660  		if v.Op == ssa.OpRISCV64LoweredAtomicExchange64 {
   661  			as = riscv.AAMOSWAPD
   662  		}
   663  		p := s.Prog(as)
   664  		p.From.Type = obj.TYPE_REG
   665  		p.From.Reg = v.Args[1].Reg()
   666  		p.To.Type = obj.TYPE_MEM
   667  		p.To.Reg = v.Args[0].Reg()
   668  		p.RegTo2 = v.Reg0()
   669  
   670  	case ssa.OpRISCV64LoweredAtomicCas32, ssa.OpRISCV64LoweredAtomicCas64:
   671  		// MOV  ZERO, Rout
   672  		// LR	(Rarg0), Rtmp
    673  		// BNE	Rtmp, Rarg1, 4(PC)
   674  		// SC	Rarg2, (Rarg0), Rtmp
   675  		// BNE	Rtmp, ZERO, -3(PC)
   676  		// MOV	$1, Rout
   677  
   678  		lr := riscv.ALRW
   679  		sc := riscv.ASCW
   680  		if v.Op == ssa.OpRISCV64LoweredAtomicCas64 {
   681  			lr = riscv.ALRD
   682  			sc = riscv.ASCD
   683  		}
   684  
   685  		r0 := v.Args[0].Reg()
   686  		r1 := v.Args[1].Reg()
   687  		r2 := v.Args[2].Reg()
   688  		out := v.Reg0()
   689  
   690  		p := s.Prog(riscv.AMOV)
   691  		p.From.Type = obj.TYPE_REG
   692  		p.From.Reg = riscv.REG_ZERO
   693  		p.To.Type = obj.TYPE_REG
   694  		p.To.Reg = out
   695  
   696  		p1 := s.Prog(lr)
   697  		p1.From.Type = obj.TYPE_MEM
   698  		p1.From.Reg = r0
   699  		p1.To.Type = obj.TYPE_REG
   700  		p1.To.Reg = riscv.REG_TMP
   701  
   702  		p2 := s.Prog(riscv.ABNE)
   703  		p2.From.Type = obj.TYPE_REG
   704  		p2.From.Reg = r1
   705  		p2.Reg = riscv.REG_TMP
   706  		p2.To.Type = obj.TYPE_BRANCH
   707  
   708  		p3 := s.Prog(sc)
   709  		p3.From.Type = obj.TYPE_REG
   710  		p3.From.Reg = r2
   711  		p3.To.Type = obj.TYPE_MEM
   712  		p3.To.Reg = r0
   713  		p3.RegTo2 = riscv.REG_TMP
   714  
   715  		p4 := s.Prog(riscv.ABNE)
   716  		p4.From.Type = obj.TYPE_REG
   717  		p4.From.Reg = riscv.REG_TMP
   718  		p4.Reg = riscv.REG_ZERO
   719  		p4.To.Type = obj.TYPE_BRANCH
   720  		p4.To.SetTarget(p1)
   721  
   722  		p5 := s.Prog(riscv.AMOV)
   723  		p5.From.Type = obj.TYPE_CONST
   724  		p5.From.Offset = 1
   725  		p5.To.Type = obj.TYPE_REG
   726  		p5.To.Reg = out
   727  
   728  		p6 := s.Prog(obj.ANOP)
   729  		p2.To.SetTarget(p6)
   730  
   731  	case ssa.OpRISCV64LoweredAtomicAnd32, ssa.OpRISCV64LoweredAtomicOr32:
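         		// AMOANDW/AMOORW	Rarg1, (Rarg0), ZERO	// old value is discarded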
   732  		p := s.Prog(v.Op.Asm())
   733  		p.From.Type = obj.TYPE_REG
   734  		p.From.Reg = v.Args[1].Reg()
   735  		p.To.Type = obj.TYPE_MEM
   736  		p.To.Reg = v.Args[0].Reg()
   737  		p.RegTo2 = riscv.REG_ZERO
   738  
   739  	case ssa.OpRISCV64LoweredZero:
   740  		mov, sz := largestMove(v.AuxInt)
   741  
   742  		//	mov	ZERO, (Rarg0)
   743  		//	ADD	$sz, Rarg0
   744  		//	BGEU	Rarg1, Rarg0, -2(PC)
   745  
   746  		p := s.Prog(mov)
   747  		p.From.Type = obj.TYPE_REG
   748  		p.From.Reg = riscv.REG_ZERO
   749  		p.To.Type = obj.TYPE_MEM
   750  		p.To.Reg = v.Args[0].Reg()
   751  
   752  		p2 := s.Prog(riscv.AADD)
   753  		p2.From.Type = obj.TYPE_CONST
   754  		p2.From.Offset = sz
   755  		p2.To.Type = obj.TYPE_REG
   756  		p2.To.Reg = v.Args[0].Reg()
   757  
   758  		p3 := s.Prog(riscv.ABGEU)
   759  		p3.To.Type = obj.TYPE_BRANCH
   760  		p3.Reg = v.Args[0].Reg()
   761  		p3.From.Type = obj.TYPE_REG
   762  		p3.From.Reg = v.Args[1].Reg()
   763  		p3.To.SetTarget(p)
   764  
   765  	case ssa.OpRISCV64LoweredMove:
   766  		mov, sz := largestMove(v.AuxInt)
   767  
   768  		//	mov	(Rarg1), T2
   769  		//	mov	T2, (Rarg0)
   770  		//	ADD	$sz, Rarg0
   771  		//	ADD	$sz, Rarg1
    772  		//	BGEU	Rarg2, Rarg1, -4(PC)
   773  
   774  		p := s.Prog(mov)
   775  		p.From.Type = obj.TYPE_MEM
   776  		p.From.Reg = v.Args[1].Reg()
   777  		p.To.Type = obj.TYPE_REG
   778  		p.To.Reg = riscv.REG_T2
   779  
   780  		p2 := s.Prog(mov)
   781  		p2.From.Type = obj.TYPE_REG
   782  		p2.From.Reg = riscv.REG_T2
   783  		p2.To.Type = obj.TYPE_MEM
   784  		p2.To.Reg = v.Args[0].Reg()
   785  
   786  		p3 := s.Prog(riscv.AADD)
   787  		p3.From.Type = obj.TYPE_CONST
   788  		p3.From.Offset = sz
   789  		p3.To.Type = obj.TYPE_REG
   790  		p3.To.Reg = v.Args[0].Reg()
   791  
   792  		p4 := s.Prog(riscv.AADD)
   793  		p4.From.Type = obj.TYPE_CONST
   794  		p4.From.Offset = sz
   795  		p4.To.Type = obj.TYPE_REG
   796  		p4.To.Reg = v.Args[1].Reg()
   797  
   798  		p5 := s.Prog(riscv.ABGEU)
   799  		p5.To.Type = obj.TYPE_BRANCH
   800  		p5.Reg = v.Args[1].Reg()
   801  		p5.From.Type = obj.TYPE_REG
   802  		p5.From.Reg = v.Args[2].Reg()
   803  		p5.To.SetTarget(p)
   804  
   805  	case ssa.OpRISCV64LoweredNilCheck:
   806  		// Issue a load which will fault if arg is nil.
   807  		p := s.Prog(riscv.AMOVB)
   808  		p.From.Type = obj.TYPE_MEM
   809  		p.From.Reg = v.Args[0].Reg()
   810  		ssagen.AddAux(&p.From, v)
   811  		p.To.Type = obj.TYPE_REG
   812  		p.To.Reg = riscv.REG_ZERO
   813  		if logopt.Enabled() {
   814  			logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name)
   815  		}
    816  		if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
   817  			base.WarnfAt(v.Pos, "generated nil check")
   818  		}
   819  
   820  	case ssa.OpRISCV64LoweredGetClosurePtr:
   821  		// Closure pointer is S10 (riscv.REG_CTXT).
   822  		ssagen.CheckLoweredGetClosurePtr(v)
   823  
   824  	case ssa.OpRISCV64LoweredGetCallerSP:
   825  		// caller's SP is FixedFrameSize below the address of the first arg
   826  		p := s.Prog(riscv.AMOV)
   827  		p.From.Type = obj.TYPE_ADDR
   828  		p.From.Offset = -base.Ctxt.Arch.FixedFrameSize
   829  		p.From.Name = obj.NAME_PARAM
   830  		p.To.Type = obj.TYPE_REG
   831  		p.To.Reg = v.Reg()
   832  
   833  	case ssa.OpRISCV64LoweredGetCallerPC:
   834  		p := s.Prog(obj.AGETCALLERPC)
   835  		p.To.Type = obj.TYPE_REG
   836  		p.To.Reg = v.Reg()
   837  
   838  	case ssa.OpRISCV64DUFFZERO:
   839  		p := s.Prog(obj.ADUFFZERO)
   840  		p.To.Type = obj.TYPE_MEM
   841  		p.To.Name = obj.NAME_EXTERN
   842  		p.To.Sym = ir.Syms.Duffzero
   843  		p.To.Offset = v.AuxInt
   844  
   845  	case ssa.OpRISCV64DUFFCOPY:
   846  		p := s.Prog(obj.ADUFFCOPY)
   847  		p.To.Type = obj.TYPE_MEM
   848  		p.To.Name = obj.NAME_EXTERN
   849  		p.To.Sym = ir.Syms.Duffcopy
   850  		p.To.Offset = v.AuxInt
   851  
   852  	case ssa.OpRISCV64LoweredPubBarrier:
   853  		// FENCE
   854  		s.Prog(v.Op.Asm())
   855  
   856  	case ssa.OpRISCV64LoweredRound32F, ssa.OpRISCV64LoweredRound64F:
   857  		// input is already rounded
   858  
   859  	case ssa.OpClobber, ssa.OpClobberReg:
   860  		// TODO: implement for clobberdead experiment. Nop is ok for now.
   861  
   862  	default:
   863  		v.Fatalf("Unhandled op %v", v.Op)
   864  	}
   865  }
   866  
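         // blockBranch maps a conditional block kind to its branch instruction.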
   867  var blockBranch = [...]obj.As{
   868  	ssa.BlockRISCV64BEQ:  riscv.ABEQ,
   869  	ssa.BlockRISCV64BEQZ: riscv.ABEQZ,
   870  	ssa.BlockRISCV64BGE:  riscv.ABGE,
   871  	ssa.BlockRISCV64BGEU: riscv.ABGEU,
   872  	ssa.BlockRISCV64BGEZ: riscv.ABGEZ,
   873  	ssa.BlockRISCV64BGTZ: riscv.ABGTZ,
   874  	ssa.BlockRISCV64BLEZ: riscv.ABLEZ,
   875  	ssa.BlockRISCV64BLT:  riscv.ABLT,
   876  	ssa.BlockRISCV64BLTU: riscv.ABLTU,
   877  	ssa.BlockRISCV64BLTZ: riscv.ABLTZ,
   878  	ssa.BlockRISCV64BNE:  riscv.ABNE,
   879  	ssa.BlockRISCV64BNEZ: riscv.ABNEZ,
   880  }
   881  
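         // ssaGenBlock generates the control flow instructions at the end of block b.
         // next is the block that will be laid out immediately after b.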
   882  func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
   883  	s.SetPos(b.Pos)
   884  
   885  	switch b.Kind {
   886  	case ssa.BlockPlain, ssa.BlockDefer:
   887  		if b.Succs[0].Block() != next {
   888  			p := s.Prog(obj.AJMP)
   889  			p.To.Type = obj.TYPE_BRANCH
   890  			s.Branches = append(s.Branches, ssagen.Branch{P: p, B: b.Succs[0].Block()})
   891  		}
   892  	case ssa.BlockExit, ssa.BlockRetJmp:
   893  	case ssa.BlockRet:
   894  		s.Prog(obj.ARET)
   895  	case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BNEZ,
   896  		ssa.BlockRISCV64BLT, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BGEZ,
   897  		ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:
   898  
   899  		as := blockBranch[b.Kind]
   900  		invAs := riscv.InvertBranch(as)
   901  
   902  		var p *obj.Prog
   903  		switch next {
   904  		case b.Succs[0].Block():
   905  			p = s.Br(invAs, b.Succs[1].Block())
   906  		case b.Succs[1].Block():
   907  			p = s.Br(as, b.Succs[0].Block())
   908  		default:
   909  			if b.Likely != ssa.BranchUnlikely {
   910  				p = s.Br(as, b.Succs[0].Block())
   911  				s.Br(obj.AJMP, b.Succs[1].Block())
   912  			} else {
   913  				p = s.Br(invAs, b.Succs[1].Block())
   914  				s.Br(obj.AJMP, b.Succs[0].Block())
   915  			}
   916  		}
   917  
   918  		p.From.Type = obj.TYPE_REG
   919  		switch b.Kind {
   920  		case ssa.BlockRISCV64BEQ, ssa.BlockRISCV64BNE, ssa.BlockRISCV64BLT, ssa.BlockRISCV64BGE, ssa.BlockRISCV64BLTU, ssa.BlockRISCV64BGEU:
   921  			if b.NumControls() != 2 {
   922  				b.Fatalf("Unexpected number of controls (%d != 2): %s", b.NumControls(), b.LongString())
   923  			}
   924  			p.From.Reg = b.Controls[0].Reg()
   925  			p.Reg = b.Controls[1].Reg()
   926  
   927  		case ssa.BlockRISCV64BEQZ, ssa.BlockRISCV64BNEZ, ssa.BlockRISCV64BGEZ, ssa.BlockRISCV64BLEZ, ssa.BlockRISCV64BLTZ, ssa.BlockRISCV64BGTZ:
   928  			if b.NumControls() != 1 {
   929  				b.Fatalf("Unexpected number of controls (%d != 1): %s", b.NumControls(), b.LongString())
   930  			}
   931  			p.From.Reg = b.Controls[0].Reg()
   932  		}
   933  
   934  	default:
   935  		b.Fatalf("Unhandled block: %s", b.LongString())
   936  	}
   937  }
   938  
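         // loadRegResult loads the value of n (at additional offset off) from its stack frame slot into register reg.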
   939  func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
   940  	p := s.Prog(loadByType(t))
   941  	p.From.Type = obj.TYPE_MEM
   942  	p.From.Name = obj.NAME_AUTO
   943  	p.From.Sym = n.Linksym()
   944  	p.From.Offset = n.FrameOffset() + off
   945  	p.To.Type = obj.TYPE_REG
   946  	p.To.Reg = reg
   947  	return p
   948  }
   949  
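         // spillArgReg appends an instruction that spills register reg to the stack slot of argument n (at additional offset off).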
   950  func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
   951  	p = pp.Append(p, storeByType(t), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
   952  	p.To.Name = obj.NAME_PARAM
   953  	p.To.Sym = n.Linksym()
   954  	p.Pos = p.Pos.WithNotStmt()
   955  	return p
   956  }
   957  
